From 102f95ef10eaafd0b60e8bdff533d950383dd936 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Mon, 14 Oct 2024 16:55:13 +0100 Subject: [PATCH 01/52] chore: add v0.6.0 to netbox-plugin manifest (#44) Signed-off-by: Michal Fiedorowicz --- netbox-plugin.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/netbox-plugin.yaml b/netbox-plugin.yaml index 28cf9ad..3046a2a 100644 --- a/netbox-plugin.yaml +++ b/netbox-plugin.yaml @@ -1,6 +1,9 @@ version: 0.1 package_name: netboxlabs-diode-netbox-plugin compatibility: + - release: 0.6.0 + netbox_min: 4.1.0 + netbox_max: 4.1.3 - release: 0.5.1 netbox_min: 4.1.0 netbox_max: 4.1.3 From abaea64c69a47e799bb0c055dd370c6f0297907e Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Mon, 14 Oct 2024 17:03:31 +0100 Subject: [PATCH 02/52] chore: run lint and tests workflow on plugin code change only (#45) Signed-off-by: Michal Fiedorowicz --- .github/workflows/lint-tests.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/lint-tests.yml b/.github/workflows/lint-tests.yml index e47f24f..572872b 100644 --- a/.github/workflows/lint-tests.yml +++ b/.github/workflows/lint-tests.yml @@ -2,9 +2,13 @@ name: Lint and tests on: workflow_dispatch: pull_request: + paths: + - "netbox_diode_plugin/**" push: branches: - "!release" + paths: + - "netbox_diode_plugin/**" concurrency: group: ${{ github.workflow }} From 2f5a8637dba7af513ddc025056dd5eb3991a2794 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Mon, 21 Oct 2024 11:32:55 +0100 Subject: [PATCH 03/52] chore: add GitHub issue templates (#46) Signed-off-by: Michal Fiedorowicz --- .../ISSUE_TEMPLATE/01-feature_request.yaml | 42 +++++++++++++ .github/ISSUE_TEMPLATE/02-bug_report.yaml | 62 +++++++++++++++++++ .../03-documentation_change.yaml | 35 +++++++++++ .github/ISSUE_TEMPLATE/config.yaml | 1 + 4 files changed, 140 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/01-feature_request.yaml create mode 100644 
.github/ISSUE_TEMPLATE/02-bug_report.yaml create mode 100644 .github/ISSUE_TEMPLATE/03-documentation_change.yaml create mode 100644 .github/ISSUE_TEMPLATE/config.yaml diff --git a/.github/ISSUE_TEMPLATE/01-feature_request.yaml b/.github/ISSUE_TEMPLATE/01-feature_request.yaml new file mode 100644 index 0000000..a7d3037 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/01-feature_request.yaml @@ -0,0 +1,42 @@ +--- +name: ✨ Feature Request +description: Propose a new Diode NetBox Plugin feature or enhancement +labels: ["enhancement", "status: needs triage"] +body: + - type: input + attributes: + label: Diode NetBox Plugin version + description: What version of Diode NetBox Plugin are you currently running? + placeholder: v0.6.0 + validations: + required: true + - type: input + attributes: + label: NetBox version + description: What version of NetBox are you currently running? + placeholder: v4.1.3 + validations: + required: true + - type: dropdown + attributes: + label: Feature type + options: + - New functionality + - Change to existing functionality + validations: + required: true + - type: textarea + attributes: + label: Proposed feature or enhancement + description: > + Describe in detail the new feature or enhancement you are proposing. The more detail you provide here, + the greater chance your proposal has of being discussed. + validations: + required: true + - type: textarea + attributes: + label: Use case + description: > + Explain how adding this feature or enhancement would benefit Diode users. What need does it address? 
+ validations: + required: true diff --git a/.github/ISSUE_TEMPLATE/02-bug_report.yaml b/.github/ISSUE_TEMPLATE/02-bug_report.yaml new file mode 100644 index 0000000..009ccf4 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/02-bug_report.yaml @@ -0,0 +1,62 @@ +--- +name: 🐛 Bug Report +description: Report a reproducible bug in the current release of Diode NetBox Plugin +labels: ["bug", "status: needs triage"] +body: + - type: input + attributes: + label: Diode NetBox Plugin version + description: What version of Diode NetBox Plugin are you currently running? + placeholder: v0.6.0 + validations: + required: true + - type: input + attributes: + label: NetBox version + description: What version of NetBox are you currently running? + placeholder: v4.1.3 + validations: + required: true + - type: input + attributes: + label: Diode version + description: What version of Diode are you currently running? + placeholder: v0.6.0 + validations: + required: true + - type: dropdown + attributes: + label: Diode SDK type + description: What type of Diode SDK are you currently running? + options: + - diode-sdk-python + - diode-sdk-go + validations: + required: true + - type: input + attributes: + label: Diode SDK version + description: What version of Diode SDK are you currently running? + placeholder: v0.4.0 + validations: + required: true + - type: textarea + attributes: + label: Steps to reproduce + description: > + Describe in detail the exact steps that someone else can take to reproduce this bug using given Diode NetBox + Plugin, NetBox, Diode and Diode SDK versions. + validations: + required: true + - type: textarea + attributes: + label: Expected behavior + description: What did you expect to happen? + validations: + required: true + - type: textarea + attributes: + label: Observed behavior + description: What happened instead? 
+ validations: + required: true diff --git a/.github/ISSUE_TEMPLATE/03-documentation_change.yaml b/.github/ISSUE_TEMPLATE/03-documentation_change.yaml new file mode 100644 index 0000000..ed36ab8 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/03-documentation_change.yaml @@ -0,0 +1,35 @@ +--- +name: 📖 Documentation Change +description: Suggest an addition or modification to the Diode NetBox Plugin documentation +labels: ["documentation", "status: needs triage"] +body: + - type: dropdown + attributes: + label: Change type + description: What type of change are you proposing? + options: + - Addition + - Correction + - Removal + - Cleanup (formatting, typos, etc.) + validations: + required: true + - type: dropdown + attributes: + label: Area + description: To what section of the documentation does this change primarily pertain? + options: + - Features + - Installation/upgrade + - Getting started + - Configuration + - Development + - Other + validations: + required: true + - type: textarea + attributes: + label: Proposed changes + description: Describe the proposed changes and why they are necessary. 
+ validations: + required: true diff --git a/.github/ISSUE_TEMPLATE/config.yaml b/.github/ISSUE_TEMPLATE/config.yaml new file mode 100644 index 0000000..3ba13e0 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yaml @@ -0,0 +1 @@ +blank_issues_enabled: false From a49113b19f3c617fdba2b2b96b629502aba82b5f Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Wed, 27 Nov 2024 11:22:51 +0000 Subject: [PATCH 04/52] chore: GHA release - use GITHUB_TOKEN (#48) Signed-off-by: Michal Fiedorowicz --- .github/workflows/release.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 28e15f4..a121e67 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -9,7 +9,7 @@ concurrency: cancel-in-progress: false env: - GH_TOKEN: ${{ secrets.ORB_CI_GH_TOKEN }} + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} SEMANTIC_RELEASE_PACKAGE: ${{ github.repository }} PYTHON_RUNTIME_VERSION: "3.11" APP_NAME: diode-netbox-plugin From 5f44578ce202a3fe4f33460a0592c28bbe90a47a Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Wed, 27 Nov 2024 12:15:42 +0000 Subject: [PATCH 05/52] chore: GHA release - set permissions (#49) Signed-off-by: Michal Fiedorowicz --- .github/workflows/release.yaml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index a121e67..7b563dc 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -15,6 +15,10 @@ env: APP_NAME: diode-netbox-plugin PYTHON_PACKAGE_NAME: netboxlabs-diode-netbox-plugin +permissions: + id-token: write + contents: write + jobs: get-python-package-name: name: Get Python package name @@ -126,9 +130,6 @@ jobs: needs: [ get-python-package-name, get-next-version ] runs-on: ubuntu-latest timeout-minutes: 5 - permissions: - id-token: write - contents: read env: BUILD_VERSION: ${{ needs.get-next-version.outputs.new-release-version }} 
BUILD_TRACK: release From 7838f9d68c6cd07cef0038ee0aca86e62707633e Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Thu, 12 Dec 2024 12:14:00 -0500 Subject: [PATCH 06/52] feat: enable netbox_branching plugin by default (#51) * enable netbox_branching plugin by default in development/test docker image * include netbox-worker * add development setting defaults * don't pin branching plugin --- docker/Dockerfile-diode-netbox-plugin | 4 ++++ docker/docker-compose.yaml | 21 +++++++++++++++++++-- docker/netbox/configuration/plugins.py | 5 ++++- docker/netbox/local_settings.py | 12 ++++++++++++ docker/netbox/plugins_dev.py | 18 ++++++++++++++++++ docker/netbox/plugins_test.py | 5 ++++- docker/requirements-diode-netbox-plugin.txt | 1 + 7 files changed, 62 insertions(+), 4 deletions(-) create mode 100644 docker/netbox/local_settings.py create mode 100644 docker/netbox/plugins_dev.py diff --git a/docker/Dockerfile-diode-netbox-plugin b/docker/Dockerfile-diode-netbox-plugin index 4a712d6..2453ef7 100644 --- a/docker/Dockerfile-diode-netbox-plugin +++ b/docker/Dockerfile-diode-netbox-plugin @@ -4,5 +4,9 @@ COPY ./netbox/configuration/ /etc/netbox/config/ RUN chmod 755 /etc/netbox/config/* && \ chown unit:root /etc/netbox/config/* +COPY ./netbox/local_settings.py /opt/netbox/netbox/netbox/local_settings.py +RUN chmod 755 /opt/netbox/netbox/netbox/local_settings.py && \ + chown unit:root /opt/netbox/netbox/netbox/local_settings.py + COPY ./requirements-diode-netbox-plugin.txt /opt/netbox/ RUN /opt/netbox/venv/bin/pip install --no-warn-script-location -r /opt/netbox/requirements-diode-netbox-plugin.txt diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index f276518..81dd163 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -1,6 +1,6 @@ name: diode-netbox-plugin services: - netbox: + netbox: &netbox image: netboxcommunity/netbox:v4.1-3.0.1-diode-netbox-plugin build: context: . 
@@ -22,13 +22,30 @@ services: - ./netbox/nginx-unit.json:/opt/netbox/nginx-unit.json:z,ro - ../netbox_diode_plugin:/opt/netbox/netbox/netbox_diode_plugin:ro - ./netbox/launch-netbox.sh:/opt/netbox/launch-netbox.sh:z,ro + - ./netbox/plugins_dev.py:/etc/netbox/config/plugins.py:z,ro - ./coverage:/opt/netbox/netbox/coverage:z,rw - netbox-media-files:/opt/netbox/netbox/media:rw - netbox-reports-files:/opt/netbox/netbox/reports:rw - netbox-scripts-files:/opt/netbox/netbox/scripts:rw ports: - "8000:8080" - + + netbox-worker: + <<: *netbox + depends_on: + netbox: + condition: service_healthy + command: + - /opt/netbox/venv/bin/python + - /opt/netbox/netbox/manage.py + - rqworker + healthcheck: + test: ps -aux | grep -v grep | grep -q rqworker || exit 1 + start_period: 20s + timeout: 3s + interval: 15s + ports: [] + # postgres netbox-postgres: image: docker.io/postgres:16-alpine diff --git a/docker/netbox/configuration/plugins.py b/docker/netbox/configuration/plugins.py index 09ea896..c6deec2 100644 --- a/docker/netbox/configuration/plugins.py +++ b/docker/netbox/configuration/plugins.py @@ -4,7 +4,10 @@ # To learn how to build images with your required plugins # See https://github.com/netbox-community/netbox-docker/wiki/Using-Netbox-Plugins -PLUGINS = ["netbox_diode_plugin"] +PLUGINS = [ + "netbox_diode_plugin", + "netbox_branching", +] # PLUGINS_CONFIG = { # "netbox_diode_plugin": { diff --git a/docker/netbox/local_settings.py b/docker/netbox/local_settings.py new file mode 100644 index 0000000..6ab2063 --- /dev/null +++ b/docker/netbox/local_settings.py @@ -0,0 +1,12 @@ +from netbox_branching.utilities import DynamicSchemaDict +from .configuration import DATABASE + +# Wrap DATABASES with DynamicSchemaDict for dynamic schema support +DATABASES = DynamicSchemaDict({ + 'default': DATABASE, +}) + +# Employ our custom database router +DATABASE_ROUTERS = [ + 'netbox_branching.database.BranchAwareRouter', +] \ No newline at end of file diff --git a/docker/netbox/plugins_dev.py 
b/docker/netbox/plugins_dev.py new file mode 100644 index 0000000..a550150 --- /dev/null +++ b/docker/netbox/plugins_dev.py @@ -0,0 +1,18 @@ +# Add your plugins and plugin settings here. +# Of course uncomment this file out. + +# To learn how to build images with your required plugins +# See https://github.com/netbox-community/netbox-docker/wiki/Using-Netbox-Plugins + +PLUGINS = [ + "netbox_diode_plugin", + "netbox_branching", +] + +PLUGINS_CONFIG = { + "netbox_diode_plugin": { + "auto_provision_users": True, + # Diode gRPC target for communication with Diode server + "diode_target_override": "grpc://host.docker.internal:8080/diode", + } +} diff --git a/docker/netbox/plugins_test.py b/docker/netbox/plugins_test.py index 2b3058c..2bb3e8a 100644 --- a/docker/netbox/plugins_test.py +++ b/docker/netbox/plugins_test.py @@ -4,7 +4,10 @@ # To learn how to build images with your required plugins # See https://github.com/netbox-community/netbox-docker/wiki/Using-Netbox-Plugins -PLUGINS = ["netbox_diode_plugin"] +PLUGINS = [ + "netbox_diode_plugin", + "netbox_branching", +] PLUGINS_CONFIG = { "netbox_diode_plugin": { diff --git a/docker/requirements-diode-netbox-plugin.txt b/docker/requirements-diode-netbox-plugin.txt index 7ced5cf..8285846 100644 --- a/docker/requirements-diode-netbox-plugin.txt +++ b/docker/requirements-diode-netbox-plugin.txt @@ -4,3 +4,4 @@ coverage==7.6.0 grpcio==1.62.1 protobuf==5.28.1 pytest==8.0.2 +netboxlabs-netbox-branching \ No newline at end of file From 3477b04abc9d2d3c58b4aaca66fdeb74656abc07 Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Fri, 13 Dec 2024 13:10:44 -0500 Subject: [PATCH 07/52] fix: do not treat _branch parameter as search criterion (#52) --- netbox_diode_plugin/api/views.py | 2 +- netbox_diode_plugin/tests/test_api_object_state.py | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index 
173a97d..1768e5a 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -145,7 +145,7 @@ def _additional_attributes_query_filter(self): """Get the additional attributes query filter.""" additional_attributes = {} for attr in self.request.query_params: - if attr not in ["object_type", "id", "q"]: + if attr not in ["object_type", "id", "q", "_branch"]: additional_attributes[attr] = self.request.query_params.get(attr) return dict(additional_attributes.items()) diff --git a/netbox_diode_plugin/tests/test_api_object_state.py b/netbox_diode_plugin/tests/test_api_object_state.py index 4aa16b8..7031549 100644 --- a/netbox_diode_plugin/tests/test_api_object_state.py +++ b/netbox_diode_plugin/tests/test_api_object_state.py @@ -379,3 +379,17 @@ def test_common_user_with_permissions_get_ip_state_using_q_objects(self): .get("name"), self.interfaces[0].name, ) + + def test_common_user_get_object_state_with_branch_parameter_specified(self): + """Test searching accepts _branch parameter with additional attributes specified.""" + query_parameters = { + "q": self.ip_addresses[0].address.__str__(), + "object_type": "ipam.ipaddress", + "interface": self.interfaces[0].id, + "_branch": "" + } + + response = self.client.get(self.url, query_parameters, **self.user_header) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.json().get("object_type"), "ipam.ipaddress") From 53fbc02566a5933f95adde4f5fb009747248edaf Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Mon, 30 Dec 2024 11:21:59 +0000 Subject: [PATCH 08/52] chore: security improvements (#53) * chore: add safe redirect to login Signed-off-by: Michal Fiedorowicz * chore: gha - add missing permissions Signed-off-by: Michal Fiedorowicz * chore: gha - pin actions to commit hashes Signed-off-by: Michal Fiedorowicz * tidy up Signed-off-by: Michal Fiedorowicz --------- Signed-off-by: Michal Fiedorowicz --- .github/workflows/lint-tests.yml | 2 +- 
.github/workflows/manifest-modified.yaml | 3 +++ .github/workflows/release.yaml | 10 +++++----- netbox_diode_plugin/views.py | 25 ++++++++++++++++++------ 4 files changed, 28 insertions(+), 12 deletions(-) diff --git a/.github/workflows/lint-tests.yml b/.github/workflows/lint-tests.yml index 572872b..199b0d5 100644 --- a/.github/workflows/lint-tests.yml +++ b/.github/workflows/lint-tests.yml @@ -47,7 +47,7 @@ jobs: run: | make docker-compose-netbox-plugin-test-cover - name: Coverage comment - uses: orgoro/coverage@v3.2 + uses: orgoro/coverage@3f13a558c5af7376496aa4848bf0224aead366ac # v3.2 with: coverageFile: ./docker/coverage/report.xml token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/manifest-modified.yaml b/.github/workflows/manifest-modified.yaml index 28bd89e..bd33623 100644 --- a/.github/workflows/manifest-modified.yaml +++ b/.github/workflows/manifest-modified.yaml @@ -10,6 +10,9 @@ concurrency: group: ${{ github.workflow }} cancel-in-progress: false +permissions: + contents: read + jobs: manifest-modified: uses: netboxlabs/public-workflows/.github/workflows/reusable-plugin-manifest-modified.yml@release diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 7b563dc..621dd4f 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -42,7 +42,7 @@ jobs: with: node-version: "lts/*" - name: Write package.json - uses: DamianReeves/write-file-action@master + uses: DamianReeves/write-file-action@6929a9a6d1807689191dcc8bbe62b54d70a32b42 # v1.3 with: path: ./package.json write-mode: overwrite @@ -56,7 +56,7 @@ jobs: } } - name: Write .releaserc.json - uses: DamianReeves/write-file-action@master + uses: DamianReeves/write-file-action@6929a9a6d1807689191dcc8bbe62b54d70a32b42 # v1.3 with: path: ./.releaserc.json write-mode: overwrite @@ -166,7 +166,7 @@ jobs: retention-days: 30 if-no-files-found: error - name: Publish release distributions to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 + 
uses: pypa/gh-action-pypi-publish@67339c736fd9354cd4f8cb0b744f2b82a74b5c70 # v.1.12.3 with: packages-dir: dist @@ -181,7 +181,7 @@ jobs: with: node-version: "lts/*" - name: Write package.json - uses: DamianReeves/write-file-action@master + uses: DamianReeves/write-file-action@6929a9a6d1807689191dcc8bbe62b54d70a32b42 # v1.3 with: path: ./package.json write-mode: overwrite @@ -195,7 +195,7 @@ jobs: } } - name: Write .releaserc.json - uses: DamianReeves/write-file-action@master + uses: DamianReeves/write-file-action@6929a9a6d1807689191dcc8bbe62b54d70a32b42 # v1.3 with: path: ./.releaserc.json write-mode: overwrite diff --git a/netbox_diode_plugin/views.py b/netbox_diode_plugin/views.py index 8acfe70..75f884b 100644 --- a/netbox_diode_plugin/views.py +++ b/netbox_diode_plugin/views.py @@ -7,7 +7,9 @@ from django.contrib import messages from django.contrib.auth import get_user, get_user_model from django.core.cache import cache +from django.http import HttpResponseRedirect from django.shortcuts import redirect, render +from django.utils.http import url_has_allowed_host_and_scheme from django.views.generic import View from netbox.plugins import get_plugin_config from netbox.views import generic @@ -28,6 +30,17 @@ User = get_user_model() +def redirect_to_login(request): + """Redirect to login view.""" + redirect_url = netbox_settings.LOGIN_URL + target = request.path + + if target and url_has_allowed_host_and_scheme(target, allowed_hosts=None): + redirect_url = f"{netbox_settings.LOGIN_URL}?next={target}" + + return HttpResponseRedirect(redirect_url) + + class IngestionLogsView(View): """Ingestion logs view.""" @@ -36,7 +49,7 @@ class IngestionLogsView(View): def get(self, request): """Render ingestion logs template.""" if not request.user.is_authenticated or not request.user.is_staff: - return redirect(f"{netbox_settings.LOGIN_URL}?next={request.path}") + return redirect_to_login(request) netbox_to_diode_username = get_diode_username_for_user_type("netbox_to_diode") try: 
@@ -118,7 +131,7 @@ class SettingsView(View): def get(self, request): """Render settings template.""" if not request.user.is_authenticated or not request.user.is_staff: - return redirect(f"{netbox_settings.LOGIN_URL}?next={request.path}") + return redirect_to_login(request) diode_target_override = get_plugin_config( "netbox_diode_plugin", "diode_target_override" @@ -187,7 +200,7 @@ class SettingsEditView(generic.ObjectEditView): def get(self, request, *args, **kwargs): """GET request handler.""" if not request.user.is_authenticated or not request.user.is_staff: - return redirect(f"{netbox_settings.LOGIN_URL}?next={request.path}") + return redirect_to_login(request) diode_target_override = get_plugin_config( "netbox_diode_plugin", "diode_target_override" @@ -207,7 +220,7 @@ def get(self, request, *args, **kwargs): def post(self, request, *args, **kwargs): """POST request handler.""" if not request.user.is_authenticated or not request.user.is_staff: - return redirect(f"{netbox_settings.LOGIN_URL}?next={request.path}") + return redirect_to_login(request) diode_target_override = get_plugin_config( "netbox_diode_plugin", "diode_target_override" @@ -272,7 +285,7 @@ def _retrieve_users(self): def get(self, request): """GET request handler.""" if not request.user.is_authenticated or not request.user.is_staff: - return redirect(f"{netbox_settings.LOGIN_URL}?next={request.path}") + return redirect_to_login(request) users = self._retrieve_users() @@ -285,7 +298,7 @@ def get(self, request): def post(self, request): """POST request handler.""" if not request.user.is_authenticated or not request.user.is_staff: - return redirect(f"{netbox_settings.LOGIN_URL}?next={request.path}") + return redirect_to_login(request) users = self._retrieve_users() From cd55ef843132d8bac6601f4e2c9a5339ef8ca492 Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Tue, 31 Dec 2024 15:34:19 -0500 Subject: [PATCH 09/52] chore: update reconciler sdk to pick up ignored 
state (#54) * updates sdk to pick up ignored state * handle unknown states gracefully --- .../reconciler/sdk/v1/reconciler_pb2.py | 24 +++++++++---------- .../reconciler/sdk/v1/reconciler_pb2.pyi | 8 +++++-- netbox_diode_plugin/tables.py | 5 +++- 3 files changed, 22 insertions(+), 15 deletions(-) diff --git a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.py b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.py index 08a5dd6..293ef8d 100644 --- a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.py +++ b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.py @@ -16,7 +16,7 @@ from netbox_diode_plugin.reconciler.sdk.validate import validate_pb2 as validate_dot_validate__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19\x64iode/v1/reconciler.proto\x12\x08\x64iode.v1\x1a\x17\x64iode/v1/ingester.proto\x1a\x17validate/validate.proto\"Y\n\x13IngestionDataSource\x12\x1e\n\x04name\x18\x01 \x01(\tB\n\xfa\x42\x07r\x05\x10\x01\x18\xff\x01R\x04name\x12\"\n\x07\x61pi_key\x18\x02 \x01(\tB\t\xfa\x42\x06r\x04\x10(\x18(R\x06\x61piKey\"\xab\x01\n#RetrieveIngestionDataSourcesRequest\x12\x1e\n\x04name\x18\x01 \x01(\tB\n\xfa\x42\x07r\x05\x10\x01\x18\xff\x01R\x04name\x12%\n\x08sdk_name\x18\x02 \x01(\tB\n\xfa\x42\x07r\x05\x10\x01\x18\xff\x01R\x07sdkName\x12=\n\x0bsdk_version\x18\x03 \x01(\tB\x1c\xfa\x42\x19r\x17\x32\x15^(\\d)+\\.(\\d)+\\.(\\d)+$R\nsdkVersion\"{\n$RetrieveIngestionDataSourcesResponse\x12S\n\x16ingestion_data_sources\x18\x01 \x03(\x0b\x32\x1d.diode.v1.IngestionDataSourceR\x14ingestionDataSources\"\xbe\x02\n\x0eIngestionError\x12\x18\n\x07message\x18\x01 \x01(\tR\x07message\x12\x12\n\x04\x63ode\x18\x02 \x01(\x05R\x04\x63ode\x12:\n\x07\x64\x65tails\x18\x03 \x01(\x0b\x32 .diode.v1.IngestionError.DetailsR\x07\x64\x65tails\x1a\xc1\x01\n\x07\x44\x65tails\x12\"\n\rchange_set_id\x18\x01 \x01(\tR\x0b\x63hangeSetId\x12\x16\n\x06result\x18\x02 \x01(\tR\x06result\x12>\n\x06\x65rrors\x18\x03 
\x03(\x0b\x32&.diode.v1.IngestionError.Details.ErrorR\x06\x65rrors\x1a:\n\x05\x45rror\x12\x14\n\x05\x65rror\x18\x01 \x01(\tR\x05\x65rror\x12\x1b\n\tchange_id\x18\x02 \x01(\tR\x08\x63hangeId\"\x97\x01\n\x10IngestionMetrics\x12\x14\n\x05total\x18\x01 \x01(\x05R\x05total\x12\x16\n\x06queued\x18\x02 \x01(\x05R\x06queued\x12\x1e\n\nreconciled\x18\x03 \x01(\x05R\nreconciled\x12\x16\n\x06\x66\x61iled\x18\x04 \x01(\x05R\x06\x66\x61iled\x12\x1d\n\nno_changes\x18\x05 \x01(\x05R\tnoChanges\"/\n\tChangeSet\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04\x64\x61ta\x18\x02 \x01(\x0cR\x04\x64\x61ta\"\xcc\x03\n\x0cIngestionLog\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1b\n\tdata_type\x18\x02 \x01(\tR\x08\x64\x61taType\x12%\n\x05state\x18\x03 \x01(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1d\n\nrequest_id\x18\x04 \x01(\tR\trequestId\x12!\n\x0cingestion_ts\x18\x05 \x01(\x03R\x0bingestionTs\x12*\n\x11producer_app_name\x18\x06 \x01(\tR\x0fproducerAppName\x12\x30\n\x14producer_app_version\x18\x07 \x01(\tR\x12producerAppVersion\x12\x19\n\x08sdk_name\x18\x08 \x01(\tR\x07sdkName\x12\x1f\n\x0bsdk_version\x18\t \x01(\tR\nsdkVersion\x12(\n\x06\x65ntity\x18\n \x01(\x0b\x32\x10.diode.v1.EntityR\x06\x65ntity\x12.\n\x05\x65rror\x18\x0b \x01(\x0b\x32\x18.diode.v1.IngestionErrorR\x05\x65rror\x12\x32\n\nchange_set\x18\x0c \x01(\x0b\x32\x13.diode.v1.ChangeSetR\tchangeSet\"\xda\x02\n\x1cRetrieveIngestionLogsRequest\x12 \n\tpage_size\x18\x01 \x01(\x05H\x00R\x08pageSize\x88\x01\x01\x12*\n\x05state\x18\x02 \x01(\x0e\x32\x0f.diode.v1.StateH\x01R\x05state\x88\x01\x01\x12\x1b\n\tdata_type\x18\x03 \x01(\tR\x08\x64\x61taType\x12\x1d\n\nrequest_id\x18\x04 \x01(\tR\trequestId\x12,\n\x12ingestion_ts_start\x18\x05 \x01(\x03R\x10ingestionTsStart\x12(\n\x10ingestion_ts_end\x18\x06 \x01(\x03R\x0eingestionTsEnd\x12\x1d\n\npage_token\x18\x07 \x01(\tR\tpageToken\x12!\n\x0conly_metrics\x18\x08 
\x01(\x08R\x0bonlyMetricsB\x0c\n\n_page_sizeB\x08\n\x06_state\"\xa9\x01\n\x1dRetrieveIngestionLogsResponse\x12*\n\x04logs\x18\x01 \x03(\x0b\x32\x16.diode.v1.IngestionLogR\x04logs\x12\x34\n\x07metrics\x18\x02 \x01(\x0b\x32\x1a.diode.v1.IngestionMetricsR\x07metrics\x12&\n\x0fnext_page_token\x18\x03 \x01(\tR\rnextPageToken*P\n\x05State\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\n\n\x06QUEUED\x10\x01\x12\x0e\n\nRECONCILED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nNO_CHANGES\x10\x04\x32\xfe\x01\n\x11ReconcilerService\x12\x7f\n\x1cRetrieveIngestionDataSources\x12-.diode.v1.RetrieveIngestionDataSourcesRequest\x1a..diode.v1.RetrieveIngestionDataSourcesResponse\"\x00\x12h\n\x15RetrieveIngestionLogs\x12&.diode.v1.RetrieveIngestionLogsRequest\x1a\'.diode.v1.RetrieveIngestionLogsResponseBDZBgithub.com/netboxlabs/diode/diode-server/gen/diode/v1/reconcilerpbb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19\x64iode/v1/reconciler.proto\x12\x08\x64iode.v1\x1a\x17\x64iode/v1/ingester.proto\x1a\x17validate/validate.proto\"Y\n\x13IngestionDataSource\x12\x1e\n\x04name\x18\x01 \x01(\tB\n\xfa\x42\x07r\x05\x10\x01\x18\xff\x01R\x04name\x12\"\n\x07\x61pi_key\x18\x02 \x01(\tB\t\xfa\x42\x06r\x04\x10(\x18(R\x06\x61piKey\"\xab\x01\n#RetrieveIngestionDataSourcesRequest\x12\x1e\n\x04name\x18\x01 \x01(\tB\n\xfa\x42\x07r\x05\x10\x01\x18\xff\x01R\x04name\x12%\n\x08sdk_name\x18\x02 \x01(\tB\n\xfa\x42\x07r\x05\x10\x01\x18\xff\x01R\x07sdkName\x12=\n\x0bsdk_version\x18\x03 \x01(\tB\x1c\xfa\x42\x19r\x17\x32\x15^(\\d)+\\.(\\d)+\\.(\\d)+$R\nsdkVersion\"{\n$RetrieveIngestionDataSourcesResponse\x12S\n\x16ingestion_data_sources\x18\x01 \x03(\x0b\x32\x1d.diode.v1.IngestionDataSourceR\x14ingestionDataSources\"\xbe\x02\n\x0eIngestionError\x12\x18\n\x07message\x18\x01 \x01(\tR\x07message\x12\x12\n\x04\x63ode\x18\x02 \x01(\x05R\x04\x63ode\x12:\n\x07\x64\x65tails\x18\x03 \x01(\x0b\x32 
.diode.v1.IngestionError.DetailsR\x07\x64\x65tails\x1a\xc1\x01\n\x07\x44\x65tails\x12\"\n\rchange_set_id\x18\x01 \x01(\tR\x0b\x63hangeSetId\x12\x16\n\x06result\x18\x02 \x01(\tR\x06result\x12>\n\x06\x65rrors\x18\x03 \x03(\x0b\x32&.diode.v1.IngestionError.Details.ErrorR\x06\x65rrors\x1a:\n\x05\x45rror\x12\x14\n\x05\x65rror\x18\x01 \x01(\tR\x05\x65rror\x12\x1b\n\tchange_id\x18\x02 \x01(\tR\x08\x63hangeId\"\x97\x01\n\x10IngestionMetrics\x12\x14\n\x05total\x18\x01 \x01(\x05R\x05total\x12\x16\n\x06queued\x18\x02 \x01(\x05R\x06queued\x12\x1e\n\nreconciled\x18\x03 \x01(\x05R\nreconciled\x12\x16\n\x06\x66\x61iled\x18\x04 \x01(\x05R\x06\x66\x61iled\x12\x1d\n\nno_changes\x18\x05 \x01(\x05R\tnoChanges\"_\n\tChangeSet\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04\x64\x61ta\x18\x02 \x01(\x0cR\x04\x64\x61ta\x12 \n\tbranch_id\x18\x03 \x01(\tH\x00R\x08\x62ranchId\x88\x01\x01\x42\x0c\n\n_branch_id\"\xcc\x03\n\x0cIngestionLog\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1b\n\tdata_type\x18\x02 \x01(\tR\x08\x64\x61taType\x12%\n\x05state\x18\x03 \x01(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1d\n\nrequest_id\x18\x04 \x01(\tR\trequestId\x12!\n\x0cingestion_ts\x18\x05 \x01(\x03R\x0bingestionTs\x12*\n\x11producer_app_name\x18\x06 \x01(\tR\x0fproducerAppName\x12\x30\n\x14producer_app_version\x18\x07 \x01(\tR\x12producerAppVersion\x12\x19\n\x08sdk_name\x18\x08 \x01(\tR\x07sdkName\x12\x1f\n\x0bsdk_version\x18\t \x01(\tR\nsdkVersion\x12(\n\x06\x65ntity\x18\n \x01(\x0b\x32\x10.diode.v1.EntityR\x06\x65ntity\x12.\n\x05\x65rror\x18\x0b \x01(\x0b\x32\x18.diode.v1.IngestionErrorR\x05\x65rror\x12\x32\n\nchange_set\x18\x0c \x01(\x0b\x32\x13.diode.v1.ChangeSetR\tchangeSet\"\xda\x02\n\x1cRetrieveIngestionLogsRequest\x12 \n\tpage_size\x18\x01 \x01(\x05H\x00R\x08pageSize\x88\x01\x01\x12*\n\x05state\x18\x02 \x01(\x0e\x32\x0f.diode.v1.StateH\x01R\x05state\x88\x01\x01\x12\x1b\n\tdata_type\x18\x03 \x01(\tR\x08\x64\x61taType\x12\x1d\n\nrequest_id\x18\x04 
\x01(\tR\trequestId\x12,\n\x12ingestion_ts_start\x18\x05 \x01(\x03R\x10ingestionTsStart\x12(\n\x10ingestion_ts_end\x18\x06 \x01(\x03R\x0eingestionTsEnd\x12\x1d\n\npage_token\x18\x07 \x01(\tR\tpageToken\x12!\n\x0conly_metrics\x18\x08 \x01(\x08R\x0bonlyMetricsB\x0c\n\n_page_sizeB\x08\n\x06_state\"\xa9\x01\n\x1dRetrieveIngestionLogsResponse\x12*\n\x04logs\x18\x01 \x03(\x0b\x32\x16.diode.v1.IngestionLogR\x04logs\x12\x34\n\x07metrics\x18\x02 \x01(\x0b\x32\x1a.diode.v1.IngestionMetricsR\x07metrics\x12&\n\x0fnext_page_token\x18\x03 \x01(\tR\rnextPageToken*]\n\x05State\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\n\n\x06QUEUED\x10\x01\x12\x0e\n\nRECONCILED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nNO_CHANGES\x10\x04\x12\x0b\n\x07IGNORED\x10\x05\x32\xfe\x01\n\x11ReconcilerService\x12\x7f\n\x1cRetrieveIngestionDataSources\x12-.diode.v1.RetrieveIngestionDataSourcesRequest\x1a..diode.v1.RetrieveIngestionDataSourcesResponse\"\x00\x12h\n\x15RetrieveIngestionLogs\x12&.diode.v1.RetrieveIngestionLogsRequest\x1a\'.diode.v1.RetrieveIngestionLogsResponseBDZBgithub.com/netboxlabs/diode/diode-server/gen/diode/v1/reconcilerpbb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -34,8 +34,8 @@ _globals['_RETRIEVEINGESTIONDATASOURCESREQUEST'].fields_by_name['sdk_name']._serialized_options = b'\372B\007r\005\020\001\030\377\001' _globals['_RETRIEVEINGESTIONDATASOURCESREQUEST'].fields_by_name['sdk_version']._loaded_options = None _globals['_RETRIEVEINGESTIONDATASOURCESREQUEST'].fields_by_name['sdk_version']._serialized_options = b'\372B\031r\0272\025^(\\d)+\\.(\\d)+\\.(\\d)+$' - _globals['_STATE']._serialized_start=1987 - _globals['_STATE']._serialized_end=2067 + _globals['_STATE']._serialized_start=2035 + _globals['_STATE']._serialized_end=2128 _globals['_INGESTIONDATASOURCE']._serialized_start=89 _globals['_INGESTIONDATASOURCE']._serialized_end=178 _globals['_RETRIEVEINGESTIONDATASOURCESREQUEST']._serialized_start=181 @@ -51,13 +51,13 @@ 
_globals['_INGESTIONMETRICS']._serialized_start=801 _globals['_INGESTIONMETRICS']._serialized_end=952 _globals['_CHANGESET']._serialized_start=954 - _globals['_CHANGESET']._serialized_end=1001 - _globals['_INGESTIONLOG']._serialized_start=1004 - _globals['_INGESTIONLOG']._serialized_end=1464 - _globals['_RETRIEVEINGESTIONLOGSREQUEST']._serialized_start=1467 - _globals['_RETRIEVEINGESTIONLOGSREQUEST']._serialized_end=1813 - _globals['_RETRIEVEINGESTIONLOGSRESPONSE']._serialized_start=1816 - _globals['_RETRIEVEINGESTIONLOGSRESPONSE']._serialized_end=1985 - _globals['_RECONCILERSERVICE']._serialized_start=2070 - _globals['_RECONCILERSERVICE']._serialized_end=2324 + _globals['_CHANGESET']._serialized_end=1049 + _globals['_INGESTIONLOG']._serialized_start=1052 + _globals['_INGESTIONLOG']._serialized_end=1512 + _globals['_RETRIEVEINGESTIONLOGSREQUEST']._serialized_start=1515 + _globals['_RETRIEVEINGESTIONLOGSREQUEST']._serialized_end=1861 + _globals['_RETRIEVEINGESTIONLOGSRESPONSE']._serialized_start=1864 + _globals['_RETRIEVEINGESTIONLOGSRESPONSE']._serialized_end=2033 + _globals['_RECONCILERSERVICE']._serialized_start=2131 + _globals['_RECONCILERSERVICE']._serialized_end=2385 # @@protoc_insertion_point(module_scope) diff --git a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.pyi b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.pyi index b17ab63..17d2407 100644 --- a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.pyi +++ b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.pyi @@ -15,11 +15,13 @@ class State(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): RECONCILED: _ClassVar[State] FAILED: _ClassVar[State] NO_CHANGES: _ClassVar[State] + IGNORED: _ClassVar[State] UNSPECIFIED: State QUEUED: State RECONCILED: State FAILED: State NO_CHANGES: State +IGNORED: State class IngestionDataSource(_message.Message): __slots__ = ("name", "api_key") @@ -86,12 +88,14 @@ class IngestionMetrics(_message.Message): def __init__(self, total: _Optional[int] = ..., 
queued: _Optional[int] = ..., reconciled: _Optional[int] = ..., failed: _Optional[int] = ..., no_changes: _Optional[int] = ...) -> None: ... class ChangeSet(_message.Message): - __slots__ = ("id", "data") + __slots__ = ("id", "data", "branch_id") ID_FIELD_NUMBER: _ClassVar[int] DATA_FIELD_NUMBER: _ClassVar[int] + BRANCH_ID_FIELD_NUMBER: _ClassVar[int] id: str data: bytes - def __init__(self, id: _Optional[str] = ..., data: _Optional[bytes] = ...) -> None: ... + branch_id: str + def __init__(self, id: _Optional[str] = ..., data: _Optional[bytes] = ..., branch_id: _Optional[str] = ...) -> None: ... class IngestionLog(_message.Message): __slots__ = ("id", "data_type", "state", "request_id", "ingestion_ts", "producer_app_name", "producer_app_version", "sdk_name", "sdk_version", "entity", "error", "change_set") diff --git a/netbox_diode_plugin/tables.py b/netbox_diode_plugin/tables.py index 05b20b3..0d8869d 100644 --- a/netbox_diode_plugin/tables.py +++ b/netbox_diode_plugin/tables.py @@ -31,7 +31,10 @@ class IngestionStateColumn(tables.Column): def render(self, value): """Renders the ingestion state as a human-readable string.""" if value: - state_name = reconciler_pb2.State.Name(value) + try: + state_name = reconciler_pb2.State.Name(value) + except ValueError: + state_name = reconciler_pb2.State.Name(reconciler_pb2.State.UNSPECIFIED) return " ".join(state_name.title().split("_")) return None From 0bfbb7762d2361fc908d098545c5773e66fa6030 Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Wed, 22 Jan 2025 11:04:21 -0500 Subject: [PATCH 10/52] chore: update protobufs (#55) * chore: security improvements (#53) * chore: add safe redirect to login Signed-off-by: Michal Fiedorowicz * chore: gha - add missing permissions Signed-off-by: Michal Fiedorowicz * chore: gha - pin actions to commit hashes Signed-off-by: Michal Fiedorowicz * tidy up Signed-off-by: Michal Fiedorowicz --------- Signed-off-by: Michal Fiedorowicz * update protobufs * 
update constants in tests --------- Signed-off-by: Michal Fiedorowicz Co-authored-by: Michal Fiedorowicz --- .github/workflows/lint-tests.yml | 2 +- .github/workflows/manifest-modified.yaml | 3 + .github/workflows/release.yaml | 10 +- .../reconciler/sdk/v1/reconciler_pb2.py | 78 +++++----- .../reconciler/sdk/v1/reconciler_pb2.pyi | 144 +++++++++++++----- .../reconciler/sdk/v1/reconciler_pb2_grpc.py | 78 +++++++--- netbox_diode_plugin/tests/test_tables.py | 4 +- netbox_diode_plugin/tests/test_views.py | 6 +- netbox_diode_plugin/views.py | 25 ++- 9 files changed, 240 insertions(+), 110 deletions(-) diff --git a/.github/workflows/lint-tests.yml b/.github/workflows/lint-tests.yml index 572872b..199b0d5 100644 --- a/.github/workflows/lint-tests.yml +++ b/.github/workflows/lint-tests.yml @@ -47,7 +47,7 @@ jobs: run: | make docker-compose-netbox-plugin-test-cover - name: Coverage comment - uses: orgoro/coverage@v3.2 + uses: orgoro/coverage@3f13a558c5af7376496aa4848bf0224aead366ac # v3.2 with: coverageFile: ./docker/coverage/report.xml token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/manifest-modified.yaml b/.github/workflows/manifest-modified.yaml index 28bd89e..bd33623 100644 --- a/.github/workflows/manifest-modified.yaml +++ b/.github/workflows/manifest-modified.yaml @@ -10,6 +10,9 @@ concurrency: group: ${{ github.workflow }} cancel-in-progress: false +permissions: + contents: read + jobs: manifest-modified: uses: netboxlabs/public-workflows/.github/workflows/reusable-plugin-manifest-modified.yml@release diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 7b563dc..621dd4f 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -42,7 +42,7 @@ jobs: with: node-version: "lts/*" - name: Write package.json - uses: DamianReeves/write-file-action@master + uses: DamianReeves/write-file-action@6929a9a6d1807689191dcc8bbe62b54d70a32b42 # v1.3 with: path: ./package.json write-mode: overwrite @@ -56,7 +56,7 
@@ jobs: } } - name: Write .releaserc.json - uses: DamianReeves/write-file-action@master + uses: DamianReeves/write-file-action@6929a9a6d1807689191dcc8bbe62b54d70a32b42 # v1.3 with: path: ./.releaserc.json write-mode: overwrite @@ -166,7 +166,7 @@ jobs: retention-days: 30 if-no-files-found: error - name: Publish release distributions to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 + uses: pypa/gh-action-pypi-publish@67339c736fd9354cd4f8cb0b744f2b82a74b5c70 # v.1.12.3 with: packages-dir: dist @@ -181,7 +181,7 @@ jobs: with: node-version: "lts/*" - name: Write package.json - uses: DamianReeves/write-file-action@master + uses: DamianReeves/write-file-action@6929a9a6d1807689191dcc8bbe62b54d70a32b42 # v1.3 with: path: ./package.json write-mode: overwrite @@ -195,7 +195,7 @@ jobs: } } - name: Write .releaserc.json - uses: DamianReeves/write-file-action@master + uses: DamianReeves/write-file-action@6929a9a6d1807689191dcc8bbe62b54d70a32b42 # v1.3 with: path: ./.releaserc.json write-mode: overwrite diff --git a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.py b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.py index 293ef8d..02d4c58 100644 --- a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.py +++ b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.py @@ -16,7 +16,7 @@ from netbox_diode_plugin.reconciler.sdk.validate import validate_pb2 as validate_dot_validate__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19\x64iode/v1/reconciler.proto\x12\x08\x64iode.v1\x1a\x17\x64iode/v1/ingester.proto\x1a\x17validate/validate.proto\"Y\n\x13IngestionDataSource\x12\x1e\n\x04name\x18\x01 \x01(\tB\n\xfa\x42\x07r\x05\x10\x01\x18\xff\x01R\x04name\x12\"\n\x07\x61pi_key\x18\x02 \x01(\tB\t\xfa\x42\x06r\x04\x10(\x18(R\x06\x61piKey\"\xab\x01\n#RetrieveIngestionDataSourcesRequest\x12\x1e\n\x04name\x18\x01 \x01(\tB\n\xfa\x42\x07r\x05\x10\x01\x18\xff\x01R\x04name\x12%\n\x08sdk_name\x18\x02 
\x01(\tB\n\xfa\x42\x07r\x05\x10\x01\x18\xff\x01R\x07sdkName\x12=\n\x0bsdk_version\x18\x03 \x01(\tB\x1c\xfa\x42\x19r\x17\x32\x15^(\\d)+\\.(\\d)+\\.(\\d)+$R\nsdkVersion\"{\n$RetrieveIngestionDataSourcesResponse\x12S\n\x16ingestion_data_sources\x18\x01 \x03(\x0b\x32\x1d.diode.v1.IngestionDataSourceR\x14ingestionDataSources\"\xbe\x02\n\x0eIngestionError\x12\x18\n\x07message\x18\x01 \x01(\tR\x07message\x12\x12\n\x04\x63ode\x18\x02 \x01(\x05R\x04\x63ode\x12:\n\x07\x64\x65tails\x18\x03 \x01(\x0b\x32 .diode.v1.IngestionError.DetailsR\x07\x64\x65tails\x1a\xc1\x01\n\x07\x44\x65tails\x12\"\n\rchange_set_id\x18\x01 \x01(\tR\x0b\x63hangeSetId\x12\x16\n\x06result\x18\x02 \x01(\tR\x06result\x12>\n\x06\x65rrors\x18\x03 \x03(\x0b\x32&.diode.v1.IngestionError.Details.ErrorR\x06\x65rrors\x1a:\n\x05\x45rror\x12\x14\n\x05\x65rror\x18\x01 \x01(\tR\x05\x65rror\x12\x1b\n\tchange_id\x18\x02 \x01(\tR\x08\x63hangeId\"\x97\x01\n\x10IngestionMetrics\x12\x14\n\x05total\x18\x01 \x01(\x05R\x05total\x12\x16\n\x06queued\x18\x02 \x01(\x05R\x06queued\x12\x1e\n\nreconciled\x18\x03 \x01(\x05R\nreconciled\x12\x16\n\x06\x66\x61iled\x18\x04 \x01(\x05R\x06\x66\x61iled\x12\x1d\n\nno_changes\x18\x05 \x01(\x05R\tnoChanges\"_\n\tChangeSet\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04\x64\x61ta\x18\x02 \x01(\x0cR\x04\x64\x61ta\x12 \n\tbranch_id\x18\x03 \x01(\tH\x00R\x08\x62ranchId\x88\x01\x01\x42\x0c\n\n_branch_id\"\xcc\x03\n\x0cIngestionLog\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1b\n\tdata_type\x18\x02 \x01(\tR\x08\x64\x61taType\x12%\n\x05state\x18\x03 \x01(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1d\n\nrequest_id\x18\x04 \x01(\tR\trequestId\x12!\n\x0cingestion_ts\x18\x05 \x01(\x03R\x0bingestionTs\x12*\n\x11producer_app_name\x18\x06 \x01(\tR\x0fproducerAppName\x12\x30\n\x14producer_app_version\x18\x07 \x01(\tR\x12producerAppVersion\x12\x19\n\x08sdk_name\x18\x08 \x01(\tR\x07sdkName\x12\x1f\n\x0bsdk_version\x18\t \x01(\tR\nsdkVersion\x12(\n\x06\x65ntity\x18\n 
\x01(\x0b\x32\x10.diode.v1.EntityR\x06\x65ntity\x12.\n\x05\x65rror\x18\x0b \x01(\x0b\x32\x18.diode.v1.IngestionErrorR\x05\x65rror\x12\x32\n\nchange_set\x18\x0c \x01(\x0b\x32\x13.diode.v1.ChangeSetR\tchangeSet\"\xda\x02\n\x1cRetrieveIngestionLogsRequest\x12 \n\tpage_size\x18\x01 \x01(\x05H\x00R\x08pageSize\x88\x01\x01\x12*\n\x05state\x18\x02 \x01(\x0e\x32\x0f.diode.v1.StateH\x01R\x05state\x88\x01\x01\x12\x1b\n\tdata_type\x18\x03 \x01(\tR\x08\x64\x61taType\x12\x1d\n\nrequest_id\x18\x04 \x01(\tR\trequestId\x12,\n\x12ingestion_ts_start\x18\x05 \x01(\x03R\x10ingestionTsStart\x12(\n\x10ingestion_ts_end\x18\x06 \x01(\x03R\x0eingestionTsEnd\x12\x1d\n\npage_token\x18\x07 \x01(\tR\tpageToken\x12!\n\x0conly_metrics\x18\x08 \x01(\x08R\x0bonlyMetricsB\x0c\n\n_page_sizeB\x08\n\x06_state\"\xa9\x01\n\x1dRetrieveIngestionLogsResponse\x12*\n\x04logs\x18\x01 \x03(\x0b\x32\x16.diode.v1.IngestionLogR\x04logs\x12\x34\n\x07metrics\x18\x02 \x01(\x0b\x32\x1a.diode.v1.IngestionMetricsR\x07metrics\x12&\n\x0fnext_page_token\x18\x03 \x01(\tR\rnextPageToken*]\n\x05State\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\n\n\x06QUEUED\x10\x01\x12\x0e\n\nRECONCILED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nNO_CHANGES\x10\x04\x12\x0b\n\x07IGNORED\x10\x05\x32\xfe\x01\n\x11ReconcilerService\x12\x7f\n\x1cRetrieveIngestionDataSources\x12-.diode.v1.RetrieveIngestionDataSourcesRequest\x1a..diode.v1.RetrieveIngestionDataSourcesResponse\"\x00\x12h\n\x15RetrieveIngestionLogs\x12&.diode.v1.RetrieveIngestionLogsRequest\x1a\'.diode.v1.RetrieveIngestionLogsResponseBDZBgithub.com/netboxlabs/diode/diode-server/gen/diode/v1/reconcilerpbb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19\x64iode/v1/reconciler.proto\x12\x08\x64iode.v1\x1a\x17\x64iode/v1/ingester.proto\x1a\x17validate/validate.proto\"\xbe\x02\n\x0eIngestionError\x12\x18\n\x07message\x18\x01 \x01(\tR\x07message\x12\x12\n\x04\x63ode\x18\x02 \x01(\x05R\x04\x63ode\x12:\n\x07\x64\x65tails\x18\x03 \x01(\x0b\x32 
.diode.v1.IngestionError.DetailsR\x07\x64\x65tails\x1a\xc1\x01\n\x07\x44\x65tails\x12\"\n\rchange_set_id\x18\x01 \x01(\tR\x0b\x63hangeSetId\x12\x16\n\x06result\x18\x02 \x01(\tR\x06result\x12>\n\x06\x65rrors\x18\x03 \x03(\x0b\x32&.diode.v1.IngestionError.Details.ErrorR\x06\x65rrors\x1a:\n\x05\x45rror\x12\x14\n\x05\x65rror\x18\x01 \x01(\tR\x05\x65rror\x12\x1b\n\tchange_id\x18\x02 \x01(\tR\x08\x63hangeId\"\x97\x01\n\x10IngestionMetrics\x12\x14\n\x05total\x18\x01 \x01(\x05R\x05total\x12\x16\n\x06queued\x18\x02 \x01(\x05R\x06queued\x12\x1e\n\nreconciled\x18\x03 \x01(\x05R\nreconciled\x12\x16\n\x06\x66\x61iled\x18\x04 \x01(\x05R\x06\x66\x61iled\x12\x1d\n\nno_changes\x18\x05 \x01(\x05R\tnoChanges\"\x9e\x01\n\tChangeSet\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04\x64\x61ta\x18\x02 \x01(\x0cR\x04\x64\x61ta\x12 \n\tbranch_id\x18\x03 \x01(\tH\x00R\x08\x62ranchId\x88\x01\x01\x12*\n\x0e\x64\x65viation_name\x18\x04 \x01(\tH\x01R\rdeviationName\x88\x01\x01\x42\x0c\n\n_branch_idB\x11\n\x0f_deviation_name\"\xf1\x03\n\x0cIngestionLog\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1f\n\tdata_type\x18\x02 \x01(\tB\x02\x18\x01R\x08\x64\x61taType\x12%\n\x05state\x18\x03 \x01(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1d\n\nrequest_id\x18\x04 \x01(\tR\trequestId\x12!\n\x0cingestion_ts\x18\x05 \x01(\x03R\x0bingestionTs\x12*\n\x11producer_app_name\x18\x06 \x01(\tR\x0fproducerAppName\x12\x30\n\x14producer_app_version\x18\x07 \x01(\tR\x12producerAppVersion\x12\x19\n\x08sdk_name\x18\x08 \x01(\tR\x07sdkName\x12\x1f\n\x0bsdk_version\x18\t \x01(\tR\nsdkVersion\x12(\n\x06\x65ntity\x18\n \x01(\x0b\x32\x10.diode.v1.EntityR\x06\x65ntity\x12.\n\x05\x65rror\x18\x0b \x01(\x0b\x32\x18.diode.v1.IngestionErrorR\x05\x65rror\x12\x32\n\nchange_set\x18\x0c \x01(\x0b\x32\x13.diode.v1.ChangeSetR\tchangeSet\x12\x1f\n\x0bobject_type\x18\r \x01(\tR\nobjectType\"\xff\x02\n\x1cRetrieveIngestionLogsRequest\x12 \n\tpage_size\x18\x01 \x01(\x05H\x00R\x08pageSize\x88\x01\x01\x12*\n\x05state\x18\x02 
\x01(\x0e\x32\x0f.diode.v1.StateH\x01R\x05state\x88\x01\x01\x12\x1f\n\tdata_type\x18\x03 \x01(\tB\x02\x18\x01R\x08\x64\x61taType\x12\x1d\n\nrequest_id\x18\x04 \x01(\tR\trequestId\x12,\n\x12ingestion_ts_start\x18\x05 \x01(\x03R\x10ingestionTsStart\x12(\n\x10ingestion_ts_end\x18\x06 \x01(\x03R\x0eingestionTsEnd\x12\x1d\n\npage_token\x18\x07 \x01(\tR\tpageToken\x12!\n\x0conly_metrics\x18\x08 \x01(\x08R\x0bonlyMetrics\x12\x1f\n\x0bobject_type\x18\t \x01(\tR\nobjectTypeB\x0c\n\n_page_sizeB\x08\n\x06_state\"\xa9\x01\n\x1dRetrieveIngestionLogsResponse\x12*\n\x04logs\x18\x01 \x03(\x0b\x32\x16.diode.v1.IngestionLogR\x04logs\x12\x34\n\x07metrics\x18\x02 \x01(\x0b\x32\x1a.diode.v1.IngestionMetricsR\x07metrics\x12&\n\x0fnext_page_token\x18\x03 \x01(\tR\rnextPageToken\"\xbb\x02\n\x19RetrieveDeviationsRequest\x12 \n\tpage_size\x18\x01 \x01(\x05H\x00R\x08pageSize\x88\x01\x01\x12\x1d\n\npage_token\x18\x02 \x01(\tR\tpageToken\x12,\n\x12ingestion_ts_start\x18\x03 \x01(\x03R\x10ingestionTsStart\x12(\n\x10ingestion_ts_end\x18\x04 \x01(\x03R\x0eingestionTsEnd\x12%\n\x05state\x18\x05 \x03(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1f\n\x0bobject_type\x18\x06 \x03(\tR\nobjectType\x12\x1b\n\tbranch_id\x18\x07 \x03(\tR\x08\x62ranchId\x12\x12\n\x04site\x18\x08 \x03(\tR\x04siteB\x0c\n\n_page_size\">\n\x0e\x44\x65viationError\x12\x18\n\x07message\x18\x01 \x01(\tR\x07message\x12\x12\n\x04\x63ode\x18\x02 \x01(\x05R\x04\x63ode\"\xba\x01\n\x06\x43hange\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1f\n\x0bobject_type\x18\x02 \x01(\tR\nobjectType\x12\x30\n\x14object_primary_value\x18\x03 \x01(\tR\x12objectPrimaryValue\x12\x1f\n\x0b\x63hange_type\x18\x04 \x01(\tR\nchangeType\x12\x16\n\x06\x62\x65\x66ore\x18\x05 \x01(\x0cR\x06\x62\x65\x66ore\x12\x14\n\x05\x61\x66ter\x18\x06 \x01(\x0cR\x05\x61\x66ter\"\x9f\x03\n\tDeviation\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12!\n\x0cingestion_ts\x18\x02 \x01(\x03R\x0bingestionTs\x12$\n\x0elast_update_ts\x18\x03 \x01(\x03R\x0clastUpdateTs\x12\x12\n\x04name\x18\x04 
\x01(\tR\x04name\x12\x16\n\x06source\x18\x05 \x01(\tR\x06source\x12%\n\x05state\x18\x06 \x01(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1f\n\x0bobject_type\x18\x07 \x01(\tR\nobjectType\x12 \n\tbranch_id\x18\x08 \x01(\tH\x00R\x08\x62ranchId\x88\x01\x01\x12\x39\n\x0fingested_entity\x18\t \x01(\x0b\x32\x10.diode.v1.EntityR\x0eingestedEntity\x12.\n\x05\x65rror\x18\n \x01(\x0b\x32\x18.diode.v1.DeviationErrorR\x05\x65rror\x12*\n\x07\x63hanges\x18\x0b \x03(\x0b\x32\x10.diode.v1.ChangeR\x07\x63hangesB\x0c\n\n_branch_id\"y\n\x1aRetrieveDeviationsResponse\x12\x33\n\ndeviations\x18\x01 \x03(\x0b\x32\x13.diode.v1.DeviationR\ndeviations\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\".\n\x1cRetrieveDeviationByIDRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\"R\n\x1dRetrieveDeviationByIDResponse\x12\x31\n\tdeviation\x18\x01 \x01(\x0b\x32\x13.diode.v1.DeviationR\tdeviation*w\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\n\n\x06QUEUED\x10\x01\x12\x08\n\x04OPEN\x10\x02\x12\x0b\n\x07\x41PPLIED\x10\x03\x12\n\n\x06\x46\x41ILED\x10\x04\x12\x0e\n\nNO_CHANGES\x10\x05\x12\x0b\n\x07IGNORED\x10\x06\x12\x0b\n\x07\x45RRORED\x10\x07\x32\xcd\x02\n\x11ReconcilerService\x12m\n\x15RetrieveIngestionLogs\x12&.diode.v1.RetrieveIngestionLogsRequest\x1a\'.diode.v1.RetrieveIngestionLogsResponse\"\x03\x88\x02\x01\x12_\n\x12RetrieveDeviations\x12#.diode.v1.RetrieveDeviationsRequest\x1a$.diode.v1.RetrieveDeviationsResponse\x12h\n\x15RetrieveDeviationByID\x12&.diode.v1.RetrieveDeviationByIDRequest\x1a\'.diode.v1.RetrieveDeviationByIDResponseBDZBgithub.com/netboxlabs/diode/diode-server/gen/diode/v1/reconcilerpbb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -24,40 +24,44 @@ if not _descriptor._USE_C_DESCRIPTORS: _globals['DESCRIPTOR']._loaded_options = None _globals['DESCRIPTOR']._serialized_options = b'ZBgithub.com/netboxlabs/diode/diode-server/gen/diode/v1/reconcilerpb' - 
_globals['_INGESTIONDATASOURCE'].fields_by_name['name']._loaded_options = None - _globals['_INGESTIONDATASOURCE'].fields_by_name['name']._serialized_options = b'\372B\007r\005\020\001\030\377\001' - _globals['_INGESTIONDATASOURCE'].fields_by_name['api_key']._loaded_options = None - _globals['_INGESTIONDATASOURCE'].fields_by_name['api_key']._serialized_options = b'\372B\006r\004\020(\030(' - _globals['_RETRIEVEINGESTIONDATASOURCESREQUEST'].fields_by_name['name']._loaded_options = None - _globals['_RETRIEVEINGESTIONDATASOURCESREQUEST'].fields_by_name['name']._serialized_options = b'\372B\007r\005\020\001\030\377\001' - _globals['_RETRIEVEINGESTIONDATASOURCESREQUEST'].fields_by_name['sdk_name']._loaded_options = None - _globals['_RETRIEVEINGESTIONDATASOURCESREQUEST'].fields_by_name['sdk_name']._serialized_options = b'\372B\007r\005\020\001\030\377\001' - _globals['_RETRIEVEINGESTIONDATASOURCESREQUEST'].fields_by_name['sdk_version']._loaded_options = None - _globals['_RETRIEVEINGESTIONDATASOURCESREQUEST'].fields_by_name['sdk_version']._serialized_options = b'\372B\031r\0272\025^(\\d)+\\.(\\d)+\\.(\\d)+$' - _globals['_STATE']._serialized_start=2035 - _globals['_STATE']._serialized_end=2128 - _globals['_INGESTIONDATASOURCE']._serialized_start=89 - _globals['_INGESTIONDATASOURCE']._serialized_end=178 - _globals['_RETRIEVEINGESTIONDATASOURCESREQUEST']._serialized_start=181 - _globals['_RETRIEVEINGESTIONDATASOURCESREQUEST']._serialized_end=352 - _globals['_RETRIEVEINGESTIONDATASOURCESRESPONSE']._serialized_start=354 - _globals['_RETRIEVEINGESTIONDATASOURCESRESPONSE']._serialized_end=477 - _globals['_INGESTIONERROR']._serialized_start=480 - _globals['_INGESTIONERROR']._serialized_end=798 - _globals['_INGESTIONERROR_DETAILS']._serialized_start=605 - _globals['_INGESTIONERROR_DETAILS']._serialized_end=798 - _globals['_INGESTIONERROR_DETAILS_ERROR']._serialized_start=740 - _globals['_INGESTIONERROR_DETAILS_ERROR']._serialized_end=798 - 
_globals['_INGESTIONMETRICS']._serialized_start=801 - _globals['_INGESTIONMETRICS']._serialized_end=952 - _globals['_CHANGESET']._serialized_start=954 - _globals['_CHANGESET']._serialized_end=1049 - _globals['_INGESTIONLOG']._serialized_start=1052 - _globals['_INGESTIONLOG']._serialized_end=1512 - _globals['_RETRIEVEINGESTIONLOGSREQUEST']._serialized_start=1515 - _globals['_RETRIEVEINGESTIONLOGSREQUEST']._serialized_end=1861 - _globals['_RETRIEVEINGESTIONLOGSRESPONSE']._serialized_start=1864 - _globals['_RETRIEVEINGESTIONLOGSRESPONSE']._serialized_end=2033 - _globals['_RECONCILERSERVICE']._serialized_start=2131 - _globals['_RECONCILERSERVICE']._serialized_end=2385 + _globals['_INGESTIONLOG'].fields_by_name['data_type']._loaded_options = None + _globals['_INGESTIONLOG'].fields_by_name['data_type']._serialized_options = b'\030\001' + _globals['_RETRIEVEINGESTIONLOGSREQUEST'].fields_by_name['data_type']._loaded_options = None + _globals['_RETRIEVEINGESTIONLOGSREQUEST'].fields_by_name['data_type']._serialized_options = b'\030\001' + _globals['_RECONCILERSERVICE'].methods_by_name['RetrieveIngestionLogs']._loaded_options = None + _globals['_RECONCILERSERVICE'].methods_by_name['RetrieveIngestionLogs']._serialized_options = b'\210\002\001' + _globals['_STATE']._serialized_start=3027 + _globals['_STATE']._serialized_end=3146 + _globals['_INGESTIONERROR']._serialized_start=90 + _globals['_INGESTIONERROR']._serialized_end=408 + _globals['_INGESTIONERROR_DETAILS']._serialized_start=215 + _globals['_INGESTIONERROR_DETAILS']._serialized_end=408 + _globals['_INGESTIONERROR_DETAILS_ERROR']._serialized_start=350 + _globals['_INGESTIONERROR_DETAILS_ERROR']._serialized_end=408 + _globals['_INGESTIONMETRICS']._serialized_start=411 + _globals['_INGESTIONMETRICS']._serialized_end=562 + _globals['_CHANGESET']._serialized_start=565 + _globals['_CHANGESET']._serialized_end=723 + _globals['_INGESTIONLOG']._serialized_start=726 + _globals['_INGESTIONLOG']._serialized_end=1223 + 
_globals['_RETRIEVEINGESTIONLOGSREQUEST']._serialized_start=1226 + _globals['_RETRIEVEINGESTIONLOGSREQUEST']._serialized_end=1609 + _globals['_RETRIEVEINGESTIONLOGSRESPONSE']._serialized_start=1612 + _globals['_RETRIEVEINGESTIONLOGSRESPONSE']._serialized_end=1781 + _globals['_RETRIEVEDEVIATIONSREQUEST']._serialized_start=1784 + _globals['_RETRIEVEDEVIATIONSREQUEST']._serialized_end=2099 + _globals['_DEVIATIONERROR']._serialized_start=2101 + _globals['_DEVIATIONERROR']._serialized_end=2163 + _globals['_CHANGE']._serialized_start=2166 + _globals['_CHANGE']._serialized_end=2352 + _globals['_DEVIATION']._serialized_start=2355 + _globals['_DEVIATION']._serialized_end=2770 + _globals['_RETRIEVEDEVIATIONSRESPONSE']._serialized_start=2772 + _globals['_RETRIEVEDEVIATIONSRESPONSE']._serialized_end=2893 + _globals['_RETRIEVEDEVIATIONBYIDREQUEST']._serialized_start=2895 + _globals['_RETRIEVEDEVIATIONBYIDREQUEST']._serialized_end=2941 + _globals['_RETRIEVEDEVIATIONBYIDRESPONSE']._serialized_start=2943 + _globals['_RETRIEVEDEVIATIONBYIDRESPONSE']._serialized_end=3025 + _globals['_RECONCILERSERVICE']._serialized_start=3149 + _globals['_RECONCILERSERVICE']._serialized_end=3482 # @@protoc_insertion_point(module_scope) diff --git a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.pyi b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.pyi index 17d2407..60f7cb6 100644 --- a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.pyi +++ b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.pyi @@ -10,42 +10,22 @@ DESCRIPTOR: _descriptor.FileDescriptor class State(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): __slots__ = () - UNSPECIFIED: _ClassVar[State] + STATE_UNSPECIFIED: _ClassVar[State] QUEUED: _ClassVar[State] - RECONCILED: _ClassVar[State] + OPEN: _ClassVar[State] + APPLIED: _ClassVar[State] FAILED: _ClassVar[State] NO_CHANGES: _ClassVar[State] IGNORED: _ClassVar[State] -UNSPECIFIED: State + ERRORED: _ClassVar[State] +STATE_UNSPECIFIED: State QUEUED: State 
-RECONCILED: State +OPEN: State +APPLIED: State FAILED: State NO_CHANGES: State IGNORED: State - -class IngestionDataSource(_message.Message): - __slots__ = ("name", "api_key") - NAME_FIELD_NUMBER: _ClassVar[int] - API_KEY_FIELD_NUMBER: _ClassVar[int] - name: str - api_key: str - def __init__(self, name: _Optional[str] = ..., api_key: _Optional[str] = ...) -> None: ... - -class RetrieveIngestionDataSourcesRequest(_message.Message): - __slots__ = ("name", "sdk_name", "sdk_version") - NAME_FIELD_NUMBER: _ClassVar[int] - SDK_NAME_FIELD_NUMBER: _ClassVar[int] - SDK_VERSION_FIELD_NUMBER: _ClassVar[int] - name: str - sdk_name: str - sdk_version: str - def __init__(self, name: _Optional[str] = ..., sdk_name: _Optional[str] = ..., sdk_version: _Optional[str] = ...) -> None: ... - -class RetrieveIngestionDataSourcesResponse(_message.Message): - __slots__ = ("ingestion_data_sources",) - INGESTION_DATA_SOURCES_FIELD_NUMBER: _ClassVar[int] - ingestion_data_sources: _containers.RepeatedCompositeFieldContainer[IngestionDataSource] - def __init__(self, ingestion_data_sources: _Optional[_Iterable[_Union[IngestionDataSource, _Mapping]]] = ...) -> None: ... +ERRORED: State class IngestionError(_message.Message): __slots__ = ("message", "code", "details") @@ -88,17 +68,19 @@ class IngestionMetrics(_message.Message): def __init__(self, total: _Optional[int] = ..., queued: _Optional[int] = ..., reconciled: _Optional[int] = ..., failed: _Optional[int] = ..., no_changes: _Optional[int] = ...) -> None: ... class ChangeSet(_message.Message): - __slots__ = ("id", "data", "branch_id") + __slots__ = ("id", "data", "branch_id", "deviation_name") ID_FIELD_NUMBER: _ClassVar[int] DATA_FIELD_NUMBER: _ClassVar[int] BRANCH_ID_FIELD_NUMBER: _ClassVar[int] + DEVIATION_NAME_FIELD_NUMBER: _ClassVar[int] id: str data: bytes branch_id: str - def __init__(self, id: _Optional[str] = ..., data: _Optional[bytes] = ..., branch_id: _Optional[str] = ...) -> None: ... 
+ deviation_name: str + def __init__(self, id: _Optional[str] = ..., data: _Optional[bytes] = ..., branch_id: _Optional[str] = ..., deviation_name: _Optional[str] = ...) -> None: ... class IngestionLog(_message.Message): - __slots__ = ("id", "data_type", "state", "request_id", "ingestion_ts", "producer_app_name", "producer_app_version", "sdk_name", "sdk_version", "entity", "error", "change_set") + __slots__ = ("id", "data_type", "state", "request_id", "ingestion_ts", "producer_app_name", "producer_app_version", "sdk_name", "sdk_version", "entity", "error", "change_set", "object_type") ID_FIELD_NUMBER: _ClassVar[int] DATA_TYPE_FIELD_NUMBER: _ClassVar[int] STATE_FIELD_NUMBER: _ClassVar[int] @@ -111,6 +93,7 @@ class IngestionLog(_message.Message): ENTITY_FIELD_NUMBER: _ClassVar[int] ERROR_FIELD_NUMBER: _ClassVar[int] CHANGE_SET_FIELD_NUMBER: _ClassVar[int] + OBJECT_TYPE_FIELD_NUMBER: _ClassVar[int] id: str data_type: str state: State @@ -123,10 +106,11 @@ class IngestionLog(_message.Message): entity: _ingester_pb2.Entity error: IngestionError change_set: ChangeSet - def __init__(self, id: _Optional[str] = ..., data_type: _Optional[str] = ..., state: _Optional[_Union[State, str]] = ..., request_id: _Optional[str] = ..., ingestion_ts: _Optional[int] = ..., producer_app_name: _Optional[str] = ..., producer_app_version: _Optional[str] = ..., sdk_name: _Optional[str] = ..., sdk_version: _Optional[str] = ..., entity: _Optional[_Union[_ingester_pb2.Entity, _Mapping]] = ..., error: _Optional[_Union[IngestionError, _Mapping]] = ..., change_set: _Optional[_Union[ChangeSet, _Mapping]] = ...) -> None: ... 
+ object_type: str + def __init__(self, id: _Optional[str] = ..., data_type: _Optional[str] = ..., state: _Optional[_Union[State, str]] = ..., request_id: _Optional[str] = ..., ingestion_ts: _Optional[int] = ..., producer_app_name: _Optional[str] = ..., producer_app_version: _Optional[str] = ..., sdk_name: _Optional[str] = ..., sdk_version: _Optional[str] = ..., entity: _Optional[_Union[_ingester_pb2.Entity, _Mapping]] = ..., error: _Optional[_Union[IngestionError, _Mapping]] = ..., change_set: _Optional[_Union[ChangeSet, _Mapping]] = ..., object_type: _Optional[str] = ...) -> None: ... class RetrieveIngestionLogsRequest(_message.Message): - __slots__ = ("page_size", "state", "data_type", "request_id", "ingestion_ts_start", "ingestion_ts_end", "page_token", "only_metrics") + __slots__ = ("page_size", "state", "data_type", "request_id", "ingestion_ts_start", "ingestion_ts_end", "page_token", "only_metrics", "object_type") PAGE_SIZE_FIELD_NUMBER: _ClassVar[int] STATE_FIELD_NUMBER: _ClassVar[int] DATA_TYPE_FIELD_NUMBER: _ClassVar[int] @@ -135,6 +119,7 @@ class RetrieveIngestionLogsRequest(_message.Message): INGESTION_TS_END_FIELD_NUMBER: _ClassVar[int] PAGE_TOKEN_FIELD_NUMBER: _ClassVar[int] ONLY_METRICS_FIELD_NUMBER: _ClassVar[int] + OBJECT_TYPE_FIELD_NUMBER: _ClassVar[int] page_size: int state: State data_type: str @@ -143,7 +128,8 @@ class RetrieveIngestionLogsRequest(_message.Message): ingestion_ts_end: int page_token: str only_metrics: bool - def __init__(self, page_size: _Optional[int] = ..., state: _Optional[_Union[State, str]] = ..., data_type: _Optional[str] = ..., request_id: _Optional[str] = ..., ingestion_ts_start: _Optional[int] = ..., ingestion_ts_end: _Optional[int] = ..., page_token: _Optional[str] = ..., only_metrics: bool = ...) -> None: ... 
+ object_type: str + def __init__(self, page_size: _Optional[int] = ..., state: _Optional[_Union[State, str]] = ..., data_type: _Optional[str] = ..., request_id: _Optional[str] = ..., ingestion_ts_start: _Optional[int] = ..., ingestion_ts_end: _Optional[int] = ..., page_token: _Optional[str] = ..., only_metrics: bool = ..., object_type: _Optional[str] = ...) -> None: ... class RetrieveIngestionLogsResponse(_message.Message): __slots__ = ("logs", "metrics", "next_page_token") @@ -154,3 +140,93 @@ class RetrieveIngestionLogsResponse(_message.Message): metrics: IngestionMetrics next_page_token: str def __init__(self, logs: _Optional[_Iterable[_Union[IngestionLog, _Mapping]]] = ..., metrics: _Optional[_Union[IngestionMetrics, _Mapping]] = ..., next_page_token: _Optional[str] = ...) -> None: ... + +class RetrieveDeviationsRequest(_message.Message): + __slots__ = ("page_size", "page_token", "ingestion_ts_start", "ingestion_ts_end", "state", "object_type", "branch_id", "site") + PAGE_SIZE_FIELD_NUMBER: _ClassVar[int] + PAGE_TOKEN_FIELD_NUMBER: _ClassVar[int] + INGESTION_TS_START_FIELD_NUMBER: _ClassVar[int] + INGESTION_TS_END_FIELD_NUMBER: _ClassVar[int] + STATE_FIELD_NUMBER: _ClassVar[int] + OBJECT_TYPE_FIELD_NUMBER: _ClassVar[int] + BRANCH_ID_FIELD_NUMBER: _ClassVar[int] + SITE_FIELD_NUMBER: _ClassVar[int] + page_size: int + page_token: str + ingestion_ts_start: int + ingestion_ts_end: int + state: _containers.RepeatedScalarFieldContainer[State] + object_type: _containers.RepeatedScalarFieldContainer[str] + branch_id: _containers.RepeatedScalarFieldContainer[str] + site: _containers.RepeatedScalarFieldContainer[str] + def __init__(self, page_size: _Optional[int] = ..., page_token: _Optional[str] = ..., ingestion_ts_start: _Optional[int] = ..., ingestion_ts_end: _Optional[int] = ..., state: _Optional[_Iterable[_Union[State, str]]] = ..., object_type: _Optional[_Iterable[str]] = ..., branch_id: _Optional[_Iterable[str]] = ..., site: _Optional[_Iterable[str]] = ...) 
-> None: ... + +class DeviationError(_message.Message): + __slots__ = ("message", "code") + MESSAGE_FIELD_NUMBER: _ClassVar[int] + CODE_FIELD_NUMBER: _ClassVar[int] + message: str + code: int + def __init__(self, message: _Optional[str] = ..., code: _Optional[int] = ...) -> None: ... + +class Change(_message.Message): + __slots__ = ("id", "object_type", "object_primary_value", "change_type", "before", "after") + ID_FIELD_NUMBER: _ClassVar[int] + OBJECT_TYPE_FIELD_NUMBER: _ClassVar[int] + OBJECT_PRIMARY_VALUE_FIELD_NUMBER: _ClassVar[int] + CHANGE_TYPE_FIELD_NUMBER: _ClassVar[int] + BEFORE_FIELD_NUMBER: _ClassVar[int] + AFTER_FIELD_NUMBER: _ClassVar[int] + id: str + object_type: str + object_primary_value: str + change_type: str + before: bytes + after: bytes + def __init__(self, id: _Optional[str] = ..., object_type: _Optional[str] = ..., object_primary_value: _Optional[str] = ..., change_type: _Optional[str] = ..., before: _Optional[bytes] = ..., after: _Optional[bytes] = ...) -> None: ... 
+ +class Deviation(_message.Message): + __slots__ = ("id", "ingestion_ts", "last_update_ts", "name", "source", "state", "object_type", "branch_id", "ingested_entity", "error", "changes") + ID_FIELD_NUMBER: _ClassVar[int] + INGESTION_TS_FIELD_NUMBER: _ClassVar[int] + LAST_UPDATE_TS_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + SOURCE_FIELD_NUMBER: _ClassVar[int] + STATE_FIELD_NUMBER: _ClassVar[int] + OBJECT_TYPE_FIELD_NUMBER: _ClassVar[int] + BRANCH_ID_FIELD_NUMBER: _ClassVar[int] + INGESTED_ENTITY_FIELD_NUMBER: _ClassVar[int] + ERROR_FIELD_NUMBER: _ClassVar[int] + CHANGES_FIELD_NUMBER: _ClassVar[int] + id: str + ingestion_ts: int + last_update_ts: int + name: str + source: str + state: State + object_type: str + branch_id: str + ingested_entity: _ingester_pb2.Entity + error: DeviationError + changes: _containers.RepeatedCompositeFieldContainer[Change] + def __init__(self, id: _Optional[str] = ..., ingestion_ts: _Optional[int] = ..., last_update_ts: _Optional[int] = ..., name: _Optional[str] = ..., source: _Optional[str] = ..., state: _Optional[_Union[State, str]] = ..., object_type: _Optional[str] = ..., branch_id: _Optional[str] = ..., ingested_entity: _Optional[_Union[_ingester_pb2.Entity, _Mapping]] = ..., error: _Optional[_Union[DeviationError, _Mapping]] = ..., changes: _Optional[_Iterable[_Union[Change, _Mapping]]] = ...) -> None: ... + +class RetrieveDeviationsResponse(_message.Message): + __slots__ = ("deviations", "next_page_token") + DEVIATIONS_FIELD_NUMBER: _ClassVar[int] + NEXT_PAGE_TOKEN_FIELD_NUMBER: _ClassVar[int] + deviations: _containers.RepeatedCompositeFieldContainer[Deviation] + next_page_token: str + def __init__(self, deviations: _Optional[_Iterable[_Union[Deviation, _Mapping]]] = ..., next_page_token: _Optional[str] = ...) -> None: ... + +class RetrieveDeviationByIDRequest(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... 
+ +class RetrieveDeviationByIDResponse(_message.Message): + __slots__ = ("deviation",) + DEVIATION_FIELD_NUMBER: _ClassVar[int] + deviation: Deviation + def __init__(self, deviation: _Optional[_Union[Deviation, _Mapping]] = ...) -> None: ... diff --git a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2_grpc.py b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2_grpc.py index 57b9f9d..73d37b7 100644 --- a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2_grpc.py +++ b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2_grpc.py @@ -15,31 +15,43 @@ def __init__(self, channel): Args: channel: A grpc.Channel. """ - self.RetrieveIngestionDataSources = channel.unary_unary( - '/diode.v1.ReconcilerService/RetrieveIngestionDataSources', - request_serializer=diode_dot_v1_dot_reconciler__pb2.RetrieveIngestionDataSourcesRequest.SerializeToString, - response_deserializer=diode_dot_v1_dot_reconciler__pb2.RetrieveIngestionDataSourcesResponse.FromString, - ) self.RetrieveIngestionLogs = channel.unary_unary( '/diode.v1.ReconcilerService/RetrieveIngestionLogs', request_serializer=diode_dot_v1_dot_reconciler__pb2.RetrieveIngestionLogsRequest.SerializeToString, response_deserializer=diode_dot_v1_dot_reconciler__pb2.RetrieveIngestionLogsResponse.FromString, ) + self.RetrieveDeviations = channel.unary_unary( + '/diode.v1.ReconcilerService/RetrieveDeviations', + request_serializer=diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationsRequest.SerializeToString, + response_deserializer=diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationsResponse.FromString, + ) + self.RetrieveDeviationByID = channel.unary_unary( + '/diode.v1.ReconcilerService/RetrieveDeviationByID', + request_serializer=diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationByIDRequest.SerializeToString, + response_deserializer=diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationByIDResponse.FromString, + ) class ReconcilerServiceServicer(object): """Reconciler service API """ - def RetrieveIngestionDataSources(self, 
request, context): - """Retrieves ingestion data sources + def RetrieveIngestionLogs(self, request, context): + """Retrieves ingestion logs """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') - def RetrieveIngestionLogs(self, request, context): - """Retrieves ingestion logs + def RetrieveDeviations(self, request, context): + """Retrieve deviations + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def RetrieveDeviationByID(self, request, context): + """Retrieve deviation by ID """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') @@ -48,16 +60,21 @@ def RetrieveIngestionLogs(self, request, context): def add_ReconcilerServiceServicer_to_server(servicer, server): rpc_method_handlers = { - 'RetrieveIngestionDataSources': grpc.unary_unary_rpc_method_handler( - servicer.RetrieveIngestionDataSources, - request_deserializer=diode_dot_v1_dot_reconciler__pb2.RetrieveIngestionDataSourcesRequest.FromString, - response_serializer=diode_dot_v1_dot_reconciler__pb2.RetrieveIngestionDataSourcesResponse.SerializeToString, - ), 'RetrieveIngestionLogs': grpc.unary_unary_rpc_method_handler( servicer.RetrieveIngestionLogs, request_deserializer=diode_dot_v1_dot_reconciler__pb2.RetrieveIngestionLogsRequest.FromString, response_serializer=diode_dot_v1_dot_reconciler__pb2.RetrieveIngestionLogsResponse.SerializeToString, ), + 'RetrieveDeviations': grpc.unary_unary_rpc_method_handler( + servicer.RetrieveDeviations, + request_deserializer=diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationsRequest.FromString, + response_serializer=diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationsResponse.SerializeToString, + ), + 'RetrieveDeviationByID': grpc.unary_unary_rpc_method_handler( + servicer.RetrieveDeviationByID, + 
request_deserializer=diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationByIDRequest.FromString, + response_serializer=diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationByIDResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( 'diode.v1.ReconcilerService', rpc_method_handlers) @@ -70,7 +87,24 @@ class ReconcilerService(object): """ @staticmethod - def RetrieveIngestionDataSources(request, + def RetrieveIngestionLogs(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/diode.v1.ReconcilerService/RetrieveIngestionLogs', + diode_dot_v1_dot_reconciler__pb2.RetrieveIngestionLogsRequest.SerializeToString, + diode_dot_v1_dot_reconciler__pb2.RetrieveIngestionLogsResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def RetrieveDeviations(request, target, options=(), channel_credentials=None, @@ -80,14 +114,14 @@ def RetrieveIngestionDataSources(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary(request, target, '/diode.v1.ReconcilerService/RetrieveIngestionDataSources', - diode_dot_v1_dot_reconciler__pb2.RetrieveIngestionDataSourcesRequest.SerializeToString, - diode_dot_v1_dot_reconciler__pb2.RetrieveIngestionDataSourcesResponse.FromString, + return grpc.experimental.unary_unary(request, target, '/diode.v1.ReconcilerService/RetrieveDeviations', + diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationsRequest.SerializeToString, + diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationsResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def RetrieveIngestionLogs(request, + def RetrieveDeviationByID(request, target, options=(), 
channel_credentials=None, @@ -97,8 +131,8 @@ def RetrieveIngestionLogs(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary(request, target, '/diode.v1.ReconcilerService/RetrieveIngestionLogs', - diode_dot_v1_dot_reconciler__pb2.RetrieveIngestionLogsRequest.SerializeToString, - diode_dot_v1_dot_reconciler__pb2.RetrieveIngestionLogsResponse.FromString, + return grpc.experimental.unary_unary(request, target, '/diode.v1.ReconcilerService/RetrieveDeviationByID', + diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationByIDRequest.SerializeToString, + diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationByIDResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/netbox_diode_plugin/tests/test_tables.py b/netbox_diode_plugin/tests/test_tables.py index f05ba6a..38dd93e 100644 --- a/netbox_diode_plugin/tests/test_tables.py +++ b/netbox_diode_plugin/tests/test_tables.py @@ -20,7 +20,7 @@ def setUp(self): self.mock_data = [ reconciler_pb2.IngestionLog( ingestion_ts=1638316800000000000, # Example timestamp in nanoseconds - state=reconciler_pb2.State.RECONCILED, + state=reconciler_pb2.State.APPLIED, data_type="dcim.site", request_id="12345", producer_app_name="TestApp", @@ -59,7 +59,7 @@ def test_ingestion_ts_rendering(self): def test_state_rendering(self): """Test rendering of the state column.""" table = IngestionLogsTable(self.mock_data) - self.assertEqual(table.rows[0].get_cell("state"), "Reconciled") + self.assertEqual(table.rows[0].get_cell("state"), "Applied") self.assertEqual(table.rows[1].get_cell("state"), None) def test_data_type_rendering(self): diff --git a/netbox_diode_plugin/tests/test_views.py b/netbox_diode_plugin/tests/test_views.py index 9c58c13..5c28ce2 100644 --- a/netbox_diode_plugin/tests/test_views.py +++ b/netbox_diode_plugin/tests/test_views.py @@ -74,7 +74,7 @@ def test_ingestion_logs_retrieve_logs(self): logs=[ 
reconciler_pb2.IngestionLog( data_type="dcim.site", - state=reconciler_pb2.State.RECONCILED, + state=reconciler_pb2.State.APPLIED, request_id="c6ecd1ea-b23b-4f98-8593-d01d5a0da012", ingestion_ts=1725617988, producer_app_name="diode-test-app", @@ -119,7 +119,7 @@ def test_cached_metrics(self): logs=[ reconciler_pb2.IngestionLog( data_type="dcim.site", - state=reconciler_pb2.State.RECONCILED, + state=reconciler_pb2.State.APPLIED, request_id="c6ecd1ea-b23b-4f98-8593-d01d5a0da012", ingestion_ts=1725617988, producer_app_name="diode-test-app", @@ -145,7 +145,7 @@ def test_cached_metrics(self): "ingestion_metrics", { "queued": 10, - "reconciled": 20, + "applied": 20, "failed": 5, "no_changes": 65, "total": 1, diff --git a/netbox_diode_plugin/views.py b/netbox_diode_plugin/views.py index 8acfe70..75f884b 100644 --- a/netbox_diode_plugin/views.py +++ b/netbox_diode_plugin/views.py @@ -7,7 +7,9 @@ from django.contrib import messages from django.contrib.auth import get_user, get_user_model from django.core.cache import cache +from django.http import HttpResponseRedirect from django.shortcuts import redirect, render +from django.utils.http import url_has_allowed_host_and_scheme from django.views.generic import View from netbox.plugins import get_plugin_config from netbox.views import generic @@ -28,6 +30,17 @@ User = get_user_model() +def redirect_to_login(request): + """Redirect to login view.""" + redirect_url = netbox_settings.LOGIN_URL + target = request.path + + if target and url_has_allowed_host_and_scheme(target, allowed_hosts=None): + redirect_url = f"{netbox_settings.LOGIN_URL}?next={target}" + + return HttpResponseRedirect(redirect_url) + + class IngestionLogsView(View): """Ingestion logs view.""" @@ -36,7 +49,7 @@ class IngestionLogsView(View): def get(self, request): """Render ingestion logs template.""" if not request.user.is_authenticated or not request.user.is_staff: - return redirect(f"{netbox_settings.LOGIN_URL}?next={request.path}") + return 
redirect_to_login(request) netbox_to_diode_username = get_diode_username_for_user_type("netbox_to_diode") try: @@ -118,7 +131,7 @@ class SettingsView(View): def get(self, request): """Render settings template.""" if not request.user.is_authenticated or not request.user.is_staff: - return redirect(f"{netbox_settings.LOGIN_URL}?next={request.path}") + return redirect_to_login(request) diode_target_override = get_plugin_config( "netbox_diode_plugin", "diode_target_override" @@ -187,7 +200,7 @@ class SettingsEditView(generic.ObjectEditView): def get(self, request, *args, **kwargs): """GET request handler.""" if not request.user.is_authenticated or not request.user.is_staff: - return redirect(f"{netbox_settings.LOGIN_URL}?next={request.path}") + return redirect_to_login(request) diode_target_override = get_plugin_config( "netbox_diode_plugin", "diode_target_override" @@ -207,7 +220,7 @@ def get(self, request, *args, **kwargs): def post(self, request, *args, **kwargs): """POST request handler.""" if not request.user.is_authenticated or not request.user.is_staff: - return redirect(f"{netbox_settings.LOGIN_URL}?next={request.path}") + return redirect_to_login(request) diode_target_override = get_plugin_config( "netbox_diode_plugin", "diode_target_override" @@ -272,7 +285,7 @@ def _retrieve_users(self): def get(self, request): """GET request handler.""" if not request.user.is_authenticated or not request.user.is_staff: - return redirect(f"{netbox_settings.LOGIN_URL}?next={request.path}") + return redirect_to_login(request) users = self._retrieve_users() @@ -285,7 +298,7 @@ def get(self, request): def post(self, request): """POST request handler.""" if not request.user.is_authenticated or not request.user.is_staff: - return redirect(f"{netbox_settings.LOGIN_URL}?next={request.path}") + return redirect_to_login(request) users = self._retrieve_users() From 1fa65a453dc0678a7eabc70d54d1384bea605119 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Thu, 23 Jan 2025 14:07:30 
+0000 Subject: [PATCH 11/52] chore: regenerate reconciler sdk (#56) Signed-off-by: Michal Fiedorowicz --- .../reconciler/sdk/v1/reconciler_pb2.py | 36 +++++++++---------- .../reconciler/sdk/v1/reconciler_pb2.pyi | 6 ++-- 2 files changed, 20 insertions(+), 22 deletions(-) diff --git a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.py b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.py index 02d4c58..45688f4 100644 --- a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.py +++ b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.py @@ -16,7 +16,7 @@ from netbox_diode_plugin.reconciler.sdk.validate import validate_pb2 as validate_dot_validate__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19\x64iode/v1/reconciler.proto\x12\x08\x64iode.v1\x1a\x17\x64iode/v1/ingester.proto\x1a\x17validate/validate.proto\"\xbe\x02\n\x0eIngestionError\x12\x18\n\x07message\x18\x01 \x01(\tR\x07message\x12\x12\n\x04\x63ode\x18\x02 \x01(\x05R\x04\x63ode\x12:\n\x07\x64\x65tails\x18\x03 \x01(\x0b\x32 .diode.v1.IngestionError.DetailsR\x07\x64\x65tails\x1a\xc1\x01\n\x07\x44\x65tails\x12\"\n\rchange_set_id\x18\x01 \x01(\tR\x0b\x63hangeSetId\x12\x16\n\x06result\x18\x02 \x01(\tR\x06result\x12>\n\x06\x65rrors\x18\x03 \x03(\x0b\x32&.diode.v1.IngestionError.Details.ErrorR\x06\x65rrors\x1a:\n\x05\x45rror\x12\x14\n\x05\x65rror\x18\x01 \x01(\tR\x05\x65rror\x12\x1b\n\tchange_id\x18\x02 \x01(\tR\x08\x63hangeId\"\x97\x01\n\x10IngestionMetrics\x12\x14\n\x05total\x18\x01 \x01(\x05R\x05total\x12\x16\n\x06queued\x18\x02 \x01(\x05R\x06queued\x12\x1e\n\nreconciled\x18\x03 \x01(\x05R\nreconciled\x12\x16\n\x06\x66\x61iled\x18\x04 \x01(\x05R\x06\x66\x61iled\x12\x1d\n\nno_changes\x18\x05 \x01(\x05R\tnoChanges\"\x9e\x01\n\tChangeSet\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04\x64\x61ta\x18\x02 \x01(\x0cR\x04\x64\x61ta\x12 \n\tbranch_id\x18\x03 \x01(\tH\x00R\x08\x62ranchId\x88\x01\x01\x12*\n\x0e\x64\x65viation_name\x18\x04 
\x01(\tH\x01R\rdeviationName\x88\x01\x01\x42\x0c\n\n_branch_idB\x11\n\x0f_deviation_name\"\xf1\x03\n\x0cIngestionLog\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1f\n\tdata_type\x18\x02 \x01(\tB\x02\x18\x01R\x08\x64\x61taType\x12%\n\x05state\x18\x03 \x01(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1d\n\nrequest_id\x18\x04 \x01(\tR\trequestId\x12!\n\x0cingestion_ts\x18\x05 \x01(\x03R\x0bingestionTs\x12*\n\x11producer_app_name\x18\x06 \x01(\tR\x0fproducerAppName\x12\x30\n\x14producer_app_version\x18\x07 \x01(\tR\x12producerAppVersion\x12\x19\n\x08sdk_name\x18\x08 \x01(\tR\x07sdkName\x12\x1f\n\x0bsdk_version\x18\t \x01(\tR\nsdkVersion\x12(\n\x06\x65ntity\x18\n \x01(\x0b\x32\x10.diode.v1.EntityR\x06\x65ntity\x12.\n\x05\x65rror\x18\x0b \x01(\x0b\x32\x18.diode.v1.IngestionErrorR\x05\x65rror\x12\x32\n\nchange_set\x18\x0c \x01(\x0b\x32\x13.diode.v1.ChangeSetR\tchangeSet\x12\x1f\n\x0bobject_type\x18\r \x01(\tR\nobjectType\"\xff\x02\n\x1cRetrieveIngestionLogsRequest\x12 \n\tpage_size\x18\x01 \x01(\x05H\x00R\x08pageSize\x88\x01\x01\x12*\n\x05state\x18\x02 \x01(\x0e\x32\x0f.diode.v1.StateH\x01R\x05state\x88\x01\x01\x12\x1f\n\tdata_type\x18\x03 \x01(\tB\x02\x18\x01R\x08\x64\x61taType\x12\x1d\n\nrequest_id\x18\x04 \x01(\tR\trequestId\x12,\n\x12ingestion_ts_start\x18\x05 \x01(\x03R\x10ingestionTsStart\x12(\n\x10ingestion_ts_end\x18\x06 \x01(\x03R\x0eingestionTsEnd\x12\x1d\n\npage_token\x18\x07 \x01(\tR\tpageToken\x12!\n\x0conly_metrics\x18\x08 \x01(\x08R\x0bonlyMetrics\x12\x1f\n\x0bobject_type\x18\t \x01(\tR\nobjectTypeB\x0c\n\n_page_sizeB\x08\n\x06_state\"\xa9\x01\n\x1dRetrieveIngestionLogsResponse\x12*\n\x04logs\x18\x01 \x03(\x0b\x32\x16.diode.v1.IngestionLogR\x04logs\x12\x34\n\x07metrics\x18\x02 \x01(\x0b\x32\x1a.diode.v1.IngestionMetricsR\x07metrics\x12&\n\x0fnext_page_token\x18\x03 \x01(\tR\rnextPageToken\"\xbb\x02\n\x19RetrieveDeviationsRequest\x12 \n\tpage_size\x18\x01 \x01(\x05H\x00R\x08pageSize\x88\x01\x01\x12\x1d\n\npage_token\x18\x02 
\x01(\tR\tpageToken\x12,\n\x12ingestion_ts_start\x18\x03 \x01(\x03R\x10ingestionTsStart\x12(\n\x10ingestion_ts_end\x18\x04 \x01(\x03R\x0eingestionTsEnd\x12%\n\x05state\x18\x05 \x03(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1f\n\x0bobject_type\x18\x06 \x03(\tR\nobjectType\x12\x1b\n\tbranch_id\x18\x07 \x03(\tR\x08\x62ranchId\x12\x12\n\x04site\x18\x08 \x03(\tR\x04siteB\x0c\n\n_page_size\">\n\x0e\x44\x65viationError\x12\x18\n\x07message\x18\x01 \x01(\tR\x07message\x12\x12\n\x04\x63ode\x18\x02 \x01(\x05R\x04\x63ode\"\xba\x01\n\x06\x43hange\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1f\n\x0bobject_type\x18\x02 \x01(\tR\nobjectType\x12\x30\n\x14object_primary_value\x18\x03 \x01(\tR\x12objectPrimaryValue\x12\x1f\n\x0b\x63hange_type\x18\x04 \x01(\tR\nchangeType\x12\x16\n\x06\x62\x65\x66ore\x18\x05 \x01(\x0cR\x06\x62\x65\x66ore\x12\x14\n\x05\x61\x66ter\x18\x06 \x01(\x0cR\x05\x61\x66ter\"\x9f\x03\n\tDeviation\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12!\n\x0cingestion_ts\x18\x02 \x01(\x03R\x0bingestionTs\x12$\n\x0elast_update_ts\x18\x03 \x01(\x03R\x0clastUpdateTs\x12\x12\n\x04name\x18\x04 \x01(\tR\x04name\x12\x16\n\x06source\x18\x05 \x01(\tR\x06source\x12%\n\x05state\x18\x06 \x01(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1f\n\x0bobject_type\x18\x07 \x01(\tR\nobjectType\x12 \n\tbranch_id\x18\x08 \x01(\tH\x00R\x08\x62ranchId\x88\x01\x01\x12\x39\n\x0fingested_entity\x18\t \x01(\x0b\x32\x10.diode.v1.EntityR\x0eingestedEntity\x12.\n\x05\x65rror\x18\n \x01(\x0b\x32\x18.diode.v1.DeviationErrorR\x05\x65rror\x12*\n\x07\x63hanges\x18\x0b \x03(\x0b\x32\x10.diode.v1.ChangeR\x07\x63hangesB\x0c\n\n_branch_id\"y\n\x1aRetrieveDeviationsResponse\x12\x33\n\ndeviations\x18\x01 \x03(\x0b\x32\x13.diode.v1.DeviationR\ndeviations\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\".\n\x1cRetrieveDeviationByIDRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\"R\n\x1dRetrieveDeviationByIDResponse\x12\x31\n\tdeviation\x18\x01 
\x01(\x0b\x32\x13.diode.v1.DeviationR\tdeviation*w\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\n\n\x06QUEUED\x10\x01\x12\x08\n\x04OPEN\x10\x02\x12\x0b\n\x07\x41PPLIED\x10\x03\x12\n\n\x06\x46\x41ILED\x10\x04\x12\x0e\n\nNO_CHANGES\x10\x05\x12\x0b\n\x07IGNORED\x10\x06\x12\x0b\n\x07\x45RRORED\x10\x07\x32\xcd\x02\n\x11ReconcilerService\x12m\n\x15RetrieveIngestionLogs\x12&.diode.v1.RetrieveIngestionLogsRequest\x1a\'.diode.v1.RetrieveIngestionLogsResponse\"\x03\x88\x02\x01\x12_\n\x12RetrieveDeviations\x12#.diode.v1.RetrieveDeviationsRequest\x1a$.diode.v1.RetrieveDeviationsResponse\x12h\n\x15RetrieveDeviationByID\x12&.diode.v1.RetrieveDeviationByIDRequest\x1a\'.diode.v1.RetrieveDeviationByIDResponseBDZBgithub.com/netboxlabs/diode/diode-server/gen/diode/v1/reconcilerpbb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19\x64iode/v1/reconciler.proto\x12\x08\x64iode.v1\x1a\x17\x64iode/v1/ingester.proto\x1a\x17validate/validate.proto\"\xbe\x02\n\x0eIngestionError\x12\x18\n\x07message\x18\x01 \x01(\tR\x07message\x12\x12\n\x04\x63ode\x18\x02 \x01(\x05R\x04\x63ode\x12:\n\x07\x64\x65tails\x18\x03 \x01(\x0b\x32 .diode.v1.IngestionError.DetailsR\x07\x64\x65tails\x1a\xc1\x01\n\x07\x44\x65tails\x12\"\n\rchange_set_id\x18\x01 \x01(\tR\x0b\x63hangeSetId\x12\x16\n\x06result\x18\x02 \x01(\tR\x06result\x12>\n\x06\x65rrors\x18\x03 \x03(\x0b\x32&.diode.v1.IngestionError.Details.ErrorR\x06\x65rrors\x1a:\n\x05\x45rror\x12\x14\n\x05\x65rror\x18\x01 \x01(\tR\x05\x65rror\x12\x1b\n\tchange_id\x18\x02 \x01(\tR\x08\x63hangeId\"\x97\x01\n\x10IngestionMetrics\x12\x14\n\x05total\x18\x01 \x01(\x05R\x05total\x12\x16\n\x06queued\x18\x02 \x01(\x05R\x06queued\x12\x1e\n\nreconciled\x18\x03 \x01(\x05R\nreconciled\x12\x16\n\x06\x66\x61iled\x18\x04 \x01(\x05R\x06\x66\x61iled\x12\x1d\n\nno_changes\x18\x05 \x01(\x05R\tnoChanges\"\x9e\x01\n\tChangeSet\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04\x64\x61ta\x18\x02 \x01(\x0cR\x04\x64\x61ta\x12 \n\tbranch_id\x18\x03 
\x01(\tH\x00R\x08\x62ranchId\x88\x01\x01\x12*\n\x0e\x64\x65viation_name\x18\x04 \x01(\tH\x01R\rdeviationName\x88\x01\x01\x42\x0c\n\n_branch_idB\x11\n\x0f_deviation_name\"\xf1\x03\n\x0cIngestionLog\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1f\n\tdata_type\x18\x02 \x01(\tB\x02\x18\x01R\x08\x64\x61taType\x12%\n\x05state\x18\x03 \x01(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1d\n\nrequest_id\x18\x04 \x01(\tR\trequestId\x12!\n\x0cingestion_ts\x18\x05 \x01(\x03R\x0bingestionTs\x12*\n\x11producer_app_name\x18\x06 \x01(\tR\x0fproducerAppName\x12\x30\n\x14producer_app_version\x18\x07 \x01(\tR\x12producerAppVersion\x12\x19\n\x08sdk_name\x18\x08 \x01(\tR\x07sdkName\x12\x1f\n\x0bsdk_version\x18\t \x01(\tR\nsdkVersion\x12(\n\x06\x65ntity\x18\n \x01(\x0b\x32\x10.diode.v1.EntityR\x06\x65ntity\x12.\n\x05\x65rror\x18\x0b \x01(\x0b\x32\x18.diode.v1.IngestionErrorR\x05\x65rror\x12\x32\n\nchange_set\x18\x0c \x01(\x0b\x32\x13.diode.v1.ChangeSetR\tchangeSet\x12\x1f\n\x0bobject_type\x18\r \x01(\tR\nobjectType\"\xff\x02\n\x1cRetrieveIngestionLogsRequest\x12 \n\tpage_size\x18\x01 \x01(\x05H\x00R\x08pageSize\x88\x01\x01\x12*\n\x05state\x18\x02 \x01(\x0e\x32\x0f.diode.v1.StateH\x01R\x05state\x88\x01\x01\x12\x1f\n\tdata_type\x18\x03 \x01(\tB\x02\x18\x01R\x08\x64\x61taType\x12\x1d\n\nrequest_id\x18\x04 \x01(\tR\trequestId\x12,\n\x12ingestion_ts_start\x18\x05 \x01(\x03R\x10ingestionTsStart\x12(\n\x10ingestion_ts_end\x18\x06 \x01(\x03R\x0eingestionTsEnd\x12\x1d\n\npage_token\x18\x07 \x01(\tR\tpageToken\x12!\n\x0conly_metrics\x18\x08 \x01(\x08R\x0bonlyMetrics\x12\x1f\n\x0bobject_type\x18\t \x01(\tR\nobjectTypeB\x0c\n\n_page_sizeB\x08\n\x06_state\"\xa9\x01\n\x1dRetrieveIngestionLogsResponse\x12*\n\x04logs\x18\x01 \x03(\x0b\x32\x16.diode.v1.IngestionLogR\x04logs\x12\x34\n\x07metrics\x18\x02 \x01(\x0b\x32\x1a.diode.v1.IngestionMetricsR\x07metrics\x12&\n\x0fnext_page_token\x18\x03 \x01(\tR\rnextPageToken\"\xa7\x02\n\x19RetrieveDeviationsRequest\x12 \n\tpage_size\x18\x01 
\x01(\x05H\x00R\x08pageSize\x88\x01\x01\x12\x1d\n\npage_token\x18\x02 \x01(\tR\tpageToken\x12,\n\x12ingestion_ts_start\x18\x03 \x01(\x03R\x10ingestionTsStart\x12(\n\x10ingestion_ts_end\x18\x04 \x01(\x03R\x0eingestionTsEnd\x12%\n\x05state\x18\x05 \x03(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1f\n\x0bobject_type\x18\x06 \x03(\tR\nobjectType\x12\x1b\n\tbranch_id\x18\x07 \x03(\tR\x08\x62ranchIdB\x0c\n\n_page_size\">\n\x0e\x44\x65viationError\x12\x18\n\x07message\x18\x01 \x01(\tR\x07message\x12\x12\n\x04\x63ode\x18\x02 \x01(\x05R\x04\x63ode\"\xba\x01\n\x06\x43hange\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1f\n\x0bobject_type\x18\x02 \x01(\tR\nobjectType\x12\x30\n\x14object_primary_value\x18\x03 \x01(\tR\x12objectPrimaryValue\x12\x1f\n\x0b\x63hange_type\x18\x04 \x01(\tR\nchangeType\x12\x16\n\x06\x62\x65\x66ore\x18\x05 \x01(\x0cR\x06\x62\x65\x66ore\x12\x14\n\x05\x61\x66ter\x18\x06 \x01(\x0cR\x05\x61\x66ter\"\x9f\x03\n\tDeviation\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12!\n\x0cingestion_ts\x18\x02 \x01(\x03R\x0bingestionTs\x12$\n\x0elast_update_ts\x18\x03 \x01(\x03R\x0clastUpdateTs\x12\x12\n\x04name\x18\x04 \x01(\tR\x04name\x12\x16\n\x06source\x18\x05 \x01(\tR\x06source\x12%\n\x05state\x18\x06 \x01(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1f\n\x0bobject_type\x18\x07 \x01(\tR\nobjectType\x12 \n\tbranch_id\x18\x08 \x01(\tH\x00R\x08\x62ranchId\x88\x01\x01\x12\x39\n\x0fingested_entity\x18\t \x01(\x0b\x32\x10.diode.v1.EntityR\x0eingestedEntity\x12.\n\x05\x65rror\x18\n \x01(\x0b\x32\x18.diode.v1.DeviationErrorR\x05\x65rror\x12*\n\x07\x63hanges\x18\x0b \x03(\x0b\x32\x10.diode.v1.ChangeR\x07\x63hangesB\x0c\n\n_branch_id\"y\n\x1aRetrieveDeviationsResponse\x12\x33\n\ndeviations\x18\x01 \x03(\x0b\x32\x13.diode.v1.DeviationR\ndeviations\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\".\n\x1cRetrieveDeviationByIDRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\"R\n\x1dRetrieveDeviationByIDResponse\x12\x31\n\tdeviation\x18\x01 
\x01(\x0b\x32\x13.diode.v1.DeviationR\tdeviation*w\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\n\n\x06QUEUED\x10\x01\x12\x08\n\x04OPEN\x10\x02\x12\x0b\n\x07\x41PPLIED\x10\x03\x12\n\n\x06\x46\x41ILED\x10\x04\x12\x0e\n\nNO_CHANGES\x10\x05\x12\x0b\n\x07IGNORED\x10\x06\x12\x0b\n\x07\x45RRORED\x10\x07\x32\xcd\x02\n\x11ReconcilerService\x12m\n\x15RetrieveIngestionLogs\x12&.diode.v1.RetrieveIngestionLogsRequest\x1a\'.diode.v1.RetrieveIngestionLogsResponse\"\x03\x88\x02\x01\x12_\n\x12RetrieveDeviations\x12#.diode.v1.RetrieveDeviationsRequest\x1a$.diode.v1.RetrieveDeviationsResponse\x12h\n\x15RetrieveDeviationByID\x12&.diode.v1.RetrieveDeviationByIDRequest\x1a\'.diode.v1.RetrieveDeviationByIDResponseBDZBgithub.com/netboxlabs/diode/diode-server/gen/diode/v1/reconcilerpbb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -30,8 +30,8 @@ _globals['_RETRIEVEINGESTIONLOGSREQUEST'].fields_by_name['data_type']._serialized_options = b'\030\001' _globals['_RECONCILERSERVICE'].methods_by_name['RetrieveIngestionLogs']._loaded_options = None _globals['_RECONCILERSERVICE'].methods_by_name['RetrieveIngestionLogs']._serialized_options = b'\210\002\001' - _globals['_STATE']._serialized_start=3027 - _globals['_STATE']._serialized_end=3146 + _globals['_STATE']._serialized_start=3007 + _globals['_STATE']._serialized_end=3126 _globals['_INGESTIONERROR']._serialized_start=90 _globals['_INGESTIONERROR']._serialized_end=408 _globals['_INGESTIONERROR_DETAILS']._serialized_start=215 @@ -49,19 +49,19 @@ _globals['_RETRIEVEINGESTIONLOGSRESPONSE']._serialized_start=1612 _globals['_RETRIEVEINGESTIONLOGSRESPONSE']._serialized_end=1781 _globals['_RETRIEVEDEVIATIONSREQUEST']._serialized_start=1784 - _globals['_RETRIEVEDEVIATIONSREQUEST']._serialized_end=2099 - _globals['_DEVIATIONERROR']._serialized_start=2101 - _globals['_DEVIATIONERROR']._serialized_end=2163 - _globals['_CHANGE']._serialized_start=2166 - _globals['_CHANGE']._serialized_end=2352 - 
_globals['_DEVIATION']._serialized_start=2355 - _globals['_DEVIATION']._serialized_end=2770 - _globals['_RETRIEVEDEVIATIONSRESPONSE']._serialized_start=2772 - _globals['_RETRIEVEDEVIATIONSRESPONSE']._serialized_end=2893 - _globals['_RETRIEVEDEVIATIONBYIDREQUEST']._serialized_start=2895 - _globals['_RETRIEVEDEVIATIONBYIDREQUEST']._serialized_end=2941 - _globals['_RETRIEVEDEVIATIONBYIDRESPONSE']._serialized_start=2943 - _globals['_RETRIEVEDEVIATIONBYIDRESPONSE']._serialized_end=3025 - _globals['_RECONCILERSERVICE']._serialized_start=3149 - _globals['_RECONCILERSERVICE']._serialized_end=3482 + _globals['_RETRIEVEDEVIATIONSREQUEST']._serialized_end=2079 + _globals['_DEVIATIONERROR']._serialized_start=2081 + _globals['_DEVIATIONERROR']._serialized_end=2143 + _globals['_CHANGE']._serialized_start=2146 + _globals['_CHANGE']._serialized_end=2332 + _globals['_DEVIATION']._serialized_start=2335 + _globals['_DEVIATION']._serialized_end=2750 + _globals['_RETRIEVEDEVIATIONSRESPONSE']._serialized_start=2752 + _globals['_RETRIEVEDEVIATIONSRESPONSE']._serialized_end=2873 + _globals['_RETRIEVEDEVIATIONBYIDREQUEST']._serialized_start=2875 + _globals['_RETRIEVEDEVIATIONBYIDREQUEST']._serialized_end=2921 + _globals['_RETRIEVEDEVIATIONBYIDRESPONSE']._serialized_start=2923 + _globals['_RETRIEVEDEVIATIONBYIDRESPONSE']._serialized_end=3005 + _globals['_RECONCILERSERVICE']._serialized_start=3129 + _globals['_RECONCILERSERVICE']._serialized_end=3462 # @@protoc_insertion_point(module_scope) diff --git a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.pyi b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.pyi index 60f7cb6..0747660 100644 --- a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.pyi +++ b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.pyi @@ -142,7 +142,7 @@ class RetrieveIngestionLogsResponse(_message.Message): def __init__(self, logs: _Optional[_Iterable[_Union[IngestionLog, _Mapping]]] = ..., metrics: _Optional[_Union[IngestionMetrics, _Mapping]] = ..., 
next_page_token: _Optional[str] = ...) -> None: ... class RetrieveDeviationsRequest(_message.Message): - __slots__ = ("page_size", "page_token", "ingestion_ts_start", "ingestion_ts_end", "state", "object_type", "branch_id", "site") + __slots__ = ("page_size", "page_token", "ingestion_ts_start", "ingestion_ts_end", "state", "object_type", "branch_id") PAGE_SIZE_FIELD_NUMBER: _ClassVar[int] PAGE_TOKEN_FIELD_NUMBER: _ClassVar[int] INGESTION_TS_START_FIELD_NUMBER: _ClassVar[int] @@ -150,7 +150,6 @@ class RetrieveDeviationsRequest(_message.Message): STATE_FIELD_NUMBER: _ClassVar[int] OBJECT_TYPE_FIELD_NUMBER: _ClassVar[int] BRANCH_ID_FIELD_NUMBER: _ClassVar[int] - SITE_FIELD_NUMBER: _ClassVar[int] page_size: int page_token: str ingestion_ts_start: int @@ -158,8 +157,7 @@ class RetrieveDeviationsRequest(_message.Message): state: _containers.RepeatedScalarFieldContainer[State] object_type: _containers.RepeatedScalarFieldContainer[str] branch_id: _containers.RepeatedScalarFieldContainer[str] - site: _containers.RepeatedScalarFieldContainer[str] - def __init__(self, page_size: _Optional[int] = ..., page_token: _Optional[str] = ..., ingestion_ts_start: _Optional[int] = ..., ingestion_ts_end: _Optional[int] = ..., state: _Optional[_Iterable[_Union[State, str]]] = ..., object_type: _Optional[_Iterable[str]] = ..., branch_id: _Optional[_Iterable[str]] = ..., site: _Optional[_Iterable[str]] = ...) -> None: ... + def __init__(self, page_size: _Optional[int] = ..., page_token: _Optional[str] = ..., ingestion_ts_start: _Optional[int] = ..., ingestion_ts_end: _Optional[int] = ..., state: _Optional[_Iterable[_Union[State, str]]] = ..., object_type: _Optional[_Iterable[str]] = ..., branch_id: _Optional[_Iterable[str]] = ...) -> None: ... 
class DeviationError(_message.Message): __slots__ = ("message", "code") From 89adbae0e6a7de901f49636c48b610d093eec5b9 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Fri, 24 Jan 2025 14:04:20 +0000 Subject: [PATCH 12/52] chore: regenerate reconciler proto (#57) Signed-off-by: Michal Fiedorowicz --- .../reconciler/sdk/v1/reconciler_pb2.py | 48 +++++++++---------- .../reconciler/sdk/v1/reconciler_pb2.pyi | 12 +++-- 2 files changed, 32 insertions(+), 28 deletions(-) diff --git a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.py b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.py index 45688f4..e811c9c 100644 --- a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.py +++ b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.py @@ -16,7 +16,7 @@ from netbox_diode_plugin.reconciler.sdk.validate import validate_pb2 as validate_dot_validate__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19\x64iode/v1/reconciler.proto\x12\x08\x64iode.v1\x1a\x17\x64iode/v1/ingester.proto\x1a\x17validate/validate.proto\"\xbe\x02\n\x0eIngestionError\x12\x18\n\x07message\x18\x01 \x01(\tR\x07message\x12\x12\n\x04\x63ode\x18\x02 \x01(\x05R\x04\x63ode\x12:\n\x07\x64\x65tails\x18\x03 \x01(\x0b\x32 .diode.v1.IngestionError.DetailsR\x07\x64\x65tails\x1a\xc1\x01\n\x07\x44\x65tails\x12\"\n\rchange_set_id\x18\x01 \x01(\tR\x0b\x63hangeSetId\x12\x16\n\x06result\x18\x02 \x01(\tR\x06result\x12>\n\x06\x65rrors\x18\x03 \x03(\x0b\x32&.diode.v1.IngestionError.Details.ErrorR\x06\x65rrors\x1a:\n\x05\x45rror\x12\x14\n\x05\x65rror\x18\x01 \x01(\tR\x05\x65rror\x12\x1b\n\tchange_id\x18\x02 \x01(\tR\x08\x63hangeId\"\x97\x01\n\x10IngestionMetrics\x12\x14\n\x05total\x18\x01 \x01(\x05R\x05total\x12\x16\n\x06queued\x18\x02 \x01(\x05R\x06queued\x12\x1e\n\nreconciled\x18\x03 \x01(\x05R\nreconciled\x12\x16\n\x06\x66\x61iled\x18\x04 \x01(\x05R\x06\x66\x61iled\x12\x1d\n\nno_changes\x18\x05 \x01(\x05R\tnoChanges\"\x9e\x01\n\tChangeSet\x12\x0e\n\x02id\x18\x01 
\x01(\tR\x02id\x12\x12\n\x04\x64\x61ta\x18\x02 \x01(\x0cR\x04\x64\x61ta\x12 \n\tbranch_id\x18\x03 \x01(\tH\x00R\x08\x62ranchId\x88\x01\x01\x12*\n\x0e\x64\x65viation_name\x18\x04 \x01(\tH\x01R\rdeviationName\x88\x01\x01\x42\x0c\n\n_branch_idB\x11\n\x0f_deviation_name\"\xf1\x03\n\x0cIngestionLog\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1f\n\tdata_type\x18\x02 \x01(\tB\x02\x18\x01R\x08\x64\x61taType\x12%\n\x05state\x18\x03 \x01(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1d\n\nrequest_id\x18\x04 \x01(\tR\trequestId\x12!\n\x0cingestion_ts\x18\x05 \x01(\x03R\x0bingestionTs\x12*\n\x11producer_app_name\x18\x06 \x01(\tR\x0fproducerAppName\x12\x30\n\x14producer_app_version\x18\x07 \x01(\tR\x12producerAppVersion\x12\x19\n\x08sdk_name\x18\x08 \x01(\tR\x07sdkName\x12\x1f\n\x0bsdk_version\x18\t \x01(\tR\nsdkVersion\x12(\n\x06\x65ntity\x18\n \x01(\x0b\x32\x10.diode.v1.EntityR\x06\x65ntity\x12.\n\x05\x65rror\x18\x0b \x01(\x0b\x32\x18.diode.v1.IngestionErrorR\x05\x65rror\x12\x32\n\nchange_set\x18\x0c \x01(\x0b\x32\x13.diode.v1.ChangeSetR\tchangeSet\x12\x1f\n\x0bobject_type\x18\r \x01(\tR\nobjectType\"\xff\x02\n\x1cRetrieveIngestionLogsRequest\x12 \n\tpage_size\x18\x01 \x01(\x05H\x00R\x08pageSize\x88\x01\x01\x12*\n\x05state\x18\x02 \x01(\x0e\x32\x0f.diode.v1.StateH\x01R\x05state\x88\x01\x01\x12\x1f\n\tdata_type\x18\x03 \x01(\tB\x02\x18\x01R\x08\x64\x61taType\x12\x1d\n\nrequest_id\x18\x04 \x01(\tR\trequestId\x12,\n\x12ingestion_ts_start\x18\x05 \x01(\x03R\x10ingestionTsStart\x12(\n\x10ingestion_ts_end\x18\x06 \x01(\x03R\x0eingestionTsEnd\x12\x1d\n\npage_token\x18\x07 \x01(\tR\tpageToken\x12!\n\x0conly_metrics\x18\x08 \x01(\x08R\x0bonlyMetrics\x12\x1f\n\x0bobject_type\x18\t \x01(\tR\nobjectTypeB\x0c\n\n_page_sizeB\x08\n\x06_state\"\xa9\x01\n\x1dRetrieveIngestionLogsResponse\x12*\n\x04logs\x18\x01 \x03(\x0b\x32\x16.diode.v1.IngestionLogR\x04logs\x12\x34\n\x07metrics\x18\x02 \x01(\x0b\x32\x1a.diode.v1.IngestionMetricsR\x07metrics\x12&\n\x0fnext_page_token\x18\x03 
\x01(\tR\rnextPageToken\"\xa7\x02\n\x19RetrieveDeviationsRequest\x12 \n\tpage_size\x18\x01 \x01(\x05H\x00R\x08pageSize\x88\x01\x01\x12\x1d\n\npage_token\x18\x02 \x01(\tR\tpageToken\x12,\n\x12ingestion_ts_start\x18\x03 \x01(\x03R\x10ingestionTsStart\x12(\n\x10ingestion_ts_end\x18\x04 \x01(\x03R\x0eingestionTsEnd\x12%\n\x05state\x18\x05 \x03(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1f\n\x0bobject_type\x18\x06 \x03(\tR\nobjectType\x12\x1b\n\tbranch_id\x18\x07 \x03(\tR\x08\x62ranchIdB\x0c\n\n_page_size\">\n\x0e\x44\x65viationError\x12\x18\n\x07message\x18\x01 \x01(\tR\x07message\x12\x12\n\x04\x63ode\x18\x02 \x01(\x05R\x04\x63ode\"\xba\x01\n\x06\x43hange\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1f\n\x0bobject_type\x18\x02 \x01(\tR\nobjectType\x12\x30\n\x14object_primary_value\x18\x03 \x01(\tR\x12objectPrimaryValue\x12\x1f\n\x0b\x63hange_type\x18\x04 \x01(\tR\nchangeType\x12\x16\n\x06\x62\x65\x66ore\x18\x05 \x01(\x0cR\x06\x62\x65\x66ore\x12\x14\n\x05\x61\x66ter\x18\x06 \x01(\x0cR\x05\x61\x66ter\"\x9f\x03\n\tDeviation\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12!\n\x0cingestion_ts\x18\x02 \x01(\x03R\x0bingestionTs\x12$\n\x0elast_update_ts\x18\x03 \x01(\x03R\x0clastUpdateTs\x12\x12\n\x04name\x18\x04 \x01(\tR\x04name\x12\x16\n\x06source\x18\x05 \x01(\tR\x06source\x12%\n\x05state\x18\x06 \x01(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1f\n\x0bobject_type\x18\x07 \x01(\tR\nobjectType\x12 \n\tbranch_id\x18\x08 \x01(\tH\x00R\x08\x62ranchId\x88\x01\x01\x12\x39\n\x0fingested_entity\x18\t \x01(\x0b\x32\x10.diode.v1.EntityR\x0eingestedEntity\x12.\n\x05\x65rror\x18\n \x01(\x0b\x32\x18.diode.v1.DeviationErrorR\x05\x65rror\x12*\n\x07\x63hanges\x18\x0b \x03(\x0b\x32\x10.diode.v1.ChangeR\x07\x63hangesB\x0c\n\n_branch_id\"y\n\x1aRetrieveDeviationsResponse\x12\x33\n\ndeviations\x18\x01 \x03(\x0b\x32\x13.diode.v1.DeviationR\ndeviations\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\".\n\x1cRetrieveDeviationByIDRequest\x12\x0e\n\x02id\x18\x01 
\x01(\tR\x02id\"R\n\x1dRetrieveDeviationByIDResponse\x12\x31\n\tdeviation\x18\x01 \x01(\x0b\x32\x13.diode.v1.DeviationR\tdeviation*w\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\n\n\x06QUEUED\x10\x01\x12\x08\n\x04OPEN\x10\x02\x12\x0b\n\x07\x41PPLIED\x10\x03\x12\n\n\x06\x46\x41ILED\x10\x04\x12\x0e\n\nNO_CHANGES\x10\x05\x12\x0b\n\x07IGNORED\x10\x06\x12\x0b\n\x07\x45RRORED\x10\x07\x32\xcd\x02\n\x11ReconcilerService\x12m\n\x15RetrieveIngestionLogs\x12&.diode.v1.RetrieveIngestionLogsRequest\x1a\'.diode.v1.RetrieveIngestionLogsResponse\"\x03\x88\x02\x01\x12_\n\x12RetrieveDeviations\x12#.diode.v1.RetrieveDeviationsRequest\x1a$.diode.v1.RetrieveDeviationsResponse\x12h\n\x15RetrieveDeviationByID\x12&.diode.v1.RetrieveDeviationByIDRequest\x1a\'.diode.v1.RetrieveDeviationByIDResponseBDZBgithub.com/netboxlabs/diode/diode-server/gen/diode/v1/reconcilerpbb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19\x64iode/v1/reconciler.proto\x12\x08\x64iode.v1\x1a\x17\x64iode/v1/ingester.proto\x1a\x17validate/validate.proto\"\xbe\x02\n\x0eIngestionError\x12\x18\n\x07message\x18\x01 \x01(\tR\x07message\x12\x12\n\x04\x63ode\x18\x02 \x01(\x05R\x04\x63ode\x12:\n\x07\x64\x65tails\x18\x03 \x01(\x0b\x32 .diode.v1.IngestionError.DetailsR\x07\x64\x65tails\x1a\xc1\x01\n\x07\x44\x65tails\x12\"\n\rchange_set_id\x18\x01 \x01(\tR\x0b\x63hangeSetId\x12\x16\n\x06result\x18\x02 \x01(\tR\x06result\x12>\n\x06\x65rrors\x18\x03 \x03(\x0b\x32&.diode.v1.IngestionError.Details.ErrorR\x06\x65rrors\x1a:\n\x05\x45rror\x12\x14\n\x05\x65rror\x18\x01 \x01(\tR\x05\x65rror\x12\x1b\n\tchange_id\x18\x02 \x01(\tR\x08\x63hangeId\"\x97\x01\n\x10IngestionMetrics\x12\x14\n\x05total\x18\x01 \x01(\x05R\x05total\x12\x16\n\x06queued\x18\x02 \x01(\x05R\x06queued\x12\x1e\n\nreconciled\x18\x03 \x01(\x05R\nreconciled\x12\x16\n\x06\x66\x61iled\x18\x04 \x01(\x05R\x06\x66\x61iled\x12\x1d\n\nno_changes\x18\x05 \x01(\x05R\tnoChanges\"\x9e\x01\n\tChangeSet\x12\x0e\n\x02id\x18\x01 
\x01(\tR\x02id\x12\x12\n\x04\x64\x61ta\x18\x02 \x01(\x0cR\x04\x64\x61ta\x12 \n\tbranch_id\x18\x03 \x01(\tH\x00R\x08\x62ranchId\x88\x01\x01\x12*\n\x0e\x64\x65viation_name\x18\x04 \x01(\tH\x01R\rdeviationName\x88\x01\x01\x42\x0c\n\n_branch_idB\x11\n\x0f_deviation_name\"\x8e\x04\n\x0cIngestionLog\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1f\n\tdata_type\x18\x02 \x01(\tB\x02\x18\x01R\x08\x64\x61taType\x12%\n\x05state\x18\x03 \x01(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1d\n\nrequest_id\x18\x04 \x01(\tR\trequestId\x12!\n\x0cingestion_ts\x18\x05 \x01(\x03R\x0bingestionTs\x12*\n\x11producer_app_name\x18\x06 \x01(\tR\x0fproducerAppName\x12\x30\n\x14producer_app_version\x18\x07 \x01(\tR\x12producerAppVersion\x12\x19\n\x08sdk_name\x18\x08 \x01(\tR\x07sdkName\x12\x1f\n\x0bsdk_version\x18\t \x01(\tR\nsdkVersion\x12(\n\x06\x65ntity\x18\n \x01(\x0b\x32\x10.diode.v1.EntityR\x06\x65ntity\x12.\n\x05\x65rror\x18\x0b \x01(\x0b\x32\x18.diode.v1.IngestionErrorR\x05\x65rror\x12\x32\n\nchange_set\x18\x0c \x01(\x0b\x32\x13.diode.v1.ChangeSetR\tchangeSet\x12\x1f\n\x0bobject_type\x18\r \x01(\tR\nobjectType\x12\x1b\n\tsource_ts\x18\x0e \x01(\x03R\x08sourceTs\"\xff\x02\n\x1cRetrieveIngestionLogsRequest\x12 \n\tpage_size\x18\x01 \x01(\x05H\x00R\x08pageSize\x88\x01\x01\x12*\n\x05state\x18\x02 \x01(\x0e\x32\x0f.diode.v1.StateH\x01R\x05state\x88\x01\x01\x12\x1f\n\tdata_type\x18\x03 \x01(\tB\x02\x18\x01R\x08\x64\x61taType\x12\x1d\n\nrequest_id\x18\x04 \x01(\tR\trequestId\x12,\n\x12ingestion_ts_start\x18\x05 \x01(\x03R\x10ingestionTsStart\x12(\n\x10ingestion_ts_end\x18\x06 \x01(\x03R\x0eingestionTsEnd\x12\x1d\n\npage_token\x18\x07 \x01(\tR\tpageToken\x12!\n\x0conly_metrics\x18\x08 \x01(\x08R\x0bonlyMetrics\x12\x1f\n\x0bobject_type\x18\t \x01(\tR\nobjectTypeB\x0c\n\n_page_sizeB\x08\n\x06_state\"\xa9\x01\n\x1dRetrieveIngestionLogsResponse\x12*\n\x04logs\x18\x01 \x03(\x0b\x32\x16.diode.v1.IngestionLogR\x04logs\x12\x34\n\x07metrics\x18\x02 
\x01(\x0b\x32\x1a.diode.v1.IngestionMetricsR\x07metrics\x12&\n\x0fnext_page_token\x18\x03 \x01(\tR\rnextPageToken\"\xa7\x02\n\x19RetrieveDeviationsRequest\x12 \n\tpage_size\x18\x01 \x01(\x05H\x00R\x08pageSize\x88\x01\x01\x12\x1d\n\npage_token\x18\x02 \x01(\tR\tpageToken\x12,\n\x12ingestion_ts_start\x18\x03 \x01(\x03R\x10ingestionTsStart\x12(\n\x10ingestion_ts_end\x18\x04 \x01(\x03R\x0eingestionTsEnd\x12%\n\x05state\x18\x05 \x03(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1f\n\x0bobject_type\x18\x06 \x03(\tR\nobjectType\x12\x1b\n\tbranch_id\x18\x07 \x03(\tR\x08\x62ranchIdB\x0c\n\n_page_size\">\n\x0e\x44\x65viationError\x12\x18\n\x07message\x18\x01 \x01(\tR\x07message\x12\x12\n\x04\x63ode\x18\x02 \x01(\x05R\x04\x63ode\"\xba\x01\n\x06\x43hange\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1f\n\x0bobject_type\x18\x02 \x01(\tR\nobjectType\x12\x30\n\x14object_primary_value\x18\x03 \x01(\tR\x12objectPrimaryValue\x12\x1f\n\x0b\x63hange_type\x18\x04 \x01(\tR\nchangeType\x12\x16\n\x06\x62\x65\x66ore\x18\x05 \x01(\x0cR\x06\x62\x65\x66ore\x12\x14\n\x05\x61\x66ter\x18\x06 \x01(\x0cR\x05\x61\x66ter\"\xbc\x03\n\tDeviation\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12!\n\x0cingestion_ts\x18\x02 \x01(\x03R\x0bingestionTs\x12$\n\x0elast_update_ts\x18\x03 \x01(\x03R\x0clastUpdateTs\x12\x12\n\x04name\x18\x04 \x01(\tR\x04name\x12\x16\n\x06source\x18\x05 \x01(\tR\x06source\x12%\n\x05state\x18\x06 \x01(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1f\n\x0bobject_type\x18\x07 \x01(\tR\nobjectType\x12 \n\tbranch_id\x18\x08 \x01(\tH\x00R\x08\x62ranchId\x88\x01\x01\x12\x39\n\x0fingested_entity\x18\t \x01(\x0b\x32\x10.diode.v1.EntityR\x0eingestedEntity\x12.\n\x05\x65rror\x18\n \x01(\x0b\x32\x18.diode.v1.DeviationErrorR\x05\x65rror\x12*\n\x07\x63hanges\x18\x0b \x03(\x0b\x32\x10.diode.v1.ChangeR\x07\x63hanges\x12\x1b\n\tsource_ts\x18\x0c \x01(\x03R\x08sourceTsB\x0c\n\n_branch_id\"y\n\x1aRetrieveDeviationsResponse\x12\x33\n\ndeviations\x18\x01 
\x03(\x0b\x32\x13.diode.v1.DeviationR\ndeviations\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\".\n\x1cRetrieveDeviationByIDRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\"R\n\x1dRetrieveDeviationByIDResponse\x12\x31\n\tdeviation\x18\x01 \x01(\x0b\x32\x13.diode.v1.DeviationR\tdeviation*w\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\n\n\x06QUEUED\x10\x01\x12\x08\n\x04OPEN\x10\x02\x12\x0b\n\x07\x41PPLIED\x10\x03\x12\n\n\x06\x46\x41ILED\x10\x04\x12\x0e\n\nNO_CHANGES\x10\x05\x12\x0b\n\x07IGNORED\x10\x06\x12\x0b\n\x07\x45RRORED\x10\x07\x32\xcd\x02\n\x11ReconcilerService\x12m\n\x15RetrieveIngestionLogs\x12&.diode.v1.RetrieveIngestionLogsRequest\x1a\'.diode.v1.RetrieveIngestionLogsResponse\"\x03\x88\x02\x01\x12_\n\x12RetrieveDeviations\x12#.diode.v1.RetrieveDeviationsRequest\x1a$.diode.v1.RetrieveDeviationsResponse\x12h\n\x15RetrieveDeviationByID\x12&.diode.v1.RetrieveDeviationByIDRequest\x1a\'.diode.v1.RetrieveDeviationByIDResponseBDZBgithub.com/netboxlabs/diode/diode-server/gen/diode/v1/reconcilerpbb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -30,8 +30,8 @@ _globals['_RETRIEVEINGESTIONLOGSREQUEST'].fields_by_name['data_type']._serialized_options = b'\030\001' _globals['_RECONCILERSERVICE'].methods_by_name['RetrieveIngestionLogs']._loaded_options = None _globals['_RECONCILERSERVICE'].methods_by_name['RetrieveIngestionLogs']._serialized_options = b'\210\002\001' - _globals['_STATE']._serialized_start=3007 - _globals['_STATE']._serialized_end=3126 + _globals['_STATE']._serialized_start=3065 + _globals['_STATE']._serialized_end=3184 _globals['_INGESTIONERROR']._serialized_start=90 _globals['_INGESTIONERROR']._serialized_end=408 _globals['_INGESTIONERROR_DETAILS']._serialized_start=215 @@ -43,25 +43,25 @@ _globals['_CHANGESET']._serialized_start=565 _globals['_CHANGESET']._serialized_end=723 _globals['_INGESTIONLOG']._serialized_start=726 - _globals['_INGESTIONLOG']._serialized_end=1223 - 
_globals['_RETRIEVEINGESTIONLOGSREQUEST']._serialized_start=1226 - _globals['_RETRIEVEINGESTIONLOGSREQUEST']._serialized_end=1609 - _globals['_RETRIEVEINGESTIONLOGSRESPONSE']._serialized_start=1612 - _globals['_RETRIEVEINGESTIONLOGSRESPONSE']._serialized_end=1781 - _globals['_RETRIEVEDEVIATIONSREQUEST']._serialized_start=1784 - _globals['_RETRIEVEDEVIATIONSREQUEST']._serialized_end=2079 - _globals['_DEVIATIONERROR']._serialized_start=2081 - _globals['_DEVIATIONERROR']._serialized_end=2143 - _globals['_CHANGE']._serialized_start=2146 - _globals['_CHANGE']._serialized_end=2332 - _globals['_DEVIATION']._serialized_start=2335 - _globals['_DEVIATION']._serialized_end=2750 - _globals['_RETRIEVEDEVIATIONSRESPONSE']._serialized_start=2752 - _globals['_RETRIEVEDEVIATIONSRESPONSE']._serialized_end=2873 - _globals['_RETRIEVEDEVIATIONBYIDREQUEST']._serialized_start=2875 - _globals['_RETRIEVEDEVIATIONBYIDREQUEST']._serialized_end=2921 - _globals['_RETRIEVEDEVIATIONBYIDRESPONSE']._serialized_start=2923 - _globals['_RETRIEVEDEVIATIONBYIDRESPONSE']._serialized_end=3005 - _globals['_RECONCILERSERVICE']._serialized_start=3129 - _globals['_RECONCILERSERVICE']._serialized_end=3462 + _globals['_INGESTIONLOG']._serialized_end=1252 + _globals['_RETRIEVEINGESTIONLOGSREQUEST']._serialized_start=1255 + _globals['_RETRIEVEINGESTIONLOGSREQUEST']._serialized_end=1638 + _globals['_RETRIEVEINGESTIONLOGSRESPONSE']._serialized_start=1641 + _globals['_RETRIEVEINGESTIONLOGSRESPONSE']._serialized_end=1810 + _globals['_RETRIEVEDEVIATIONSREQUEST']._serialized_start=1813 + _globals['_RETRIEVEDEVIATIONSREQUEST']._serialized_end=2108 + _globals['_DEVIATIONERROR']._serialized_start=2110 + _globals['_DEVIATIONERROR']._serialized_end=2172 + _globals['_CHANGE']._serialized_start=2175 + _globals['_CHANGE']._serialized_end=2361 + _globals['_DEVIATION']._serialized_start=2364 + _globals['_DEVIATION']._serialized_end=2808 + _globals['_RETRIEVEDEVIATIONSRESPONSE']._serialized_start=2810 + 
_globals['_RETRIEVEDEVIATIONSRESPONSE']._serialized_end=2931 + _globals['_RETRIEVEDEVIATIONBYIDREQUEST']._serialized_start=2933 + _globals['_RETRIEVEDEVIATIONBYIDREQUEST']._serialized_end=2979 + _globals['_RETRIEVEDEVIATIONBYIDRESPONSE']._serialized_start=2981 + _globals['_RETRIEVEDEVIATIONBYIDRESPONSE']._serialized_end=3063 + _globals['_RECONCILERSERVICE']._serialized_start=3187 + _globals['_RECONCILERSERVICE']._serialized_end=3520 # @@protoc_insertion_point(module_scope) diff --git a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.pyi b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.pyi index 0747660..200ffb3 100644 --- a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.pyi +++ b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.pyi @@ -80,7 +80,7 @@ class ChangeSet(_message.Message): def __init__(self, id: _Optional[str] = ..., data: _Optional[bytes] = ..., branch_id: _Optional[str] = ..., deviation_name: _Optional[str] = ...) -> None: ... class IngestionLog(_message.Message): - __slots__ = ("id", "data_type", "state", "request_id", "ingestion_ts", "producer_app_name", "producer_app_version", "sdk_name", "sdk_version", "entity", "error", "change_set", "object_type") + __slots__ = ("id", "data_type", "state", "request_id", "ingestion_ts", "producer_app_name", "producer_app_version", "sdk_name", "sdk_version", "entity", "error", "change_set", "object_type", "source_ts") ID_FIELD_NUMBER: _ClassVar[int] DATA_TYPE_FIELD_NUMBER: _ClassVar[int] STATE_FIELD_NUMBER: _ClassVar[int] @@ -94,6 +94,7 @@ class IngestionLog(_message.Message): ERROR_FIELD_NUMBER: _ClassVar[int] CHANGE_SET_FIELD_NUMBER: _ClassVar[int] OBJECT_TYPE_FIELD_NUMBER: _ClassVar[int] + SOURCE_TS_FIELD_NUMBER: _ClassVar[int] id: str data_type: str state: State @@ -107,7 +108,8 @@ class IngestionLog(_message.Message): error: IngestionError change_set: ChangeSet object_type: str - def __init__(self, id: _Optional[str] = ..., data_type: _Optional[str] = ..., state: _Optional[_Union[State, 
str]] = ..., request_id: _Optional[str] = ..., ingestion_ts: _Optional[int] = ..., producer_app_name: _Optional[str] = ..., producer_app_version: _Optional[str] = ..., sdk_name: _Optional[str] = ..., sdk_version: _Optional[str] = ..., entity: _Optional[_Union[_ingester_pb2.Entity, _Mapping]] = ..., error: _Optional[_Union[IngestionError, _Mapping]] = ..., change_set: _Optional[_Union[ChangeSet, _Mapping]] = ..., object_type: _Optional[str] = ...) -> None: ... + source_ts: int + def __init__(self, id: _Optional[str] = ..., data_type: _Optional[str] = ..., state: _Optional[_Union[State, str]] = ..., request_id: _Optional[str] = ..., ingestion_ts: _Optional[int] = ..., producer_app_name: _Optional[str] = ..., producer_app_version: _Optional[str] = ..., sdk_name: _Optional[str] = ..., sdk_version: _Optional[str] = ..., entity: _Optional[_Union[_ingester_pb2.Entity, _Mapping]] = ..., error: _Optional[_Union[IngestionError, _Mapping]] = ..., change_set: _Optional[_Union[ChangeSet, _Mapping]] = ..., object_type: _Optional[str] = ..., source_ts: _Optional[int] = ...) -> None: ... class RetrieveIngestionLogsRequest(_message.Message): __slots__ = ("page_size", "state", "data_type", "request_id", "ingestion_ts_start", "ingestion_ts_end", "page_token", "only_metrics", "object_type") @@ -184,7 +186,7 @@ class Change(_message.Message): def __init__(self, id: _Optional[str] = ..., object_type: _Optional[str] = ..., object_primary_value: _Optional[str] = ..., change_type: _Optional[str] = ..., before: _Optional[bytes] = ..., after: _Optional[bytes] = ...) -> None: ... 
class Deviation(_message.Message): - __slots__ = ("id", "ingestion_ts", "last_update_ts", "name", "source", "state", "object_type", "branch_id", "ingested_entity", "error", "changes") + __slots__ = ("id", "ingestion_ts", "last_update_ts", "name", "source", "state", "object_type", "branch_id", "ingested_entity", "error", "changes", "source_ts") ID_FIELD_NUMBER: _ClassVar[int] INGESTION_TS_FIELD_NUMBER: _ClassVar[int] LAST_UPDATE_TS_FIELD_NUMBER: _ClassVar[int] @@ -196,6 +198,7 @@ class Deviation(_message.Message): INGESTED_ENTITY_FIELD_NUMBER: _ClassVar[int] ERROR_FIELD_NUMBER: _ClassVar[int] CHANGES_FIELD_NUMBER: _ClassVar[int] + SOURCE_TS_FIELD_NUMBER: _ClassVar[int] id: str ingestion_ts: int last_update_ts: int @@ -207,7 +210,8 @@ class Deviation(_message.Message): ingested_entity: _ingester_pb2.Entity error: DeviationError changes: _containers.RepeatedCompositeFieldContainer[Change] - def __init__(self, id: _Optional[str] = ..., ingestion_ts: _Optional[int] = ..., last_update_ts: _Optional[int] = ..., name: _Optional[str] = ..., source: _Optional[str] = ..., state: _Optional[_Union[State, str]] = ..., object_type: _Optional[str] = ..., branch_id: _Optional[str] = ..., ingested_entity: _Optional[_Union[_ingester_pb2.Entity, _Mapping]] = ..., error: _Optional[_Union[DeviationError, _Mapping]] = ..., changes: _Optional[_Iterable[_Union[Change, _Mapping]]] = ...) -> None: ... + source_ts: int + def __init__(self, id: _Optional[str] = ..., ingestion_ts: _Optional[int] = ..., last_update_ts: _Optional[int] = ..., name: _Optional[str] = ..., source: _Optional[str] = ..., state: _Optional[_Union[State, str]] = ..., object_type: _Optional[str] = ..., branch_id: _Optional[str] = ..., ingested_entity: _Optional[_Union[_ingester_pb2.Entity, _Mapping]] = ..., error: _Optional[_Union[DeviationError, _Mapping]] = ..., changes: _Optional[_Iterable[_Union[Change, _Mapping]]] = ..., source_ts: _Optional[int] = ...) -> None: ... 
class RetrieveDeviationsResponse(_message.Message): __slots__ = ("deviations", "next_page_token") From 13115bd17dc0ca4d75b3b759c6e1881866f33b8c Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Thu, 6 Feb 2025 21:21:00 +0100 Subject: [PATCH 13/52] feat: remove ingestion logs view (#58) * chore: remove reconciler sdk Signed-off-by: Michal Fiedorowicz * fix: docker setup with netbox 4.1.11 for netbox-branching plugin Signed-off-by: Michal Fiedorowicz * feat: remove ingestion logs view Signed-off-by: Michal Fiedorowicz * chore: update CODEOWNERS Signed-off-by: Michal Fiedorowicz --------- Signed-off-by: Michal Fiedorowicz --- .github/CODEOWNERS | 2 +- Makefile | 2 +- docker/Dockerfile-diode-netbox-plugin | 2 +- docker/docker-compose.yaml | 2 +- netbox_diode_plugin/models.py | 8 +- netbox_diode_plugin/navigation.py | 7 - netbox_diode_plugin/reconciler/__init__.py | 3 - .../reconciler/sdk/__init__.py | 3 - netbox_diode_plugin/reconciler/sdk/client.py | 256 --------- .../reconciler/sdk/exceptions.py | 33 -- .../reconciler/sdk/v1/__init__.py | 3 - .../reconciler/sdk/v1/ingester_pb2.py | 217 -------- .../reconciler/sdk/v1/ingester_pb2.pyi | 368 ------------- .../reconciler/sdk/v1/ingester_pb2_grpc.py | 70 --- .../reconciler/sdk/v1/reconciler_pb2.py | 67 --- .../reconciler/sdk/v1/reconciler_pb2.pyi | 234 --------- .../reconciler/sdk/v1/reconciler_pb2_grpc.py | 138 ----- .../reconciler/sdk/validate/__init__.py | 3 - .../reconciler/sdk/validate/validate_pb2.py | 76 --- .../reconciler/sdk/validate/validate_pb2.pyi | 494 ------------------ .../sdk/validate/validate_pb2_grpc.py | 4 - netbox_diode_plugin/tables.py | 142 ----- .../templates/diode/ingestion_logs.html | 141 ----- .../diode/ingestion_logs_paginator.html | 17 - .../templates/diode/ingestion_logs_table.html | 109 ---- netbox_diode_plugin/templatetags/__init__.py | 3 - .../templatetags/diode_filters.py | 35 -- .../tests/test_reconciler_sdk_client.py | 331 ------------ netbox_diode_plugin/tests/test_tables.py | 84 
--- .../tests/test_templatetags.py | 62 --- netbox_diode_plugin/tests/test_views.py | 189 +------ netbox_diode_plugin/urls.py | 1 - netbox_diode_plugin/views.py | 90 +--- 33 files changed, 11 insertions(+), 3185 deletions(-) delete mode 100644 netbox_diode_plugin/reconciler/__init__.py delete mode 100644 netbox_diode_plugin/reconciler/sdk/__init__.py delete mode 100644 netbox_diode_plugin/reconciler/sdk/client.py delete mode 100644 netbox_diode_plugin/reconciler/sdk/exceptions.py delete mode 100644 netbox_diode_plugin/reconciler/sdk/v1/__init__.py delete mode 100644 netbox_diode_plugin/reconciler/sdk/v1/ingester_pb2.py delete mode 100644 netbox_diode_plugin/reconciler/sdk/v1/ingester_pb2.pyi delete mode 100644 netbox_diode_plugin/reconciler/sdk/v1/ingester_pb2_grpc.py delete mode 100644 netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.py delete mode 100644 netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.pyi delete mode 100644 netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2_grpc.py delete mode 100644 netbox_diode_plugin/reconciler/sdk/validate/__init__.py delete mode 100644 netbox_diode_plugin/reconciler/sdk/validate/validate_pb2.py delete mode 100644 netbox_diode_plugin/reconciler/sdk/validate/validate_pb2.pyi delete mode 100644 netbox_diode_plugin/reconciler/sdk/validate/validate_pb2_grpc.py delete mode 100644 netbox_diode_plugin/tables.py delete mode 100644 netbox_diode_plugin/templates/diode/ingestion_logs.html delete mode 100644 netbox_diode_plugin/templates/diode/ingestion_logs_paginator.html delete mode 100644 netbox_diode_plugin/templates/diode/ingestion_logs_table.html delete mode 100644 netbox_diode_plugin/templatetags/__init__.py delete mode 100644 netbox_diode_plugin/templatetags/diode_filters.py delete mode 100644 netbox_diode_plugin/tests/test_reconciler_sdk_client.py delete mode 100644 netbox_diode_plugin/tests/test_tables.py delete mode 100644 netbox_diode_plugin/tests/test_templatetags.py diff --git a/.github/CODEOWNERS 
b/.github/CODEOWNERS index 47b6708..ab07f99 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1 +1 @@ -* @leoparente @mfiedorowicz +* @leoparente @ltucker @mfiedorowicz diff --git a/Makefile b/Makefile index 2587f4e..6145666 100644 --- a/Makefile +++ b/Makefile @@ -19,5 +19,5 @@ docker-compose-netbox-plugin-test: .PHONY: docker-compose-netbox-plugin-test-cover docker-compose-netbox-plugin-test-cover: - -@$(DOCKER_COMPOSE) -f docker/docker-compose.yaml -f docker/docker-compose.test.yaml run --rm -u root -e COVERAGE_FILE=/opt/netbox/netbox/coverage/.coverage netbox sh -c "coverage run --source=netbox_diode_plugin --omit=*_pb2*,*/migrations/* ./manage.py test --keepdb netbox_diode_plugin && coverage xml -o /opt/netbox/netbox/coverage/report.xml && coverage report -m | tee /opt/netbox/netbox/coverage/report.txt" + -@$(DOCKER_COMPOSE) -f docker/docker-compose.yaml -f docker/docker-compose.test.yaml run --rm -u root -e COVERAGE_FILE=/opt/netbox/netbox/coverage/.coverage netbox sh -c "coverage run --source=netbox_diode_plugin --omit=*/migrations/* ./manage.py test --keepdb netbox_diode_plugin && coverage xml -o /opt/netbox/netbox/coverage/report.xml && coverage report -m | tee /opt/netbox/netbox/coverage/report.txt" @$(MAKE) docker-compose-netbox-plugin-down diff --git a/docker/Dockerfile-diode-netbox-plugin b/docker/Dockerfile-diode-netbox-plugin index 2453ef7..ec3f9c6 100644 --- a/docker/Dockerfile-diode-netbox-plugin +++ b/docker/Dockerfile-diode-netbox-plugin @@ -1,4 +1,4 @@ -FROM netboxcommunity/netbox:v4.1-3.0.1 +FROM netboxcommunity/netbox:v4.1.11-3.0.2 COPY ./netbox/configuration/ /etc/netbox/config/ RUN chmod 755 /etc/netbox/config/* && \ diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 81dd163..c092668 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -1,7 +1,7 @@ name: diode-netbox-plugin services: netbox: &netbox - image: netboxcommunity/netbox:v4.1-3.0.1-diode-netbox-plugin + image: 
netboxcommunity/netbox:v4.1.11-3.0.2-diode-netbox-plugin build: context: . dockerfile: Dockerfile-diode-netbox-plugin diff --git a/netbox_diode_plugin/models.py b/netbox_diode_plugin/models.py index fe0796b..a928ed1 100644 --- a/netbox_diode_plugin/models.py +++ b/netbox_diode_plugin/models.py @@ -1,19 +1,21 @@ # !/usr/bin/env python # Copyright 2024 NetBox Labs Inc """Diode NetBox Plugin - Models.""" +from urllib.parse import urlparse from django.core.exceptions import ValidationError from django.db import models from django.urls import reverse from netbox.models import NetBoxModel -from netbox_diode_plugin.reconciler.sdk.client import parse_target - def diode_target_validator(target): """Diode target validator.""" try: - _, _, _ = parse_target(target) + parsed_target = urlparse(target) + + if parsed_target.scheme not in ["grpc", "grpcs"]: + raise ValueError("target should start with grpc:// or grpcs://") except ValueError as exc: raise ValidationError(exc) diff --git a/netbox_diode_plugin/navigation.py b/netbox_diode_plugin/navigation.py index d5ba204..dc70888 100644 --- a/netbox_diode_plugin/navigation.py +++ b/netbox_diode_plugin/navigation.py @@ -4,12 +4,6 @@ from netbox.plugins import PluginMenu, PluginMenuItem -ingestion_logs = { - "link": "plugins:netbox_diode_plugin:ingestion_logs", - "link_text": "Ingestion Logs", - "staff_only": True, -} - settings = { "link": "plugins:netbox_diode_plugin:settings", "link_text": "Settings", @@ -23,7 +17,6 @@ ( "Diode", ( - PluginMenuItem(**ingestion_logs), PluginMenuItem(**settings), ), ), diff --git a/netbox_diode_plugin/reconciler/__init__.py b/netbox_diode_plugin/reconciler/__init__.py deleted file mode 100644 index 96bc1bd..0000000 --- a/netbox_diode_plugin/reconciler/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode NetBox Plugin - Reconciler.""" diff --git a/netbox_diode_plugin/reconciler/sdk/__init__.py b/netbox_diode_plugin/reconciler/sdk/__init__.py 
deleted file mode 100644 index 8945f40..0000000 --- a/netbox_diode_plugin/reconciler/sdk/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode NetBox Plugin - Reconciler - SDK.""" diff --git a/netbox_diode_plugin/reconciler/sdk/client.py b/netbox_diode_plugin/reconciler/sdk/client.py deleted file mode 100644 index 3273618..0000000 --- a/netbox_diode_plugin/reconciler/sdk/client.py +++ /dev/null @@ -1,256 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode NetBox Plugin - Reconciler - SDK - Client.""" - -import collections -import logging -import platform -from urllib.parse import urlparse - -import certifi -import grpc - -import netbox_diode_plugin -from netbox_diode_plugin.reconciler.sdk.exceptions import ReconcilerClientError -from netbox_diode_plugin.reconciler.sdk.v1 import reconciler_pb2, reconciler_pb2_grpc - -_LOGGER = logging.getLogger(__name__) - - -def _load_certs() -> bytes: - """Loads cacert.pem.""" - with open(certifi.where(), "rb") as f: - return f.read() - - -def parse_target(target: str) -> tuple[str, str, bool]: - """Parse the target into authority, path and tls_verify.""" - parsed_target = urlparse(target) - - if parsed_target.scheme not in ["grpc", "grpcs"]: - raise ValueError("target should start with grpc:// or grpcs://") - - tls_verify = parsed_target.scheme == "grpcs" - - authority = parsed_target.netloc - - if ":" not in authority: - authority += ":443" - - return authority, parsed_target.path, tls_verify - - -class ReconcilerClient: - """Reconciler Client.""" - - _name = "reconciler-sdk-python" - _version = "0.0.1" - _channel = None - _stub = None - - def __init__( - self, - target: str, - api_key: str, - ): - """Initiate a new client.""" - self._target, self._path, self._tls_verify = parse_target(target) - - plugin_config = netbox_diode_plugin.config - - self._app_name = plugin_config.name - self._app_version = plugin_config.version - self._platform = 
platform.platform() - self._python_version = platform.python_version() - - self._metadata = ( - ("authorization", api_key), - ("platform", self._platform), - ("python-version", self._python_version), - ) - - channel_opts = ( - ("grpc.primary_user_agent", f"{self._name}/{self._version} {self._app_name}/{self._app_version}"), - ) - - if self._tls_verify: - _LOGGER.debug("Setting up gRPC secure channel") - self._channel = grpc.secure_channel( - self._target, - grpc.ssl_channel_credentials( - root_certificates=_load_certs(), - ), - options=channel_opts, - ) - else: - _LOGGER.debug("Setting up gRPC insecure channel") - self._channel = grpc.insecure_channel( - target=self._target, - options=channel_opts, - ) - - channel = self._channel - - if self._path: - _LOGGER.debug(f"Setting up gRPC interceptor for path: {self._path}") - rpc_method_interceptor = ReconcilerMethodClientInterceptor(subpath=self._path) - - intercept_channel = grpc.intercept_channel( - self._channel, rpc_method_interceptor - ) - channel = intercept_channel - - self._stub = reconciler_pb2_grpc.ReconcilerServiceStub(channel) - - - @property - def name(self) -> str: - """Retrieve the name.""" - return self._name - - @property - def version(self) -> str: - """Retrieve the version.""" - return self._version - - @property - def target(self) -> str: - """Retrieve the target.""" - return self._target - - @property - def path(self) -> str: - """Retrieve the path.""" - return self._path - - @property - def tls_verify(self) -> bool: - """Retrieve the tls_verify.""" - return self._tls_verify - - @property - def app_name(self) -> str: - """Retrieve the app name.""" - return self._app_name - - @property - def app_version(self) -> str: - """Retrieve the app version.""" - return self._app_version - - @property - def channel(self) -> grpc.Channel: - """Retrieve the channel.""" - return self._channel - - def __enter__(self): - """Enters the runtime context related to the channel object.""" - return self - - def 
__exit__(self, exc_type, exc_value, exc_traceback): - """Exits the runtime context related to the channel object.""" - self.close() - - def close(self): - """Close the channel.""" - self._channel.close() - - def retrieve_ingestion_logs( - self, - state: str | None = None, - data_type: str | None = None, - request_id: str | None = None, - ingestion_ts_start: int | None = None, - ingestion_ts_end: int | None = None, - page_token: str | None = None, - page_size: int = 100, - only_metrics: bool = False, - ) -> reconciler_pb2.RetrieveIngestionLogsResponse: - """Retrieve ingestion logs.""" - try: - request = reconciler_pb2.RetrieveIngestionLogsRequest( - page_size=page_size, - state=state, - data_type=data_type, - request_id=request_id, - ingestion_ts_start=ingestion_ts_start, - ingestion_ts_end=ingestion_ts_end, - page_token=page_token, - only_metrics=only_metrics, - ) - - return self._stub.RetrieveIngestionLogs(request, metadata=self._metadata) - except grpc.RpcError as err: - raise ReconcilerClientError(err) from err - - -class _ClientCallDetails( - collections.namedtuple( - "_ClientCallDetails", - ( - "method", - "timeout", - "metadata", - "credentials", - "wait_for_ready", - "compression", - ), - ), - grpc.ClientCallDetails, -): - """ - _ClientCallDetails class. - - This class describes an RPC to be invoked and is required for custom gRPC interceptors. - - """ - - pass - - -class ReconcilerMethodClientInterceptor( - grpc.UnaryUnaryClientInterceptor, grpc.StreamUnaryClientInterceptor -): - """ - Reconciler Method Client Interceptor class. - - This class is used to intercept the client calls and modify the method details. It inherits from - grpc.UnaryUnaryClientInterceptor and grpc.StreamUnaryClientInterceptor. - - Reconciler's default method generated from Protocol Buffers definition is /diode.v1.ReconcilerService/RetrieveIngestionLogs and in order - to use Diode targets with path (i.e. 
localhost:8081/this/is/custom/path), this interceptor is used to modify the - method details, by prepending the generated method name with the path extracted from initial target. - - """ - - def __init__(self, subpath): - """Initiate a new interceptor.""" - self._subpath = subpath - - def _intercept_call(self, continuation, client_call_details, request_or_iterator): - """Intercept call.""" - method = client_call_details.method - if client_call_details.method is not None: - method = f"{self._subpath}{client_call_details.method}" - - client_call_details = _ClientCallDetails( - method, - client_call_details.timeout, - client_call_details.metadata, - client_call_details.credentials, - client_call_details.wait_for_ready, - client_call_details.compression, - ) - - response = continuation(client_call_details, request_or_iterator) - return response - - def intercept_unary_unary(self, continuation, client_call_details, request): - """Intercept unary unary.""" - return self._intercept_call(continuation, client_call_details, request) - - def intercept_stream_unary( - self, continuation, client_call_details, request_iterator - ): - """Intercept stream unary.""" - return self._intercept_call(continuation, client_call_details, request_iterator) diff --git a/netbox_diode_plugin/reconciler/sdk/exceptions.py b/netbox_diode_plugin/reconciler/sdk/exceptions.py deleted file mode 100644 index 18a25b6..0000000 --- a/netbox_diode_plugin/reconciler/sdk/exceptions.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode NetBox Plugin - Reconciler - SDK - Exceptions.""" - -from grpc import RpcError, StatusCode - - -class ReconcilerClientError(RpcError): - """Reconciler Client Error.""" - - _status_code = None - _details = None - - def __init__(self, err: RpcError): - """Initialize ReconcilerClientError.""" - self._status_code = err.code() - self._details = err.details() - - @property - def status_code(self): - """Return status code.""" - if 
isinstance(self._status_code, StatusCode): - return self._status_code.name - return self._status_code - - @property - def details(self): - """Return error details.""" - return self._details - - def __repr__(self): - """Return string representation.""" - return f"" diff --git a/netbox_diode_plugin/reconciler/sdk/v1/__init__.py b/netbox_diode_plugin/reconciler/sdk/v1/__init__.py deleted file mode 100644 index 0a4a1e8..0000000 --- a/netbox_diode_plugin/reconciler/sdk/v1/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode NetBox Plugin - Reconciler - SDK - V1.""" diff --git a/netbox_diode_plugin/reconciler/sdk/v1/ingester_pb2.py b/netbox_diode_plugin/reconciler/sdk/v1/ingester_pb2.py deleted file mode 100644 index 68e4bb0..0000000 --- a/netbox_diode_plugin/reconciler/sdk/v1/ingester_pb2.py +++ /dev/null @@ -1,217 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: diode/v1/ingester.proto -# Protobuf Python Version: 5.26.1 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from netbox_diode_plugin.reconciler.sdk.validate import validate_pb2 as validate_dot_validate__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17\x64iode/v1/ingester.proto\x12\x08\x64iode.v1\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17validate/validate.proto\"\xe4\x05\n\x06\x44\x65vice\x12\x1b\n\x04name\x18\x01 \x01(\tB\x07\xfa\x42\x04r\x02\x18@R\x04name\x12\x30\n\x0b\x64\x65vice_fqdn\x18\x02 
\x01(\tB\n\xfa\x42\x07r\x05\x10\x01\x18\xff\x01H\x00R\ndeviceFqdn\x88\x01\x01\x12\x35\n\x0b\x64\x65vice_type\x18\x03 \x01(\x0b\x32\x14.diode.v1.DeviceTypeR\ndeviceType\x12\"\n\x04role\x18\x04 \x01(\x0b\x32\x0e.diode.v1.RoleR\x04role\x12.\n\x08platform\x18\x05 \x01(\x0b\x32\x12.diode.v1.PlatformR\x08platform\x12$\n\x06serial\x18\x06 \x01(\tB\x07\xfa\x42\x04r\x02\x18\x32H\x01R\x06serial\x88\x01\x01\x12\"\n\x04site\x18\x07 \x01(\x0b\x32\x0e.diode.v1.SiteR\x04site\x12*\n\tasset_tag\x18\x08 \x01(\tB\x08\xfa\x42\x05r\x03\x18\xc8\x01H\x02R\x08\x61ssetTag\x88\x01\x01\x12\x63\n\x06status\x18\t \x01(\tBK\xfa\x42HrFR\x07offlineR\x06\x61\x63tiveR\x07plannedR\x06stagedR\x06\x66\x61iledR\tinventoryR\x0f\x64\x65\x63ommissioningR\x06status\x12/\n\x0b\x64\x65scription\x18\n \x01(\tB\x08\xfa\x42\x05r\x03\x18\xc8\x01H\x03R\x0b\x64\x65scription\x88\x01\x01\x12\x1f\n\x08\x63omments\x18\x0b \x01(\tH\x04R\x08\x63omments\x88\x01\x01\x12!\n\x04tags\x18\x0c \x03(\x0b\x32\r.diode.v1.TagR\x04tags\x12\x34\n\x0bprimary_ip4\x18\r \x01(\x0b\x32\x13.diode.v1.IPAddressR\nprimaryIp4\x12\x34\n\x0bprimary_ip6\x18\x0e \x01(\x0b\x32\x13.diode.v1.IPAddressR\nprimaryIp6B\x0e\n\x0c_device_fqdnB\t\n\x07_serialB\x0c\n\n_asset_tagB\x0e\n\x0c_descriptionB\x0b\n\t_comments\"\xef\x11\n\tInterface\x12\x32\n\x06\x64\x65vice\x18\x01 \x01(\x0b\x32\x10.diode.v1.DeviceB\x08\xfa\x42\x05\xa2\x01\x02\x08\x01R\x06\x64\x65vice\x12\x1d\n\x04name\x18\x02 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18@R\x04name\x12$\n\x05label\x18\x03 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18@H\x00R\x05label\x88\x01\x01\x12\xfa\x0c\n\x04type\x18\x04 
\x01(\tB\xe5\x0c\xfa\x42\xe1\x0cr\xde\x0cR\x07virtualR\x06\x62ridgeR\x03lagR\n100base-fxR\x0b\x31\x30\x30\x62\x61se-lfxR\n100base-txR\n100base-t1R\n1000base-tR\x0f\x31\x30\x30\x30\x62\x61se-x-gbicR\x0e\x31\x30\x30\x30\x62\x61se-x-sfpR\n2.5gbase-tR\x08\x35gbase-tR\t10gbase-tR\x0b\x31\x30gbase-cx4R\x0e\x31\x30gbase-x-sfppR\r10gbase-x-xfpR\x10\x31\x30gbase-x-xenpakR\x0c\x31\x30gbase-x-x2R\x0f\x32\x35gbase-x-sfp28R\x0f\x35\x30gbase-x-sfp56R\x0f\x34\x30gbase-x-qsfppR\x0f\x35\x30gbase-x-sfp28R\x0e\x31\x30\x30gbase-x-cfpR\x0f\x31\x30\x30gbase-x-cfp2R\x0f\x31\x30\x30gbase-x-cfp4R\x0e\x31\x30\x30gbase-x-cxpR\x0f\x31\x30\x30gbase-x-cpakR\x0f\x31\x30\x30gbase-x-dsfpR\x10\x31\x30\x30gbase-x-sfpddR\x11\x31\x30\x30gbase-x-qsfp28R\x11\x31\x30\x30gbase-x-qsfpddR\x0f\x32\x30\x30gbase-x-cfp2R\x11\x32\x30\x30gbase-x-qsfp56R\x11\x32\x30\x30gbase-x-qsfpddR\x0f\x34\x30\x30gbase-x-cfp2R\x12\x34\x30\x30gbase-x-qsfp112R\x11\x34\x30\x30gbase-x-qsfpddR\x0f\x34\x30\x30gbase-x-osfpR\x13\x34\x30\x30gbase-x-osfp-rhsR\x0f\x34\x30\x30gbase-x-cdfpR\x0f\x34\x30\x30gbase-x-cfp8R\x11\x38\x30\x30gbase-x-qsfpddR\x0f\x38\x30\x30gbase-x-osfpR\x0b\x31\x30\x30\x30\x62\x61se-kxR\n10gbase-krR\x0b\x31\x30gbase-kx4R\n25gbase-krR\x0b\x34\x30gbase-kr4R\n50gbase-krR\x0c\x31\x30\x30gbase-kp4R\x0c\x31\x30\x30gbase-kr2R\x0c\x31\x30\x30gbase-kr4R\x0bieee802.11aR\x0bieee802.11gR\x0bieee802.11nR\x0cieee802.11acR\x0cieee802.11adR\x0cieee802.11axR\x0cieee802.11ayR\x0cieee802.15.1R\x0eother-wirelessR\x03gsmR\x04\x63\x64maR\x03lteR\tsonet-oc3R\nsonet-oc12R\nsonet-oc48R\x0bsonet-oc192R\x0bsonet-oc768R\x0csonet-oc1920R\x0csonet-oc3840R\x08\x31gfc-sfpR\x08\x32gfc-sfpR\x08\x34gfc-sfpR\t8gfc-sfppR\n16gfc-sfppR\x0b\x33\x32gfc-sfp28R\x0b\x36\x34gfc-qsfppR\r128gfc-qsfp28R\x0einfiniband-sdrR\x0einfiniband-ddrR\x0einfiniband-qdrR\x10infiniband-fdr10R\x0einfiniband-fdrR\x0einfiniband-edrR\x0einfiniband-hdrR\x0einfiniband-ndrR\x0einfiniband-xdrR\x02t1R\x02\x65\x31R\x02t3R\x02\x65\x33R\x04xdslR\x06\x64ocsisR\x04gponR\x06xg-ponR\x07xgs-po
nR\x07ng-pon2R\x04\x65ponR\x08\x31\x30g-eponR\x0f\x63isco-stackwiseR\x14\x63isco-stackwise-plusR\x0f\x63isco-flexstackR\x14\x63isco-flexstack-plusR\x12\x63isco-stackwise-80R\x13\x63isco-stackwise-160R\x13\x63isco-stackwise-320R\x13\x63isco-stackwise-480R\x12\x63isco-stackwise-1tR\x0bjuniper-vcpR\x13\x65xtreme-summitstackR\x17\x65xtreme-summitstack-128R\x17\x65xtreme-summitstack-256R\x17\x65xtreme-summitstack-512R\x05otherR\x04type\x12\x1d\n\x07\x65nabled\x18\x05 \x01(\x08H\x01R\x07\x65nabled\x88\x01\x01\x12\"\n\x03mtu\x18\x06 \x01(\x05\x42\x0b\xfa\x42\x08\x1a\x06\x18\x80\x80\x04(\x01H\x02R\x03mtu\x88\x01\x01\x12$\n\x0bmac_address\x18\x07 \x01(\tH\x03R\nmacAddress\x88\x01\x01\x12\"\n\x05speed\x18\x08 \x01(\x05\x42\x07\xfa\x42\x04\x1a\x02(\x00H\x04R\x05speed\x88\x01\x01\x12\x15\n\x03wwn\x18\t \x01(\tH\x05R\x03wwn\x88\x01\x01\x12 \n\tmgmt_only\x18\n \x01(\x08H\x06R\x08mgmtOnly\x88\x01\x01\x12/\n\x0b\x64\x65scription\x18\x0b \x01(\tB\x08\xfa\x42\x05r\x03\x18\xc8\x01H\x07R\x0b\x64\x65scription\x88\x01\x01\x12*\n\x0emark_connected\x18\x0c \x01(\x08H\x08R\rmarkConnected\x88\x01\x01\x12\x35\n\x04mode\x18\r \x01(\tB!\xfa\x42\x1er\x1cR\x06\x61\x63\x63\x65ssR\x06taggedR\ntagged-allR\x04mode\x12!\n\x04tags\x18\x0e \x03(\x0b\x32\r.diode.v1.TagR\x04tagsB\x08\n\x06_labelB\n\n\x08_enabledB\x06\n\x04_mtuB\x0e\n\x0c_mac_addressB\x08\n\x06_speedB\x06\n\x04_wwnB\x0c\n\n_mgmt_onlyB\x0e\n\x0c_descriptionB\x11\n\x0f_mark_connected\"\xe3\x02\n\x07\x43luster\x12\x1d\n\x04name\x18\x01 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18\x64R\x04name\x12)\n\x04type\x18\x02 \x01(\x0b\x32\x15.diode.v1.ClusterTypeR\x04type\x12,\n\x05group\x18\x03 \x01(\x0b\x32\x16.diode.v1.ClusterGroupR\x05group\x12\"\n\x04site\x18\x04 \x01(\x0b\x32\x0e.diode.v1.SiteR\x04site\x12X\n\x06status\x18\x05 \x01(\tB@\xfa\x42=r;R\x07offlineR\x06\x61\x63tiveR\x07plannedR\x06stagedR\x06\x66\x61iledR\x0f\x64\x65\x63ommissioningR\x06status\x12/\n\x0b\x64\x65scription\x18\x06 
\x01(\tB\x08\xfa\x42\x05r\x03\x18\xc8\x01H\x00R\x0b\x64\x65scription\x88\x01\x01\x12!\n\x04tags\x18\x07 \x03(\x0b\x32\r.diode.v1.TagR\x04tagsB\x0e\n\x0c_description\"\xc1\x01\n\x0b\x43lusterType\x12\x1d\n\x04name\x18\x01 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18\x64R\x04name\x12/\n\x04slug\x18\x02 \x01(\tB\x1b\xfa\x42\x18r\x16\x10\x01\x18\x64\x32\x10^[-a-zA-Z0-9_]+$R\x04slug\x12/\n\x0b\x64\x65scription\x18\x03 \x01(\tB\x08\xfa\x42\x05r\x03\x18\xc8\x01H\x00R\x0b\x64\x65scription\x88\x01\x01\x12!\n\x04tags\x18\x04 \x03(\x0b\x32\r.diode.v1.TagR\x04tagsB\x0e\n\x0c_description\"\xc2\x01\n\x0c\x43lusterGroup\x12\x1d\n\x04name\x18\x01 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18\x64R\x04name\x12/\n\x04slug\x18\x02 \x01(\tB\x1b\xfa\x42\x18r\x16\x10\x01\x18\x64\x32\x10^[-a-zA-Z0-9_]+$R\x04slug\x12/\n\x0b\x64\x65scription\x18\x03 \x01(\tB\x08\xfa\x42\x05r\x03\x18\xc8\x01H\x00R\x0b\x64\x65scription\x88\x01\x01\x12!\n\x04tags\x18\x04 \x03(\x0b\x32\r.diode.v1.TagR\x04tagsB\x0e\n\x0c_description\"\xde\x05\n\x0eVirtualMachine\x12\x1b\n\x04name\x18\x01 \x01(\tB\x07\xfa\x42\x04r\x02\x18@R\x04name\x12X\n\x06status\x18\x02 \x01(\tB@\xfa\x42=r;R\x07offlineR\x06\x61\x63tiveR\x07plannedR\x06stagedR\x06\x66\x61iledR\x0f\x64\x65\x63ommissioningR\x06status\x12\"\n\x04site\x18\x03 \x01(\x0b\x32\x0e.diode.v1.SiteR\x04site\x12+\n\x07\x63luster\x18\x04 \x01(\x0b\x32\x11.diode.v1.ClusterR\x07\x63luster\x12\"\n\x04role\x18\x05 \x01(\x0b\x32\x0e.diode.v1.RoleR\x04role\x12(\n\x06\x64\x65vice\x18\x06 \x01(\x0b\x32\x10.diode.v1.DeviceR\x06\x64\x65vice\x12.\n\x08platform\x18\x07 \x01(\x0b\x32\x12.diode.v1.PlatformR\x08platform\x12\x34\n\x0bprimary_ip4\x18\x08 \x01(\x0b\x32\x13.diode.v1.IPAddressR\nprimaryIp4\x12\x34\n\x0bprimary_ip6\x18\t \x01(\x0b\x32\x13.diode.v1.IPAddressR\nprimaryIp6\x12\"\n\x05vcpus\x18\n \x01(\x05\x42\x07\xfa\x42\x04\x1a\x02(\x00H\x00R\x05vcpus\x88\x01\x01\x12$\n\x06memory\x18\x0b \x01(\x05\x42\x07\xfa\x42\x04\x1a\x02(\x00H\x01R\x06memory\x88\x01\x01\x12 \n\x04\x64isk\x18\x0c 
\x01(\x05\x42\x07\xfa\x42\x04\x1a\x02(\x00H\x02R\x04\x64isk\x88\x01\x01\x12/\n\x0b\x64\x65scription\x18\r \x01(\tB\x08\xfa\x42\x05r\x03\x18\xc8\x01H\x03R\x0b\x64\x65scription\x88\x01\x01\x12\x1f\n\x08\x63omments\x18\x0e \x01(\tH\x04R\x08\x63omments\x88\x01\x01\x12!\n\x04tags\x18\x0f \x03(\x0b\x32\r.diode.v1.TagR\x04tagsB\x08\n\x06_vcpusB\t\n\x07_memoryB\x07\n\x05_diskB\x0e\n\x0c_descriptionB\x0b\n\t_comments\"\xea\x02\n\x0bVMInterface\x12K\n\x0fvirtual_machine\x18\x01 \x01(\x0b\x32\x18.diode.v1.VirtualMachineB\x08\xfa\x42\x05\xa2\x01\x02\x08\x01R\x0evirtualMachine\x12\x1d\n\x04name\x18\x02 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18@R\x04name\x12\x1d\n\x07\x65nabled\x18\x03 \x01(\x08H\x00R\x07\x65nabled\x88\x01\x01\x12\"\n\x03mtu\x18\x04 \x01(\x05\x42\x0b\xfa\x42\x08\x1a\x06\x18\x80\x80\x04(\x01H\x01R\x03mtu\x88\x01\x01\x12$\n\x0bmac_address\x18\x05 \x01(\tH\x02R\nmacAddress\x88\x01\x01\x12/\n\x0b\x64\x65scription\x18\x06 \x01(\tB\x08\xfa\x42\x05r\x03\x18\xc8\x01H\x03R\x0b\x64\x65scription\x88\x01\x01\x12!\n\x04tags\x18\x07 \x03(\x0b\x32\r.diode.v1.TagR\x04tagsB\n\n\x08_enabledB\x06\n\x04_mtuB\x0e\n\x0c_mac_addressB\x0e\n\x0c_description\"\xfa\x01\n\x0bVirtualDisk\x12K\n\x0fvirtual_machine\x18\x01 \x01(\x0b\x32\x18.diode.v1.VirtualMachineB\x08\xfa\x42\x05\xa2\x01\x02\x08\x01R\x0evirtualMachine\x12\x1d\n\x04name\x18\x02 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18\x64R\x04name\x12\x1b\n\x04size\x18\x03 \x01(\x05\x42\x07\xfa\x42\x04\x1a\x02(\x00R\x04size\x12/\n\x0b\x64\x65scription\x18\x04 \x01(\tB\x08\xfa\x42\x05r\x03\x18\xc8\x01H\x00R\x0b\x64\x65scription\x88\x01\x01\x12!\n\x04tags\x18\x05 \x03(\x0b\x32\r.diode.v1.TagR\x04tagsB\x0e\n\x0c_description\"\x8c\x04\n\tIPAddress\x12!\n\x07\x61\x64\x64ress\x18\x01 \x01(\tB\x07\xfa\x42\x04r\x02p\x01R\x07\x61\x64\x64ress\x12\x33\n\tinterface\x18\x02 \x01(\x0b\x32\x13.diode.v1.InterfaceH\x00R\tinterface\x12H\n\x06status\x18\x03 
\x01(\tB0\xfa\x42-r+R\x06\x61\x63tiveR\x08reservedR\ndeprecatedR\x04\x64hcpR\x05slaacR\x06status\x12T\n\x04role\x18\x04 \x01(\tB@\xfa\x42=r;R\x08loopbackR\tsecondaryR\x07\x61nycastR\x03vipR\x04vrrpR\x04hsrpR\x04glbpR\x04\x63\x61rpR\x04role\x12U\n\x08\x64ns_name\x18\x05 \x01(\tB5\xfa\x42\x32r0\x18\xff\x01\x32+^([0-9A-Za-z_-]+|\\*)(\\.[0-9A-Za-z_-]+)*\\.?$H\x01R\x07\x64nsName\x88\x01\x01\x12/\n\x0b\x64\x65scription\x18\x06 \x01(\tB\x08\xfa\x42\x05r\x03\x18\xc8\x01H\x02R\x0b\x64\x65scription\x88\x01\x01\x12\x1f\n\x08\x63omments\x18\x07 \x01(\tH\x03R\x08\x63omments\x88\x01\x01\x12!\n\x04tags\x18\x08 \x03(\x0b\x32\r.diode.v1.TagR\x04tagsB\x11\n\x0f\x61ssigned_objectB\x0b\n\t_dns_nameB\x0e\n\x0c_descriptionB\x0b\n\t_comments\"\xeb\x02\n\nDeviceType\x12\x1f\n\x05model\x18\x01 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18\x64R\x05model\x12/\n\x04slug\x18\x02 \x01(\tB\x1b\xfa\x42\x18r\x16\x10\x01\x18\x64\x32\x10^[-a-zA-Z0-9_]+$R\x04slug\x12:\n\x0cmanufacturer\x18\x03 \x01(\x0b\x32\x16.diode.v1.ManufacturerR\x0cmanufacturer\x12/\n\x0b\x64\x65scription\x18\x04 \x01(\tB\x08\xfa\x42\x05r\x03\x18\xc8\x01H\x00R\x0b\x64\x65scription\x88\x01\x01\x12\x1f\n\x08\x63omments\x18\x05 \x01(\tH\x01R\x08\x63omments\x88\x01\x01\x12-\n\x0bpart_number\x18\x06 \x01(\tB\x07\xfa\x42\x04r\x02\x18\x32H\x02R\npartNumber\x88\x01\x01\x12!\n\x04tags\x18\x07 \x03(\x0b\x32\r.diode.v1.TagR\x04tagsB\x0e\n\x0c_descriptionB\x0b\n\t_commentsB\x0e\n\x0c_part_number\"\xc2\x01\n\x0cManufacturer\x12\x1d\n\x04name\x18\x01 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18\x64R\x04name\x12/\n\x04slug\x18\x02 \x01(\tB\x1b\xfa\x42\x18r\x16\x10\x01\x18\x64\x32\x10^[-a-zA-Z0-9_]+$R\x04slug\x12/\n\x0b\x64\x65scription\x18\x03 \x01(\tB\x08\xfa\x42\x05r\x03\x18\xc8\x01H\x00R\x0b\x64\x65scription\x88\x01\x01\x12!\n\x04tags\x18\x04 \x03(\x0b\x32\r.diode.v1.TagR\x04tagsB\x0e\n\x0c_description\"\xfa\x01\n\x08Platform\x12\x1d\n\x04name\x18\x01 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18\x64R\x04name\x12/\n\x04slug\x18\x02 
\x01(\tB\x1b\xfa\x42\x18r\x16\x10\x01\x18\x64\x32\x10^[-a-zA-Z0-9_]+$R\x04slug\x12:\n\x0cmanufacturer\x18\x03 \x01(\x0b\x32\x16.diode.v1.ManufacturerR\x0cmanufacturer\x12/\n\x0b\x64\x65scription\x18\x04 \x01(\tB\x08\xfa\x42\x05r\x03\x18\xc8\x01H\x00R\x0b\x64\x65scription\x88\x01\x01\x12!\n\x04tags\x18\x05 \x03(\x0b\x32\r.diode.v1.TagR\x04tagsB\x0e\n\x0c_description\"\x8d\x03\n\x06Prefix\x12\x1f\n\x06prefix\x18\x01 \x01(\tB\x07\xfa\x42\x04r\x02p\x01R\x06prefix\x12\"\n\x04site\x18\x02 \x01(\x0b\x32\x0e.diode.v1.SiteR\x04site\x12\x46\n\x06status\x18\x03 \x01(\tB.\xfa\x42+r)R\x06\x61\x63tiveR\tcontainerR\x08reservedR\ndeprecatedR\x06status\x12\x1c\n\x07is_pool\x18\x04 \x01(\x08H\x00R\x06isPool\x88\x01\x01\x12(\n\rmark_utilized\x18\x05 \x01(\x08H\x01R\x0cmarkUtilized\x88\x01\x01\x12/\n\x0b\x64\x65scription\x18\x06 \x01(\tB\x08\xfa\x42\x05r\x03\x18\xc8\x01H\x02R\x0b\x64\x65scription\x88\x01\x01\x12\x1f\n\x08\x63omments\x18\x07 \x01(\tH\x03R\x08\x63omments\x88\x01\x01\x12!\n\x04tags\x18\x08 \x03(\x0b\x32\r.diode.v1.TagR\x04tagsB\n\n\x08_is_poolB\x10\n\x0e_mark_utilizedB\x0e\n\x0c_descriptionB\x0b\n\t_comments\"\xea\x01\n\x04Role\x12\x1d\n\x04name\x18\x01 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18\x64R\x04name\x12/\n\x04slug\x18\x02 \x01(\tB\x1b\xfa\x42\x18r\x16\x10\x01\x18\x64\x32\x10^[-a-zA-Z0-9_]+$R\x04slug\x12.\n\x05\x63olor\x18\x03 \x01(\tB\x18\xfa\x42\x15r\x13\x10\x06\x18\x06\x32\r^[0-9a-f]{6}$R\x05\x63olor\x12/\n\x0b\x64\x65scription\x18\x04 \x01(\tB\x08\xfa\x42\x05r\x03\x18\xc8\x01H\x00R\x0b\x64\x65scription\x88\x01\x01\x12!\n\x04tags\x18\x05 \x03(\x0b\x32\r.diode.v1.TagR\x04tagsB\x0e\n\x0c_description\"\xa2\x03\n\x04Site\x12\x1d\n\x04name\x18\x01 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18\x64R\x04name\x12/\n\x04slug\x18\x02 \x01(\tB\x1b\xfa\x42\x18r\x16\x10\x01\x18\x64\x32\x10^[-a-zA-Z0-9_]+$R\x04slug\x12Q\n\x06status\x18\x03 
\x01(\tB9\xfa\x42\x36r4R\x07plannedR\x07stagingR\x06\x61\x63tiveR\x0f\x64\x65\x63ommissioningR\x07retiredR\x06status\x12(\n\x08\x66\x61\x63ility\x18\x04 \x01(\tB\x07\xfa\x42\x04r\x02\x18\x32H\x00R\x08\x66\x61\x63ility\x88\x01\x01\x12 \n\ttime_zone\x18\x05 \x01(\tH\x01R\x08timeZone\x88\x01\x01\x12/\n\x0b\x64\x65scription\x18\x06 \x01(\tB\x08\xfa\x42\x05r\x03\x18\xc8\x01H\x02R\x0b\x64\x65scription\x88\x01\x01\x12\x1f\n\x08\x63omments\x18\x07 \x01(\tH\x03R\x08\x63omments\x88\x01\x01\x12!\n\x04tags\x18\x08 \x03(\x0b\x32\r.diode.v1.TagR\x04tagsB\x0b\n\t_facilityB\x0c\n\n_time_zoneB\x0e\n\x0c_descriptionB\x0b\n\t_comments\"\x85\x01\n\x03Tag\x12\x1d\n\x04name\x18\x01 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18\x64R\x04name\x12/\n\x04slug\x18\x02 \x01(\tB\x1b\xfa\x42\x18r\x16\x10\x01\x18\x64\x32\x10^[-a-zA-Z0-9_]+$R\x04slug\x12.\n\x05\x63olor\x18\x03 \x01(\tB\x18\xfa\x42\x15r\x13\x10\x06\x18\x06\x32\r^[0-9a-f]{6}$R\x05\x63olor\"\x83\x07\n\x06\x45ntity\x12$\n\x04site\x18\x01 \x01(\x0b\x32\x0e.diode.v1.SiteH\x00R\x04site\x12\x30\n\x08platform\x18\x02 \x01(\x0b\x32\x12.diode.v1.PlatformH\x00R\x08platform\x12<\n\x0cmanufacturer\x18\x03 \x01(\x0b\x32\x16.diode.v1.ManufacturerH\x00R\x0cmanufacturer\x12*\n\x06\x64\x65vice\x18\x04 \x01(\x0b\x32\x10.diode.v1.DeviceH\x00R\x06\x64\x65vice\x12\x31\n\x0b\x64\x65vice_role\x18\x05 \x01(\x0b\x32\x0e.diode.v1.RoleH\x00R\ndeviceRole\x12\x37\n\x0b\x64\x65vice_type\x18\x06 \x01(\x0b\x32\x14.diode.v1.DeviceTypeH\x00R\ndeviceType\x12\x33\n\tinterface\x18\x07 \x01(\x0b\x32\x13.diode.v1.InterfaceH\x00R\tinterface\x12\x34\n\nip_address\x18\t \x01(\x0b\x32\x13.diode.v1.IPAddressH\x00R\tipAddress\x12*\n\x06prefix\x18\n \x01(\x0b\x32\x10.diode.v1.PrefixH\x00R\x06prefix\x12=\n\rcluster_group\x18\x0b \x01(\x0b\x32\x16.diode.v1.ClusterGroupH\x00R\x0c\x63lusterGroup\x12:\n\x0c\x63luster_type\x18\x0c \x01(\x0b\x32\x15.diode.v1.ClusterTypeH\x00R\x0b\x63lusterType\x12-\n\x07\x63luster\x18\r 
\x01(\x0b\x32\x11.diode.v1.ClusterH\x00R\x07\x63luster\x12\x43\n\x0fvirtual_machine\x18\x0e \x01(\x0b\x32\x18.diode.v1.VirtualMachineH\x00R\x0evirtualMachine\x12\x39\n\x0bvminterface\x18\x0f \x01(\x0b\x32\x15.diode.v1.VMInterfaceH\x00R\x0bvminterface\x12:\n\x0cvirtual_disk\x18\x10 \x01(\x0b\x32\x15.diode.v1.VirtualDiskH\x00R\x0bvirtualDisk\x12\x44\n\ttimestamp\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\n\xfa\x42\x07\xb2\x01\x04\x08\x01\x38\x01R\ttimestampB\x08\n\x06\x65ntity\"\xe4\x02\n\rIngestRequest\x12\"\n\x06stream\x18\x01 \x01(\tB\n\xfa\x42\x07r\x05\x10\x01\x18\xff\x01R\x06stream\x12\x39\n\x08\x65ntities\x18\x02 \x03(\x0b\x32\x10.diode.v1.EntityB\x0b\xfa\x42\x08\x92\x01\x05\x08\x01\x10\xe8\x07R\x08\x65ntities\x12\x18\n\x02id\x18\x03 \x01(\tB\x08\xfa\x42\x05r\x03\xb0\x01\x01R\x02id\x12\x36\n\x11producer_app_name\x18\x04 \x01(\tB\n\xfa\x42\x07r\x05\x10\x01\x18\xff\x01R\x0fproducerAppName\x12<\n\x14producer_app_version\x18\x05 \x01(\tB\n\xfa\x42\x07r\x05\x10\x01\x18\xff\x01R\x12producerAppVersion\x12%\n\x08sdk_name\x18\x06 \x01(\tB\n\xfa\x42\x07r\x05\x10\x01\x18\xff\x01R\x07sdkName\x12=\n\x0bsdk_version\x18\x07 \x01(\tB\x1c\xfa\x42\x19r\x17\x32\x15^(\\d)+\\.(\\d)+\\.(\\d)+$R\nsdkVersion\"(\n\x0eIngestResponse\x12\x16\n\x06\x65rrors\x18\x01 \x03(\tR\x06\x65rrors2P\n\x0fIngesterService\x12=\n\x06Ingest\x12\x17.diode.v1.IngestRequest\x1a\x18.diode.v1.IngestResponse\"\x00\x42\x35Z3github.com/netboxlabs/diode-sdk-go/diode/v1/diodepbb\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'diode.v1.ingester_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = b'Z3github.com/netboxlabs/diode-sdk-go/diode/v1/diodepb' - _globals['_DEVICE'].fields_by_name['name']._loaded_options = None - _globals['_DEVICE'].fields_by_name['name']._serialized_options = 
b'\372B\004r\002\030@' - _globals['_DEVICE'].fields_by_name['device_fqdn']._loaded_options = None - _globals['_DEVICE'].fields_by_name['device_fqdn']._serialized_options = b'\372B\007r\005\020\001\030\377\001' - _globals['_DEVICE'].fields_by_name['serial']._loaded_options = None - _globals['_DEVICE'].fields_by_name['serial']._serialized_options = b'\372B\004r\002\0302' - _globals['_DEVICE'].fields_by_name['asset_tag']._loaded_options = None - _globals['_DEVICE'].fields_by_name['asset_tag']._serialized_options = b'\372B\005r\003\030\310\001' - _globals['_DEVICE'].fields_by_name['status']._loaded_options = None - _globals['_DEVICE'].fields_by_name['status']._serialized_options = b'\372BHrFR\007offlineR\006activeR\007plannedR\006stagedR\006failedR\tinventoryR\017decommissioning' - _globals['_DEVICE'].fields_by_name['description']._loaded_options = None - _globals['_DEVICE'].fields_by_name['description']._serialized_options = b'\372B\005r\003\030\310\001' - _globals['_INTERFACE'].fields_by_name['device']._loaded_options = None - _globals['_INTERFACE'].fields_by_name['device']._serialized_options = b'\372B\005\242\001\002\010\001' - _globals['_INTERFACE'].fields_by_name['name']._loaded_options = None - _globals['_INTERFACE'].fields_by_name['name']._serialized_options = b'\372B\006r\004\020\001\030@' - _globals['_INTERFACE'].fields_by_name['label']._loaded_options = None - _globals['_INTERFACE'].fields_by_name['label']._serialized_options = b'\372B\006r\004\020\001\030@' - _globals['_INTERFACE'].fields_by_name['type']._loaded_options = None - _globals['_INTERFACE'].fields_by_name['type']._serialized_options = 
b'\372B\341\014r\336\014R\007virtualR\006bridgeR\003lagR\n100base-fxR\013100base-lfxR\n100base-txR\n100base-t1R\n1000base-tR\0171000base-x-gbicR\0161000base-x-sfpR\n2.5gbase-tR\0105gbase-tR\t10gbase-tR\01310gbase-cx4R\01610gbase-x-sfppR\r10gbase-x-xfpR\02010gbase-x-xenpakR\01410gbase-x-x2R\01725gbase-x-sfp28R\01750gbase-x-sfp56R\01740gbase-x-qsfppR\01750gbase-x-sfp28R\016100gbase-x-cfpR\017100gbase-x-cfp2R\017100gbase-x-cfp4R\016100gbase-x-cxpR\017100gbase-x-cpakR\017100gbase-x-dsfpR\020100gbase-x-sfpddR\021100gbase-x-qsfp28R\021100gbase-x-qsfpddR\017200gbase-x-cfp2R\021200gbase-x-qsfp56R\021200gbase-x-qsfpddR\017400gbase-x-cfp2R\022400gbase-x-qsfp112R\021400gbase-x-qsfpddR\017400gbase-x-osfpR\023400gbase-x-osfp-rhsR\017400gbase-x-cdfpR\017400gbase-x-cfp8R\021800gbase-x-qsfpddR\017800gbase-x-osfpR\0131000base-kxR\n10gbase-krR\01310gbase-kx4R\n25gbase-krR\01340gbase-kr4R\n50gbase-krR\014100gbase-kp4R\014100gbase-kr2R\014100gbase-kr4R\013ieee802.11aR\013ieee802.11gR\013ieee802.11nR\014ieee802.11acR\014ieee802.11adR\014ieee802.11axR\014ieee802.11ayR\014ieee802.15.1R\016other-wirelessR\003gsmR\004cdmaR\003lteR\tsonet-oc3R\nsonet-oc12R\nsonet-oc48R\013sonet-oc192R\013sonet-oc768R\014sonet-oc1920R\014sonet-oc3840R\0101gfc-sfpR\0102gfc-sfpR\0104gfc-sfpR\t8gfc-sfppR\n16gfc-sfppR\01332gfc-sfp28R\01364gfc-qsfppR\r128gfc-qsfp28R\016infiniband-sdrR\016infiniband-ddrR\016infiniband-qdrR\020infiniband-fdr10R\016infiniband-fdrR\016infiniband-edrR\016infiniband-hdrR\016infiniband-ndrR\016infiniband-xdrR\002t1R\002e1R\002t3R\002e3R\004xdslR\006docsisR\004gponR\006xg-ponR\007xgs-ponR\007ng-pon2R\004eponR\01010g-eponR\017cisco-stackwiseR\024cisco-stackwise-plusR\017cisco-flexstackR\024cisco-flexstack-plusR\022cisco-stackwise-80R\023cisco-stackwise-160R\023cisco-stackwise-320R\023cisco-stackwise-480R\022cisco-stackwise-1tR\013juniper-vcpR\023extreme-summitstackR\027extreme-summitstack-128R\027extreme-summitstack-256R\027extreme-summitstack-512R\005other' - 
_globals['_INTERFACE'].fields_by_name['mtu']._loaded_options = None - _globals['_INTERFACE'].fields_by_name['mtu']._serialized_options = b'\372B\010\032\006\030\200\200\004(\001' - _globals['_INTERFACE'].fields_by_name['speed']._loaded_options = None - _globals['_INTERFACE'].fields_by_name['speed']._serialized_options = b'\372B\004\032\002(\000' - _globals['_INTERFACE'].fields_by_name['description']._loaded_options = None - _globals['_INTERFACE'].fields_by_name['description']._serialized_options = b'\372B\005r\003\030\310\001' - _globals['_INTERFACE'].fields_by_name['mode']._loaded_options = None - _globals['_INTERFACE'].fields_by_name['mode']._serialized_options = b'\372B\036r\034R\006accessR\006taggedR\ntagged-all' - _globals['_CLUSTER'].fields_by_name['name']._loaded_options = None - _globals['_CLUSTER'].fields_by_name['name']._serialized_options = b'\372B\006r\004\020\001\030d' - _globals['_CLUSTER'].fields_by_name['status']._loaded_options = None - _globals['_CLUSTER'].fields_by_name['status']._serialized_options = b'\372B=r;R\007offlineR\006activeR\007plannedR\006stagedR\006failedR\017decommissioning' - _globals['_CLUSTER'].fields_by_name['description']._loaded_options = None - _globals['_CLUSTER'].fields_by_name['description']._serialized_options = b'\372B\005r\003\030\310\001' - _globals['_CLUSTERTYPE'].fields_by_name['name']._loaded_options = None - _globals['_CLUSTERTYPE'].fields_by_name['name']._serialized_options = b'\372B\006r\004\020\001\030d' - _globals['_CLUSTERTYPE'].fields_by_name['slug']._loaded_options = None - _globals['_CLUSTERTYPE'].fields_by_name['slug']._serialized_options = b'\372B\030r\026\020\001\030d2\020^[-a-zA-Z0-9_]+$' - _globals['_CLUSTERTYPE'].fields_by_name['description']._loaded_options = None - _globals['_CLUSTERTYPE'].fields_by_name['description']._serialized_options = b'\372B\005r\003\030\310\001' - _globals['_CLUSTERGROUP'].fields_by_name['name']._loaded_options = None - 
_globals['_CLUSTERGROUP'].fields_by_name['name']._serialized_options = b'\372B\006r\004\020\001\030d' - _globals['_CLUSTERGROUP'].fields_by_name['slug']._loaded_options = None - _globals['_CLUSTERGROUP'].fields_by_name['slug']._serialized_options = b'\372B\030r\026\020\001\030d2\020^[-a-zA-Z0-9_]+$' - _globals['_CLUSTERGROUP'].fields_by_name['description']._loaded_options = None - _globals['_CLUSTERGROUP'].fields_by_name['description']._serialized_options = b'\372B\005r\003\030\310\001' - _globals['_VIRTUALMACHINE'].fields_by_name['name']._loaded_options = None - _globals['_VIRTUALMACHINE'].fields_by_name['name']._serialized_options = b'\372B\004r\002\030@' - _globals['_VIRTUALMACHINE'].fields_by_name['status']._loaded_options = None - _globals['_VIRTUALMACHINE'].fields_by_name['status']._serialized_options = b'\372B=r;R\007offlineR\006activeR\007plannedR\006stagedR\006failedR\017decommissioning' - _globals['_VIRTUALMACHINE'].fields_by_name['vcpus']._loaded_options = None - _globals['_VIRTUALMACHINE'].fields_by_name['vcpus']._serialized_options = b'\372B\004\032\002(\000' - _globals['_VIRTUALMACHINE'].fields_by_name['memory']._loaded_options = None - _globals['_VIRTUALMACHINE'].fields_by_name['memory']._serialized_options = b'\372B\004\032\002(\000' - _globals['_VIRTUALMACHINE'].fields_by_name['disk']._loaded_options = None - _globals['_VIRTUALMACHINE'].fields_by_name['disk']._serialized_options = b'\372B\004\032\002(\000' - _globals['_VIRTUALMACHINE'].fields_by_name['description']._loaded_options = None - _globals['_VIRTUALMACHINE'].fields_by_name['description']._serialized_options = b'\372B\005r\003\030\310\001' - _globals['_VMINTERFACE'].fields_by_name['virtual_machine']._loaded_options = None - _globals['_VMINTERFACE'].fields_by_name['virtual_machine']._serialized_options = b'\372B\005\242\001\002\010\001' - _globals['_VMINTERFACE'].fields_by_name['name']._loaded_options = None - _globals['_VMINTERFACE'].fields_by_name['name']._serialized_options = 
b'\372B\006r\004\020\001\030@' - _globals['_VMINTERFACE'].fields_by_name['mtu']._loaded_options = None - _globals['_VMINTERFACE'].fields_by_name['mtu']._serialized_options = b'\372B\010\032\006\030\200\200\004(\001' - _globals['_VMINTERFACE'].fields_by_name['description']._loaded_options = None - _globals['_VMINTERFACE'].fields_by_name['description']._serialized_options = b'\372B\005r\003\030\310\001' - _globals['_VIRTUALDISK'].fields_by_name['virtual_machine']._loaded_options = None - _globals['_VIRTUALDISK'].fields_by_name['virtual_machine']._serialized_options = b'\372B\005\242\001\002\010\001' - _globals['_VIRTUALDISK'].fields_by_name['name']._loaded_options = None - _globals['_VIRTUALDISK'].fields_by_name['name']._serialized_options = b'\372B\006r\004\020\001\030d' - _globals['_VIRTUALDISK'].fields_by_name['size']._loaded_options = None - _globals['_VIRTUALDISK'].fields_by_name['size']._serialized_options = b'\372B\004\032\002(\000' - _globals['_VIRTUALDISK'].fields_by_name['description']._loaded_options = None - _globals['_VIRTUALDISK'].fields_by_name['description']._serialized_options = b'\372B\005r\003\030\310\001' - _globals['_IPADDRESS'].fields_by_name['address']._loaded_options = None - _globals['_IPADDRESS'].fields_by_name['address']._serialized_options = b'\372B\004r\002p\001' - _globals['_IPADDRESS'].fields_by_name['status']._loaded_options = None - _globals['_IPADDRESS'].fields_by_name['status']._serialized_options = b'\372B-r+R\006activeR\010reservedR\ndeprecatedR\004dhcpR\005slaac' - _globals['_IPADDRESS'].fields_by_name['role']._loaded_options = None - _globals['_IPADDRESS'].fields_by_name['role']._serialized_options = b'\372B=r;R\010loopbackR\tsecondaryR\007anycastR\003vipR\004vrrpR\004hsrpR\004glbpR\004carp' - _globals['_IPADDRESS'].fields_by_name['dns_name']._loaded_options = None - _globals['_IPADDRESS'].fields_by_name['dns_name']._serialized_options = b'\372B2r0\030\377\0012+^([0-9A-Za-z_-]+|\\*)(\\.[0-9A-Za-z_-]+)*\\.?$' - 
_globals['_IPADDRESS'].fields_by_name['description']._loaded_options = None - _globals['_IPADDRESS'].fields_by_name['description']._serialized_options = b'\372B\005r\003\030\310\001' - _globals['_DEVICETYPE'].fields_by_name['model']._loaded_options = None - _globals['_DEVICETYPE'].fields_by_name['model']._serialized_options = b'\372B\006r\004\020\001\030d' - _globals['_DEVICETYPE'].fields_by_name['slug']._loaded_options = None - _globals['_DEVICETYPE'].fields_by_name['slug']._serialized_options = b'\372B\030r\026\020\001\030d2\020^[-a-zA-Z0-9_]+$' - _globals['_DEVICETYPE'].fields_by_name['description']._loaded_options = None - _globals['_DEVICETYPE'].fields_by_name['description']._serialized_options = b'\372B\005r\003\030\310\001' - _globals['_DEVICETYPE'].fields_by_name['part_number']._loaded_options = None - _globals['_DEVICETYPE'].fields_by_name['part_number']._serialized_options = b'\372B\004r\002\0302' - _globals['_MANUFACTURER'].fields_by_name['name']._loaded_options = None - _globals['_MANUFACTURER'].fields_by_name['name']._serialized_options = b'\372B\006r\004\020\001\030d' - _globals['_MANUFACTURER'].fields_by_name['slug']._loaded_options = None - _globals['_MANUFACTURER'].fields_by_name['slug']._serialized_options = b'\372B\030r\026\020\001\030d2\020^[-a-zA-Z0-9_]+$' - _globals['_MANUFACTURER'].fields_by_name['description']._loaded_options = None - _globals['_MANUFACTURER'].fields_by_name['description']._serialized_options = b'\372B\005r\003\030\310\001' - _globals['_PLATFORM'].fields_by_name['name']._loaded_options = None - _globals['_PLATFORM'].fields_by_name['name']._serialized_options = b'\372B\006r\004\020\001\030d' - _globals['_PLATFORM'].fields_by_name['slug']._loaded_options = None - _globals['_PLATFORM'].fields_by_name['slug']._serialized_options = b'\372B\030r\026\020\001\030d2\020^[-a-zA-Z0-9_]+$' - _globals['_PLATFORM'].fields_by_name['description']._loaded_options = None - 
_globals['_PLATFORM'].fields_by_name['description']._serialized_options = b'\372B\005r\003\030\310\001' - _globals['_PREFIX'].fields_by_name['prefix']._loaded_options = None - _globals['_PREFIX'].fields_by_name['prefix']._serialized_options = b'\372B\004r\002p\001' - _globals['_PREFIX'].fields_by_name['status']._loaded_options = None - _globals['_PREFIX'].fields_by_name['status']._serialized_options = b'\372B+r)R\006activeR\tcontainerR\010reservedR\ndeprecated' - _globals['_PREFIX'].fields_by_name['description']._loaded_options = None - _globals['_PREFIX'].fields_by_name['description']._serialized_options = b'\372B\005r\003\030\310\001' - _globals['_ROLE'].fields_by_name['name']._loaded_options = None - _globals['_ROLE'].fields_by_name['name']._serialized_options = b'\372B\006r\004\020\001\030d' - _globals['_ROLE'].fields_by_name['slug']._loaded_options = None - _globals['_ROLE'].fields_by_name['slug']._serialized_options = b'\372B\030r\026\020\001\030d2\020^[-a-zA-Z0-9_]+$' - _globals['_ROLE'].fields_by_name['color']._loaded_options = None - _globals['_ROLE'].fields_by_name['color']._serialized_options = b'\372B\025r\023\020\006\030\0062\r^[0-9a-f]{6}$' - _globals['_ROLE'].fields_by_name['description']._loaded_options = None - _globals['_ROLE'].fields_by_name['description']._serialized_options = b'\372B\005r\003\030\310\001' - _globals['_SITE'].fields_by_name['name']._loaded_options = None - _globals['_SITE'].fields_by_name['name']._serialized_options = b'\372B\006r\004\020\001\030d' - _globals['_SITE'].fields_by_name['slug']._loaded_options = None - _globals['_SITE'].fields_by_name['slug']._serialized_options = b'\372B\030r\026\020\001\030d2\020^[-a-zA-Z0-9_]+$' - _globals['_SITE'].fields_by_name['status']._loaded_options = None - _globals['_SITE'].fields_by_name['status']._serialized_options = b'\372B6r4R\007plannedR\007stagingR\006activeR\017decommissioningR\007retired' - _globals['_SITE'].fields_by_name['facility']._loaded_options = None - 
_globals['_SITE'].fields_by_name['facility']._serialized_options = b'\372B\004r\002\0302' - _globals['_SITE'].fields_by_name['description']._loaded_options = None - _globals['_SITE'].fields_by_name['description']._serialized_options = b'\372B\005r\003\030\310\001' - _globals['_TAG'].fields_by_name['name']._loaded_options = None - _globals['_TAG'].fields_by_name['name']._serialized_options = b'\372B\006r\004\020\001\030d' - _globals['_TAG'].fields_by_name['slug']._loaded_options = None - _globals['_TAG'].fields_by_name['slug']._serialized_options = b'\372B\030r\026\020\001\030d2\020^[-a-zA-Z0-9_]+$' - _globals['_TAG'].fields_by_name['color']._loaded_options = None - _globals['_TAG'].fields_by_name['color']._serialized_options = b'\372B\025r\023\020\006\030\0062\r^[0-9a-f]{6}$' - _globals['_ENTITY'].fields_by_name['timestamp']._loaded_options = None - _globals['_ENTITY'].fields_by_name['timestamp']._serialized_options = b'\372B\007\262\001\004\010\0018\001' - _globals['_INGESTREQUEST'].fields_by_name['stream']._loaded_options = None - _globals['_INGESTREQUEST'].fields_by_name['stream']._serialized_options = b'\372B\007r\005\020\001\030\377\001' - _globals['_INGESTREQUEST'].fields_by_name['entities']._loaded_options = None - _globals['_INGESTREQUEST'].fields_by_name['entities']._serialized_options = b'\372B\010\222\001\005\010\001\020\350\007' - _globals['_INGESTREQUEST'].fields_by_name['id']._loaded_options = None - _globals['_INGESTREQUEST'].fields_by_name['id']._serialized_options = b'\372B\005r\003\260\001\001' - _globals['_INGESTREQUEST'].fields_by_name['producer_app_name']._loaded_options = None - _globals['_INGESTREQUEST'].fields_by_name['producer_app_name']._serialized_options = b'\372B\007r\005\020\001\030\377\001' - _globals['_INGESTREQUEST'].fields_by_name['producer_app_version']._loaded_options = None - _globals['_INGESTREQUEST'].fields_by_name['producer_app_version']._serialized_options = b'\372B\007r\005\020\001\030\377\001' - 
_globals['_INGESTREQUEST'].fields_by_name['sdk_name']._loaded_options = None - _globals['_INGESTREQUEST'].fields_by_name['sdk_name']._serialized_options = b'\372B\007r\005\020\001\030\377\001' - _globals['_INGESTREQUEST'].fields_by_name['sdk_version']._loaded_options = None - _globals['_INGESTREQUEST'].fields_by_name['sdk_version']._serialized_options = b'\372B\031r\0272\025^(\\d)+\\.(\\d)+\\.(\\d)+$' - _globals['_DEVICE']._serialized_start=96 - _globals['_DEVICE']._serialized_end=836 - _globals['_INTERFACE']._serialized_start=839 - _globals['_INTERFACE']._serialized_end=3126 - _globals['_CLUSTER']._serialized_start=3129 - _globals['_CLUSTER']._serialized_end=3484 - _globals['_CLUSTERTYPE']._serialized_start=3487 - _globals['_CLUSTERTYPE']._serialized_end=3680 - _globals['_CLUSTERGROUP']._serialized_start=3683 - _globals['_CLUSTERGROUP']._serialized_end=3877 - _globals['_VIRTUALMACHINE']._serialized_start=3880 - _globals['_VIRTUALMACHINE']._serialized_end=4614 - _globals['_VMINTERFACE']._serialized_start=4617 - _globals['_VMINTERFACE']._serialized_end=4979 - _globals['_VIRTUALDISK']._serialized_start=4982 - _globals['_VIRTUALDISK']._serialized_end=5232 - _globals['_IPADDRESS']._serialized_start=5235 - _globals['_IPADDRESS']._serialized_end=5759 - _globals['_DEVICETYPE']._serialized_start=5762 - _globals['_DEVICETYPE']._serialized_end=6125 - _globals['_MANUFACTURER']._serialized_start=6128 - _globals['_MANUFACTURER']._serialized_end=6322 - _globals['_PLATFORM']._serialized_start=6325 - _globals['_PLATFORM']._serialized_end=6575 - _globals['_PREFIX']._serialized_start=6578 - _globals['_PREFIX']._serialized_end=6975 - _globals['_ROLE']._serialized_start=6978 - _globals['_ROLE']._serialized_end=7212 - _globals['_SITE']._serialized_start=7215 - _globals['_SITE']._serialized_end=7633 - _globals['_TAG']._serialized_start=7636 - _globals['_TAG']._serialized_end=7769 - _globals['_ENTITY']._serialized_start=7772 - _globals['_ENTITY']._serialized_end=8671 - 
_globals['_INGESTREQUEST']._serialized_start=8674 - _globals['_INGESTREQUEST']._serialized_end=9030 - _globals['_INGESTRESPONSE']._serialized_start=9032 - _globals['_INGESTRESPONSE']._serialized_end=9072 - _globals['_INGESTERSERVICE']._serialized_start=9074 - _globals['_INGESTERSERVICE']._serialized_end=9154 -# @@protoc_insertion_point(module_scope) diff --git a/netbox_diode_plugin/reconciler/sdk/v1/ingester_pb2.pyi b/netbox_diode_plugin/reconciler/sdk/v1/ingester_pb2.pyi deleted file mode 100644 index b9d44fe..0000000 --- a/netbox_diode_plugin/reconciler/sdk/v1/ingester_pb2.pyi +++ /dev/null @@ -1,368 +0,0 @@ -from google.protobuf import timestamp_pb2 as _timestamp_pb2 -from netbox_diode_plugin.reconciler.sdk.validate import validate_pb2 as _validate_pb2 -from google.protobuf.internal import containers as _containers -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union - -DESCRIPTOR: _descriptor.FileDescriptor - -class Device(_message.Message): - __slots__ = ("name", "device_fqdn", "device_type", "role", "platform", "serial", "site", "asset_tag", "status", "description", "comments", "tags", "primary_ip4", "primary_ip6") - NAME_FIELD_NUMBER: _ClassVar[int] - DEVICE_FQDN_FIELD_NUMBER: _ClassVar[int] - DEVICE_TYPE_FIELD_NUMBER: _ClassVar[int] - ROLE_FIELD_NUMBER: _ClassVar[int] - PLATFORM_FIELD_NUMBER: _ClassVar[int] - SERIAL_FIELD_NUMBER: _ClassVar[int] - SITE_FIELD_NUMBER: _ClassVar[int] - ASSET_TAG_FIELD_NUMBER: _ClassVar[int] - STATUS_FIELD_NUMBER: _ClassVar[int] - DESCRIPTION_FIELD_NUMBER: _ClassVar[int] - COMMENTS_FIELD_NUMBER: _ClassVar[int] - TAGS_FIELD_NUMBER: _ClassVar[int] - PRIMARY_IP4_FIELD_NUMBER: _ClassVar[int] - PRIMARY_IP6_FIELD_NUMBER: _ClassVar[int] - name: str - device_fqdn: str - device_type: DeviceType - role: Role - platform: Platform - serial: str - site: Site - 
asset_tag: str - status: str - description: str - comments: str - tags: _containers.RepeatedCompositeFieldContainer[Tag] - primary_ip4: IPAddress - primary_ip6: IPAddress - def __init__(self, name: _Optional[str] = ..., device_fqdn: _Optional[str] = ..., device_type: _Optional[_Union[DeviceType, _Mapping]] = ..., role: _Optional[_Union[Role, _Mapping]] = ..., platform: _Optional[_Union[Platform, _Mapping]] = ..., serial: _Optional[str] = ..., site: _Optional[_Union[Site, _Mapping]] = ..., asset_tag: _Optional[str] = ..., status: _Optional[str] = ..., description: _Optional[str] = ..., comments: _Optional[str] = ..., tags: _Optional[_Iterable[_Union[Tag, _Mapping]]] = ..., primary_ip4: _Optional[_Union[IPAddress, _Mapping]] = ..., primary_ip6: _Optional[_Union[IPAddress, _Mapping]] = ...) -> None: ... - -class Interface(_message.Message): - __slots__ = ("device", "name", "label", "type", "enabled", "mtu", "mac_address", "speed", "wwn", "mgmt_only", "description", "mark_connected", "mode", "tags") - DEVICE_FIELD_NUMBER: _ClassVar[int] - NAME_FIELD_NUMBER: _ClassVar[int] - LABEL_FIELD_NUMBER: _ClassVar[int] - TYPE_FIELD_NUMBER: _ClassVar[int] - ENABLED_FIELD_NUMBER: _ClassVar[int] - MTU_FIELD_NUMBER: _ClassVar[int] - MAC_ADDRESS_FIELD_NUMBER: _ClassVar[int] - SPEED_FIELD_NUMBER: _ClassVar[int] - WWN_FIELD_NUMBER: _ClassVar[int] - MGMT_ONLY_FIELD_NUMBER: _ClassVar[int] - DESCRIPTION_FIELD_NUMBER: _ClassVar[int] - MARK_CONNECTED_FIELD_NUMBER: _ClassVar[int] - MODE_FIELD_NUMBER: _ClassVar[int] - TAGS_FIELD_NUMBER: _ClassVar[int] - device: Device - name: str - label: str - type: str - enabled: bool - mtu: int - mac_address: str - speed: int - wwn: str - mgmt_only: bool - description: str - mark_connected: bool - mode: str - tags: _containers.RepeatedCompositeFieldContainer[Tag] - def __init__(self, device: _Optional[_Union[Device, _Mapping]] = ..., name: _Optional[str] = ..., label: _Optional[str] = ..., type: _Optional[str] = ..., enabled: bool = ..., mtu: _Optional[int] 
= ..., mac_address: _Optional[str] = ..., speed: _Optional[int] = ..., wwn: _Optional[str] = ..., mgmt_only: bool = ..., description: _Optional[str] = ..., mark_connected: bool = ..., mode: _Optional[str] = ..., tags: _Optional[_Iterable[_Union[Tag, _Mapping]]] = ...) -> None: ... - -class Cluster(_message.Message): - __slots__ = ("name", "type", "group", "site", "status", "description", "tags") - NAME_FIELD_NUMBER: _ClassVar[int] - TYPE_FIELD_NUMBER: _ClassVar[int] - GROUP_FIELD_NUMBER: _ClassVar[int] - SITE_FIELD_NUMBER: _ClassVar[int] - STATUS_FIELD_NUMBER: _ClassVar[int] - DESCRIPTION_FIELD_NUMBER: _ClassVar[int] - TAGS_FIELD_NUMBER: _ClassVar[int] - name: str - type: ClusterType - group: ClusterGroup - site: Site - status: str - description: str - tags: _containers.RepeatedCompositeFieldContainer[Tag] - def __init__(self, name: _Optional[str] = ..., type: _Optional[_Union[ClusterType, _Mapping]] = ..., group: _Optional[_Union[ClusterGroup, _Mapping]] = ..., site: _Optional[_Union[Site, _Mapping]] = ..., status: _Optional[str] = ..., description: _Optional[str] = ..., tags: _Optional[_Iterable[_Union[Tag, _Mapping]]] = ...) -> None: ... - -class ClusterType(_message.Message): - __slots__ = ("name", "slug", "description", "tags") - NAME_FIELD_NUMBER: _ClassVar[int] - SLUG_FIELD_NUMBER: _ClassVar[int] - DESCRIPTION_FIELD_NUMBER: _ClassVar[int] - TAGS_FIELD_NUMBER: _ClassVar[int] - name: str - slug: str - description: str - tags: _containers.RepeatedCompositeFieldContainer[Tag] - def __init__(self, name: _Optional[str] = ..., slug: _Optional[str] = ..., description: _Optional[str] = ..., tags: _Optional[_Iterable[_Union[Tag, _Mapping]]] = ...) -> None: ... 
- -class ClusterGroup(_message.Message): - __slots__ = ("name", "slug", "description", "tags") - NAME_FIELD_NUMBER: _ClassVar[int] - SLUG_FIELD_NUMBER: _ClassVar[int] - DESCRIPTION_FIELD_NUMBER: _ClassVar[int] - TAGS_FIELD_NUMBER: _ClassVar[int] - name: str - slug: str - description: str - tags: _containers.RepeatedCompositeFieldContainer[Tag] - def __init__(self, name: _Optional[str] = ..., slug: _Optional[str] = ..., description: _Optional[str] = ..., tags: _Optional[_Iterable[_Union[Tag, _Mapping]]] = ...) -> None: ... - -class VirtualMachine(_message.Message): - __slots__ = ("name", "status", "site", "cluster", "role", "device", "platform", "primary_ip4", "primary_ip6", "vcpus", "memory", "disk", "description", "comments", "tags") - NAME_FIELD_NUMBER: _ClassVar[int] - STATUS_FIELD_NUMBER: _ClassVar[int] - SITE_FIELD_NUMBER: _ClassVar[int] - CLUSTER_FIELD_NUMBER: _ClassVar[int] - ROLE_FIELD_NUMBER: _ClassVar[int] - DEVICE_FIELD_NUMBER: _ClassVar[int] - PLATFORM_FIELD_NUMBER: _ClassVar[int] - PRIMARY_IP4_FIELD_NUMBER: _ClassVar[int] - PRIMARY_IP6_FIELD_NUMBER: _ClassVar[int] - VCPUS_FIELD_NUMBER: _ClassVar[int] - MEMORY_FIELD_NUMBER: _ClassVar[int] - DISK_FIELD_NUMBER: _ClassVar[int] - DESCRIPTION_FIELD_NUMBER: _ClassVar[int] - COMMENTS_FIELD_NUMBER: _ClassVar[int] - TAGS_FIELD_NUMBER: _ClassVar[int] - name: str - status: str - site: Site - cluster: Cluster - role: Role - device: Device - platform: Platform - primary_ip4: IPAddress - primary_ip6: IPAddress - vcpus: int - memory: int - disk: int - description: str - comments: str - tags: _containers.RepeatedCompositeFieldContainer[Tag] - def __init__(self, name: _Optional[str] = ..., status: _Optional[str] = ..., site: _Optional[_Union[Site, _Mapping]] = ..., cluster: _Optional[_Union[Cluster, _Mapping]] = ..., role: _Optional[_Union[Role, _Mapping]] = ..., device: _Optional[_Union[Device, _Mapping]] = ..., platform: _Optional[_Union[Platform, _Mapping]] = ..., primary_ip4: _Optional[_Union[IPAddress, _Mapping]] = 
..., primary_ip6: _Optional[_Union[IPAddress, _Mapping]] = ..., vcpus: _Optional[int] = ..., memory: _Optional[int] = ..., disk: _Optional[int] = ..., description: _Optional[str] = ..., comments: _Optional[str] = ..., tags: _Optional[_Iterable[_Union[Tag, _Mapping]]] = ...) -> None: ... - -class VMInterface(_message.Message): - __slots__ = ("virtual_machine", "name", "enabled", "mtu", "mac_address", "description", "tags") - VIRTUAL_MACHINE_FIELD_NUMBER: _ClassVar[int] - NAME_FIELD_NUMBER: _ClassVar[int] - ENABLED_FIELD_NUMBER: _ClassVar[int] - MTU_FIELD_NUMBER: _ClassVar[int] - MAC_ADDRESS_FIELD_NUMBER: _ClassVar[int] - DESCRIPTION_FIELD_NUMBER: _ClassVar[int] - TAGS_FIELD_NUMBER: _ClassVar[int] - virtual_machine: VirtualMachine - name: str - enabled: bool - mtu: int - mac_address: str - description: str - tags: _containers.RepeatedCompositeFieldContainer[Tag] - def __init__(self, virtual_machine: _Optional[_Union[VirtualMachine, _Mapping]] = ..., name: _Optional[str] = ..., enabled: bool = ..., mtu: _Optional[int] = ..., mac_address: _Optional[str] = ..., description: _Optional[str] = ..., tags: _Optional[_Iterable[_Union[Tag, _Mapping]]] = ...) -> None: ... - -class VirtualDisk(_message.Message): - __slots__ = ("virtual_machine", "name", "size", "description", "tags") - VIRTUAL_MACHINE_FIELD_NUMBER: _ClassVar[int] - NAME_FIELD_NUMBER: _ClassVar[int] - SIZE_FIELD_NUMBER: _ClassVar[int] - DESCRIPTION_FIELD_NUMBER: _ClassVar[int] - TAGS_FIELD_NUMBER: _ClassVar[int] - virtual_machine: VirtualMachine - name: str - size: int - description: str - tags: _containers.RepeatedCompositeFieldContainer[Tag] - def __init__(self, virtual_machine: _Optional[_Union[VirtualMachine, _Mapping]] = ..., name: _Optional[str] = ..., size: _Optional[int] = ..., description: _Optional[str] = ..., tags: _Optional[_Iterable[_Union[Tag, _Mapping]]] = ...) -> None: ... 
- -class IPAddress(_message.Message): - __slots__ = ("address", "interface", "status", "role", "dns_name", "description", "comments", "tags") - ADDRESS_FIELD_NUMBER: _ClassVar[int] - INTERFACE_FIELD_NUMBER: _ClassVar[int] - STATUS_FIELD_NUMBER: _ClassVar[int] - ROLE_FIELD_NUMBER: _ClassVar[int] - DNS_NAME_FIELD_NUMBER: _ClassVar[int] - DESCRIPTION_FIELD_NUMBER: _ClassVar[int] - COMMENTS_FIELD_NUMBER: _ClassVar[int] - TAGS_FIELD_NUMBER: _ClassVar[int] - address: str - interface: Interface - status: str - role: str - dns_name: str - description: str - comments: str - tags: _containers.RepeatedCompositeFieldContainer[Tag] - def __init__(self, address: _Optional[str] = ..., interface: _Optional[_Union[Interface, _Mapping]] = ..., status: _Optional[str] = ..., role: _Optional[str] = ..., dns_name: _Optional[str] = ..., description: _Optional[str] = ..., comments: _Optional[str] = ..., tags: _Optional[_Iterable[_Union[Tag, _Mapping]]] = ...) -> None: ... - -class DeviceType(_message.Message): - __slots__ = ("model", "slug", "manufacturer", "description", "comments", "part_number", "tags") - MODEL_FIELD_NUMBER: _ClassVar[int] - SLUG_FIELD_NUMBER: _ClassVar[int] - MANUFACTURER_FIELD_NUMBER: _ClassVar[int] - DESCRIPTION_FIELD_NUMBER: _ClassVar[int] - COMMENTS_FIELD_NUMBER: _ClassVar[int] - PART_NUMBER_FIELD_NUMBER: _ClassVar[int] - TAGS_FIELD_NUMBER: _ClassVar[int] - model: str - slug: str - manufacturer: Manufacturer - description: str - comments: str - part_number: str - tags: _containers.RepeatedCompositeFieldContainer[Tag] - def __init__(self, model: _Optional[str] = ..., slug: _Optional[str] = ..., manufacturer: _Optional[_Union[Manufacturer, _Mapping]] = ..., description: _Optional[str] = ..., comments: _Optional[str] = ..., part_number: _Optional[str] = ..., tags: _Optional[_Iterable[_Union[Tag, _Mapping]]] = ...) -> None: ... 
- -class Manufacturer(_message.Message): - __slots__ = ("name", "slug", "description", "tags") - NAME_FIELD_NUMBER: _ClassVar[int] - SLUG_FIELD_NUMBER: _ClassVar[int] - DESCRIPTION_FIELD_NUMBER: _ClassVar[int] - TAGS_FIELD_NUMBER: _ClassVar[int] - name: str - slug: str - description: str - tags: _containers.RepeatedCompositeFieldContainer[Tag] - def __init__(self, name: _Optional[str] = ..., slug: _Optional[str] = ..., description: _Optional[str] = ..., tags: _Optional[_Iterable[_Union[Tag, _Mapping]]] = ...) -> None: ... - -class Platform(_message.Message): - __slots__ = ("name", "slug", "manufacturer", "description", "tags") - NAME_FIELD_NUMBER: _ClassVar[int] - SLUG_FIELD_NUMBER: _ClassVar[int] - MANUFACTURER_FIELD_NUMBER: _ClassVar[int] - DESCRIPTION_FIELD_NUMBER: _ClassVar[int] - TAGS_FIELD_NUMBER: _ClassVar[int] - name: str - slug: str - manufacturer: Manufacturer - description: str - tags: _containers.RepeatedCompositeFieldContainer[Tag] - def __init__(self, name: _Optional[str] = ..., slug: _Optional[str] = ..., manufacturer: _Optional[_Union[Manufacturer, _Mapping]] = ..., description: _Optional[str] = ..., tags: _Optional[_Iterable[_Union[Tag, _Mapping]]] = ...) -> None: ... 
- -class Prefix(_message.Message): - __slots__ = ("prefix", "site", "status", "is_pool", "mark_utilized", "description", "comments", "tags") - PREFIX_FIELD_NUMBER: _ClassVar[int] - SITE_FIELD_NUMBER: _ClassVar[int] - STATUS_FIELD_NUMBER: _ClassVar[int] - IS_POOL_FIELD_NUMBER: _ClassVar[int] - MARK_UTILIZED_FIELD_NUMBER: _ClassVar[int] - DESCRIPTION_FIELD_NUMBER: _ClassVar[int] - COMMENTS_FIELD_NUMBER: _ClassVar[int] - TAGS_FIELD_NUMBER: _ClassVar[int] - prefix: str - site: Site - status: str - is_pool: bool - mark_utilized: bool - description: str - comments: str - tags: _containers.RepeatedCompositeFieldContainer[Tag] - def __init__(self, prefix: _Optional[str] = ..., site: _Optional[_Union[Site, _Mapping]] = ..., status: _Optional[str] = ..., is_pool: bool = ..., mark_utilized: bool = ..., description: _Optional[str] = ..., comments: _Optional[str] = ..., tags: _Optional[_Iterable[_Union[Tag, _Mapping]]] = ...) -> None: ... - -class Role(_message.Message): - __slots__ = ("name", "slug", "color", "description", "tags") - NAME_FIELD_NUMBER: _ClassVar[int] - SLUG_FIELD_NUMBER: _ClassVar[int] - COLOR_FIELD_NUMBER: _ClassVar[int] - DESCRIPTION_FIELD_NUMBER: _ClassVar[int] - TAGS_FIELD_NUMBER: _ClassVar[int] - name: str - slug: str - color: str - description: str - tags: _containers.RepeatedCompositeFieldContainer[Tag] - def __init__(self, name: _Optional[str] = ..., slug: _Optional[str] = ..., color: _Optional[str] = ..., description: _Optional[str] = ..., tags: _Optional[_Iterable[_Union[Tag, _Mapping]]] = ...) -> None: ... 
- -class Site(_message.Message): - __slots__ = ("name", "slug", "status", "facility", "time_zone", "description", "comments", "tags") - NAME_FIELD_NUMBER: _ClassVar[int] - SLUG_FIELD_NUMBER: _ClassVar[int] - STATUS_FIELD_NUMBER: _ClassVar[int] - FACILITY_FIELD_NUMBER: _ClassVar[int] - TIME_ZONE_FIELD_NUMBER: _ClassVar[int] - DESCRIPTION_FIELD_NUMBER: _ClassVar[int] - COMMENTS_FIELD_NUMBER: _ClassVar[int] - TAGS_FIELD_NUMBER: _ClassVar[int] - name: str - slug: str - status: str - facility: str - time_zone: str - description: str - comments: str - tags: _containers.RepeatedCompositeFieldContainer[Tag] - def __init__(self, name: _Optional[str] = ..., slug: _Optional[str] = ..., status: _Optional[str] = ..., facility: _Optional[str] = ..., time_zone: _Optional[str] = ..., description: _Optional[str] = ..., comments: _Optional[str] = ..., tags: _Optional[_Iterable[_Union[Tag, _Mapping]]] = ...) -> None: ... - -class Tag(_message.Message): - __slots__ = ("name", "slug", "color") - NAME_FIELD_NUMBER: _ClassVar[int] - SLUG_FIELD_NUMBER: _ClassVar[int] - COLOR_FIELD_NUMBER: _ClassVar[int] - name: str - slug: str - color: str - def __init__(self, name: _Optional[str] = ..., slug: _Optional[str] = ..., color: _Optional[str] = ...) -> None: ... 
- -class Entity(_message.Message): - __slots__ = ("site", "platform", "manufacturer", "device", "device_role", "device_type", "interface", "ip_address", "prefix", "cluster_group", "cluster_type", "cluster", "virtual_machine", "vminterface", "virtual_disk", "timestamp") - SITE_FIELD_NUMBER: _ClassVar[int] - PLATFORM_FIELD_NUMBER: _ClassVar[int] - MANUFACTURER_FIELD_NUMBER: _ClassVar[int] - DEVICE_FIELD_NUMBER: _ClassVar[int] - DEVICE_ROLE_FIELD_NUMBER: _ClassVar[int] - DEVICE_TYPE_FIELD_NUMBER: _ClassVar[int] - INTERFACE_FIELD_NUMBER: _ClassVar[int] - IP_ADDRESS_FIELD_NUMBER: _ClassVar[int] - PREFIX_FIELD_NUMBER: _ClassVar[int] - CLUSTER_GROUP_FIELD_NUMBER: _ClassVar[int] - CLUSTER_TYPE_FIELD_NUMBER: _ClassVar[int] - CLUSTER_FIELD_NUMBER: _ClassVar[int] - VIRTUAL_MACHINE_FIELD_NUMBER: _ClassVar[int] - VMINTERFACE_FIELD_NUMBER: _ClassVar[int] - VIRTUAL_DISK_FIELD_NUMBER: _ClassVar[int] - TIMESTAMP_FIELD_NUMBER: _ClassVar[int] - site: Site - platform: Platform - manufacturer: Manufacturer - device: Device - device_role: Role - device_type: DeviceType - interface: Interface - ip_address: IPAddress - prefix: Prefix - cluster_group: ClusterGroup - cluster_type: ClusterType - cluster: Cluster - virtual_machine: VirtualMachine - vminterface: VMInterface - virtual_disk: VirtualDisk - timestamp: _timestamp_pb2.Timestamp - def __init__(self, site: _Optional[_Union[Site, _Mapping]] = ..., platform: _Optional[_Union[Platform, _Mapping]] = ..., manufacturer: _Optional[_Union[Manufacturer, _Mapping]] = ..., device: _Optional[_Union[Device, _Mapping]] = ..., device_role: _Optional[_Union[Role, _Mapping]] = ..., device_type: _Optional[_Union[DeviceType, _Mapping]] = ..., interface: _Optional[_Union[Interface, _Mapping]] = ..., ip_address: _Optional[_Union[IPAddress, _Mapping]] = ..., prefix: _Optional[_Union[Prefix, _Mapping]] = ..., cluster_group: _Optional[_Union[ClusterGroup, _Mapping]] = ..., cluster_type: _Optional[_Union[ClusterType, _Mapping]] = ..., cluster: 
_Optional[_Union[Cluster, _Mapping]] = ..., virtual_machine: _Optional[_Union[VirtualMachine, _Mapping]] = ..., vminterface: _Optional[_Union[VMInterface, _Mapping]] = ..., virtual_disk: _Optional[_Union[VirtualDisk, _Mapping]] = ..., timestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... - -class IngestRequest(_message.Message): - __slots__ = ("stream", "entities", "id", "producer_app_name", "producer_app_version", "sdk_name", "sdk_version") - STREAM_FIELD_NUMBER: _ClassVar[int] - ENTITIES_FIELD_NUMBER: _ClassVar[int] - ID_FIELD_NUMBER: _ClassVar[int] - PRODUCER_APP_NAME_FIELD_NUMBER: _ClassVar[int] - PRODUCER_APP_VERSION_FIELD_NUMBER: _ClassVar[int] - SDK_NAME_FIELD_NUMBER: _ClassVar[int] - SDK_VERSION_FIELD_NUMBER: _ClassVar[int] - stream: str - entities: _containers.RepeatedCompositeFieldContainer[Entity] - id: str - producer_app_name: str - producer_app_version: str - sdk_name: str - sdk_version: str - def __init__(self, stream: _Optional[str] = ..., entities: _Optional[_Iterable[_Union[Entity, _Mapping]]] = ..., id: _Optional[str] = ..., producer_app_name: _Optional[str] = ..., producer_app_version: _Optional[str] = ..., sdk_name: _Optional[str] = ..., sdk_version: _Optional[str] = ...) -> None: ... - -class IngestResponse(_message.Message): - __slots__ = ("errors",) - ERRORS_FIELD_NUMBER: _ClassVar[int] - errors: _containers.RepeatedScalarFieldContainer[str] - def __init__(self, errors: _Optional[_Iterable[str]] = ...) -> None: ... diff --git a/netbox_diode_plugin/reconciler/sdk/v1/ingester_pb2_grpc.py b/netbox_diode_plugin/reconciler/sdk/v1/ingester_pb2_grpc.py deleted file mode 100644 index 98798c1..0000000 --- a/netbox_diode_plugin/reconciler/sdk/v1/ingester_pb2_grpc.py +++ /dev/null @@ -1,70 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-"""Client and server classes corresponding to protobuf-defined services.""" -import grpc - -from netbox_diode_plugin.reconciler.sdk.v1 import ingester_pb2 as diode_dot_v1_dot_ingester__pb2 - - -class IngesterServiceStub(object): - """Ingestion API - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.Ingest = channel.unary_unary( - '/diode.v1.IngesterService/Ingest', - request_serializer=diode_dot_v1_dot_ingester__pb2.IngestRequest.SerializeToString, - response_deserializer=diode_dot_v1_dot_ingester__pb2.IngestResponse.FromString, - ) - - -class IngesterServiceServicer(object): - """Ingestion API - """ - - def Ingest(self, request, context): - """Ingests data into the system - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_IngesterServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - 'Ingest': grpc.unary_unary_rpc_method_handler( - servicer.Ingest, - request_deserializer=diode_dot_v1_dot_ingester__pb2.IngestRequest.FromString, - response_serializer=diode_dot_v1_dot_ingester__pb2.IngestResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'diode.v1.IngesterService', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) - - - # This class is part of an EXPERIMENTAL API. 
-class IngesterService(object): - """Ingestion API - """ - - @staticmethod - def Ingest(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/diode.v1.IngesterService/Ingest', - diode_dot_v1_dot_ingester__pb2.IngestRequest.SerializeToString, - diode_dot_v1_dot_ingester__pb2.IngestResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.py b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.py deleted file mode 100644 index e811c9c..0000000 --- a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: diode/v1/reconciler.proto -# Protobuf Python Version: 5.26.1 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from netbox_diode_plugin.reconciler.sdk.v1 import ingester_pb2 as diode_dot_v1_dot_ingester__pb2 -from netbox_diode_plugin.reconciler.sdk.validate import validate_pb2 as validate_dot_validate__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19\x64iode/v1/reconciler.proto\x12\x08\x64iode.v1\x1a\x17\x64iode/v1/ingester.proto\x1a\x17validate/validate.proto\"\xbe\x02\n\x0eIngestionError\x12\x18\n\x07message\x18\x01 \x01(\tR\x07message\x12\x12\n\x04\x63ode\x18\x02 \x01(\x05R\x04\x63ode\x12:\n\x07\x64\x65tails\x18\x03 \x01(\x0b\x32 
.diode.v1.IngestionError.DetailsR\x07\x64\x65tails\x1a\xc1\x01\n\x07\x44\x65tails\x12\"\n\rchange_set_id\x18\x01 \x01(\tR\x0b\x63hangeSetId\x12\x16\n\x06result\x18\x02 \x01(\tR\x06result\x12>\n\x06\x65rrors\x18\x03 \x03(\x0b\x32&.diode.v1.IngestionError.Details.ErrorR\x06\x65rrors\x1a:\n\x05\x45rror\x12\x14\n\x05\x65rror\x18\x01 \x01(\tR\x05\x65rror\x12\x1b\n\tchange_id\x18\x02 \x01(\tR\x08\x63hangeId\"\x97\x01\n\x10IngestionMetrics\x12\x14\n\x05total\x18\x01 \x01(\x05R\x05total\x12\x16\n\x06queued\x18\x02 \x01(\x05R\x06queued\x12\x1e\n\nreconciled\x18\x03 \x01(\x05R\nreconciled\x12\x16\n\x06\x66\x61iled\x18\x04 \x01(\x05R\x06\x66\x61iled\x12\x1d\n\nno_changes\x18\x05 \x01(\x05R\tnoChanges\"\x9e\x01\n\tChangeSet\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04\x64\x61ta\x18\x02 \x01(\x0cR\x04\x64\x61ta\x12 \n\tbranch_id\x18\x03 \x01(\tH\x00R\x08\x62ranchId\x88\x01\x01\x12*\n\x0e\x64\x65viation_name\x18\x04 \x01(\tH\x01R\rdeviationName\x88\x01\x01\x42\x0c\n\n_branch_idB\x11\n\x0f_deviation_name\"\x8e\x04\n\x0cIngestionLog\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1f\n\tdata_type\x18\x02 \x01(\tB\x02\x18\x01R\x08\x64\x61taType\x12%\n\x05state\x18\x03 \x01(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1d\n\nrequest_id\x18\x04 \x01(\tR\trequestId\x12!\n\x0cingestion_ts\x18\x05 \x01(\x03R\x0bingestionTs\x12*\n\x11producer_app_name\x18\x06 \x01(\tR\x0fproducerAppName\x12\x30\n\x14producer_app_version\x18\x07 \x01(\tR\x12producerAppVersion\x12\x19\n\x08sdk_name\x18\x08 \x01(\tR\x07sdkName\x12\x1f\n\x0bsdk_version\x18\t \x01(\tR\nsdkVersion\x12(\n\x06\x65ntity\x18\n \x01(\x0b\x32\x10.diode.v1.EntityR\x06\x65ntity\x12.\n\x05\x65rror\x18\x0b \x01(\x0b\x32\x18.diode.v1.IngestionErrorR\x05\x65rror\x12\x32\n\nchange_set\x18\x0c \x01(\x0b\x32\x13.diode.v1.ChangeSetR\tchangeSet\x12\x1f\n\x0bobject_type\x18\r \x01(\tR\nobjectType\x12\x1b\n\tsource_ts\x18\x0e \x01(\x03R\x08sourceTs\"\xff\x02\n\x1cRetrieveIngestionLogsRequest\x12 \n\tpage_size\x18\x01 
\x01(\x05H\x00R\x08pageSize\x88\x01\x01\x12*\n\x05state\x18\x02 \x01(\x0e\x32\x0f.diode.v1.StateH\x01R\x05state\x88\x01\x01\x12\x1f\n\tdata_type\x18\x03 \x01(\tB\x02\x18\x01R\x08\x64\x61taType\x12\x1d\n\nrequest_id\x18\x04 \x01(\tR\trequestId\x12,\n\x12ingestion_ts_start\x18\x05 \x01(\x03R\x10ingestionTsStart\x12(\n\x10ingestion_ts_end\x18\x06 \x01(\x03R\x0eingestionTsEnd\x12\x1d\n\npage_token\x18\x07 \x01(\tR\tpageToken\x12!\n\x0conly_metrics\x18\x08 \x01(\x08R\x0bonlyMetrics\x12\x1f\n\x0bobject_type\x18\t \x01(\tR\nobjectTypeB\x0c\n\n_page_sizeB\x08\n\x06_state\"\xa9\x01\n\x1dRetrieveIngestionLogsResponse\x12*\n\x04logs\x18\x01 \x03(\x0b\x32\x16.diode.v1.IngestionLogR\x04logs\x12\x34\n\x07metrics\x18\x02 \x01(\x0b\x32\x1a.diode.v1.IngestionMetricsR\x07metrics\x12&\n\x0fnext_page_token\x18\x03 \x01(\tR\rnextPageToken\"\xa7\x02\n\x19RetrieveDeviationsRequest\x12 \n\tpage_size\x18\x01 \x01(\x05H\x00R\x08pageSize\x88\x01\x01\x12\x1d\n\npage_token\x18\x02 \x01(\tR\tpageToken\x12,\n\x12ingestion_ts_start\x18\x03 \x01(\x03R\x10ingestionTsStart\x12(\n\x10ingestion_ts_end\x18\x04 \x01(\x03R\x0eingestionTsEnd\x12%\n\x05state\x18\x05 \x03(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1f\n\x0bobject_type\x18\x06 \x03(\tR\nobjectType\x12\x1b\n\tbranch_id\x18\x07 \x03(\tR\x08\x62ranchIdB\x0c\n\n_page_size\">\n\x0e\x44\x65viationError\x12\x18\n\x07message\x18\x01 \x01(\tR\x07message\x12\x12\n\x04\x63ode\x18\x02 \x01(\x05R\x04\x63ode\"\xba\x01\n\x06\x43hange\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1f\n\x0bobject_type\x18\x02 \x01(\tR\nobjectType\x12\x30\n\x14object_primary_value\x18\x03 \x01(\tR\x12objectPrimaryValue\x12\x1f\n\x0b\x63hange_type\x18\x04 \x01(\tR\nchangeType\x12\x16\n\x06\x62\x65\x66ore\x18\x05 \x01(\x0cR\x06\x62\x65\x66ore\x12\x14\n\x05\x61\x66ter\x18\x06 \x01(\x0cR\x05\x61\x66ter\"\xbc\x03\n\tDeviation\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12!\n\x0cingestion_ts\x18\x02 \x01(\x03R\x0bingestionTs\x12$\n\x0elast_update_ts\x18\x03 
\x01(\x03R\x0clastUpdateTs\x12\x12\n\x04name\x18\x04 \x01(\tR\x04name\x12\x16\n\x06source\x18\x05 \x01(\tR\x06source\x12%\n\x05state\x18\x06 \x01(\x0e\x32\x0f.diode.v1.StateR\x05state\x12\x1f\n\x0bobject_type\x18\x07 \x01(\tR\nobjectType\x12 \n\tbranch_id\x18\x08 \x01(\tH\x00R\x08\x62ranchId\x88\x01\x01\x12\x39\n\x0fingested_entity\x18\t \x01(\x0b\x32\x10.diode.v1.EntityR\x0eingestedEntity\x12.\n\x05\x65rror\x18\n \x01(\x0b\x32\x18.diode.v1.DeviationErrorR\x05\x65rror\x12*\n\x07\x63hanges\x18\x0b \x03(\x0b\x32\x10.diode.v1.ChangeR\x07\x63hanges\x12\x1b\n\tsource_ts\x18\x0c \x01(\x03R\x08sourceTsB\x0c\n\n_branch_id\"y\n\x1aRetrieveDeviationsResponse\x12\x33\n\ndeviations\x18\x01 \x03(\x0b\x32\x13.diode.v1.DeviationR\ndeviations\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\".\n\x1cRetrieveDeviationByIDRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\"R\n\x1dRetrieveDeviationByIDResponse\x12\x31\n\tdeviation\x18\x01 \x01(\x0b\x32\x13.diode.v1.DeviationR\tdeviation*w\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\n\n\x06QUEUED\x10\x01\x12\x08\n\x04OPEN\x10\x02\x12\x0b\n\x07\x41PPLIED\x10\x03\x12\n\n\x06\x46\x41ILED\x10\x04\x12\x0e\n\nNO_CHANGES\x10\x05\x12\x0b\n\x07IGNORED\x10\x06\x12\x0b\n\x07\x45RRORED\x10\x07\x32\xcd\x02\n\x11ReconcilerService\x12m\n\x15RetrieveIngestionLogs\x12&.diode.v1.RetrieveIngestionLogsRequest\x1a\'.diode.v1.RetrieveIngestionLogsResponse\"\x03\x88\x02\x01\x12_\n\x12RetrieveDeviations\x12#.diode.v1.RetrieveDeviationsRequest\x1a$.diode.v1.RetrieveDeviationsResponse\x12h\n\x15RetrieveDeviationByID\x12&.diode.v1.RetrieveDeviationByIDRequest\x1a\'.diode.v1.RetrieveDeviationByIDResponseBDZBgithub.com/netboxlabs/diode/diode-server/gen/diode/v1/reconcilerpbb\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'diode.v1.reconciler_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - _globals['DESCRIPTOR']._loaded_options = None - 
_globals['DESCRIPTOR']._serialized_options = b'ZBgithub.com/netboxlabs/diode/diode-server/gen/diode/v1/reconcilerpb' - _globals['_INGESTIONLOG'].fields_by_name['data_type']._loaded_options = None - _globals['_INGESTIONLOG'].fields_by_name['data_type']._serialized_options = b'\030\001' - _globals['_RETRIEVEINGESTIONLOGSREQUEST'].fields_by_name['data_type']._loaded_options = None - _globals['_RETRIEVEINGESTIONLOGSREQUEST'].fields_by_name['data_type']._serialized_options = b'\030\001' - _globals['_RECONCILERSERVICE'].methods_by_name['RetrieveIngestionLogs']._loaded_options = None - _globals['_RECONCILERSERVICE'].methods_by_name['RetrieveIngestionLogs']._serialized_options = b'\210\002\001' - _globals['_STATE']._serialized_start=3065 - _globals['_STATE']._serialized_end=3184 - _globals['_INGESTIONERROR']._serialized_start=90 - _globals['_INGESTIONERROR']._serialized_end=408 - _globals['_INGESTIONERROR_DETAILS']._serialized_start=215 - _globals['_INGESTIONERROR_DETAILS']._serialized_end=408 - _globals['_INGESTIONERROR_DETAILS_ERROR']._serialized_start=350 - _globals['_INGESTIONERROR_DETAILS_ERROR']._serialized_end=408 - _globals['_INGESTIONMETRICS']._serialized_start=411 - _globals['_INGESTIONMETRICS']._serialized_end=562 - _globals['_CHANGESET']._serialized_start=565 - _globals['_CHANGESET']._serialized_end=723 - _globals['_INGESTIONLOG']._serialized_start=726 - _globals['_INGESTIONLOG']._serialized_end=1252 - _globals['_RETRIEVEINGESTIONLOGSREQUEST']._serialized_start=1255 - _globals['_RETRIEVEINGESTIONLOGSREQUEST']._serialized_end=1638 - _globals['_RETRIEVEINGESTIONLOGSRESPONSE']._serialized_start=1641 - _globals['_RETRIEVEINGESTIONLOGSRESPONSE']._serialized_end=1810 - _globals['_RETRIEVEDEVIATIONSREQUEST']._serialized_start=1813 - _globals['_RETRIEVEDEVIATIONSREQUEST']._serialized_end=2108 - _globals['_DEVIATIONERROR']._serialized_start=2110 - _globals['_DEVIATIONERROR']._serialized_end=2172 - _globals['_CHANGE']._serialized_start=2175 - 
_globals['_CHANGE']._serialized_end=2361 - _globals['_DEVIATION']._serialized_start=2364 - _globals['_DEVIATION']._serialized_end=2808 - _globals['_RETRIEVEDEVIATIONSRESPONSE']._serialized_start=2810 - _globals['_RETRIEVEDEVIATIONSRESPONSE']._serialized_end=2931 - _globals['_RETRIEVEDEVIATIONBYIDREQUEST']._serialized_start=2933 - _globals['_RETRIEVEDEVIATIONBYIDREQUEST']._serialized_end=2979 - _globals['_RETRIEVEDEVIATIONBYIDRESPONSE']._serialized_start=2981 - _globals['_RETRIEVEDEVIATIONBYIDRESPONSE']._serialized_end=3063 - _globals['_RECONCILERSERVICE']._serialized_start=3187 - _globals['_RECONCILERSERVICE']._serialized_end=3520 -# @@protoc_insertion_point(module_scope) diff --git a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.pyi b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.pyi deleted file mode 100644 index 200ffb3..0000000 --- a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2.pyi +++ /dev/null @@ -1,234 +0,0 @@ -from netbox_diode_plugin.reconciler.sdk.v1 import ingester_pb2 as _ingester_pb2 -from netbox_diode_plugin.reconciler.sdk.validate import validate_pb2 as _validate_pb2 -from google.protobuf.internal import containers as _containers -from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union - -DESCRIPTOR: _descriptor.FileDescriptor - -class State(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - STATE_UNSPECIFIED: _ClassVar[State] - QUEUED: _ClassVar[State] - OPEN: _ClassVar[State] - APPLIED: _ClassVar[State] - FAILED: _ClassVar[State] - NO_CHANGES: _ClassVar[State] - IGNORED: _ClassVar[State] - ERRORED: _ClassVar[State] -STATE_UNSPECIFIED: State -QUEUED: State -OPEN: State -APPLIED: State -FAILED: State -NO_CHANGES: State -IGNORED: State -ERRORED: State - -class 
IngestionError(_message.Message): - __slots__ = ("message", "code", "details") - class Details(_message.Message): - __slots__ = ("change_set_id", "result", "errors") - class Error(_message.Message): - __slots__ = ("error", "change_id") - ERROR_FIELD_NUMBER: _ClassVar[int] - CHANGE_ID_FIELD_NUMBER: _ClassVar[int] - error: str - change_id: str - def __init__(self, error: _Optional[str] = ..., change_id: _Optional[str] = ...) -> None: ... - CHANGE_SET_ID_FIELD_NUMBER: _ClassVar[int] - RESULT_FIELD_NUMBER: _ClassVar[int] - ERRORS_FIELD_NUMBER: _ClassVar[int] - change_set_id: str - result: str - errors: _containers.RepeatedCompositeFieldContainer[IngestionError.Details.Error] - def __init__(self, change_set_id: _Optional[str] = ..., result: _Optional[str] = ..., errors: _Optional[_Iterable[_Union[IngestionError.Details.Error, _Mapping]]] = ...) -> None: ... - MESSAGE_FIELD_NUMBER: _ClassVar[int] - CODE_FIELD_NUMBER: _ClassVar[int] - DETAILS_FIELD_NUMBER: _ClassVar[int] - message: str - code: int - details: IngestionError.Details - def __init__(self, message: _Optional[str] = ..., code: _Optional[int] = ..., details: _Optional[_Union[IngestionError.Details, _Mapping]] = ...) -> None: ... - -class IngestionMetrics(_message.Message): - __slots__ = ("total", "queued", "reconciled", "failed", "no_changes") - TOTAL_FIELD_NUMBER: _ClassVar[int] - QUEUED_FIELD_NUMBER: _ClassVar[int] - RECONCILED_FIELD_NUMBER: _ClassVar[int] - FAILED_FIELD_NUMBER: _ClassVar[int] - NO_CHANGES_FIELD_NUMBER: _ClassVar[int] - total: int - queued: int - reconciled: int - failed: int - no_changes: int - def __init__(self, total: _Optional[int] = ..., queued: _Optional[int] = ..., reconciled: _Optional[int] = ..., failed: _Optional[int] = ..., no_changes: _Optional[int] = ...) -> None: ... 
- -class ChangeSet(_message.Message): - __slots__ = ("id", "data", "branch_id", "deviation_name") - ID_FIELD_NUMBER: _ClassVar[int] - DATA_FIELD_NUMBER: _ClassVar[int] - BRANCH_ID_FIELD_NUMBER: _ClassVar[int] - DEVIATION_NAME_FIELD_NUMBER: _ClassVar[int] - id: str - data: bytes - branch_id: str - deviation_name: str - def __init__(self, id: _Optional[str] = ..., data: _Optional[bytes] = ..., branch_id: _Optional[str] = ..., deviation_name: _Optional[str] = ...) -> None: ... - -class IngestionLog(_message.Message): - __slots__ = ("id", "data_type", "state", "request_id", "ingestion_ts", "producer_app_name", "producer_app_version", "sdk_name", "sdk_version", "entity", "error", "change_set", "object_type", "source_ts") - ID_FIELD_NUMBER: _ClassVar[int] - DATA_TYPE_FIELD_NUMBER: _ClassVar[int] - STATE_FIELD_NUMBER: _ClassVar[int] - REQUEST_ID_FIELD_NUMBER: _ClassVar[int] - INGESTION_TS_FIELD_NUMBER: _ClassVar[int] - PRODUCER_APP_NAME_FIELD_NUMBER: _ClassVar[int] - PRODUCER_APP_VERSION_FIELD_NUMBER: _ClassVar[int] - SDK_NAME_FIELD_NUMBER: _ClassVar[int] - SDK_VERSION_FIELD_NUMBER: _ClassVar[int] - ENTITY_FIELD_NUMBER: _ClassVar[int] - ERROR_FIELD_NUMBER: _ClassVar[int] - CHANGE_SET_FIELD_NUMBER: _ClassVar[int] - OBJECT_TYPE_FIELD_NUMBER: _ClassVar[int] - SOURCE_TS_FIELD_NUMBER: _ClassVar[int] - id: str - data_type: str - state: State - request_id: str - ingestion_ts: int - producer_app_name: str - producer_app_version: str - sdk_name: str - sdk_version: str - entity: _ingester_pb2.Entity - error: IngestionError - change_set: ChangeSet - object_type: str - source_ts: int - def __init__(self, id: _Optional[str] = ..., data_type: _Optional[str] = ..., state: _Optional[_Union[State, str]] = ..., request_id: _Optional[str] = ..., ingestion_ts: _Optional[int] = ..., producer_app_name: _Optional[str] = ..., producer_app_version: _Optional[str] = ..., sdk_name: _Optional[str] = ..., sdk_version: _Optional[str] = ..., entity: _Optional[_Union[_ingester_pb2.Entity, _Mapping]] = 
..., error: _Optional[_Union[IngestionError, _Mapping]] = ..., change_set: _Optional[_Union[ChangeSet, _Mapping]] = ..., object_type: _Optional[str] = ..., source_ts: _Optional[int] = ...) -> None: ... - -class RetrieveIngestionLogsRequest(_message.Message): - __slots__ = ("page_size", "state", "data_type", "request_id", "ingestion_ts_start", "ingestion_ts_end", "page_token", "only_metrics", "object_type") - PAGE_SIZE_FIELD_NUMBER: _ClassVar[int] - STATE_FIELD_NUMBER: _ClassVar[int] - DATA_TYPE_FIELD_NUMBER: _ClassVar[int] - REQUEST_ID_FIELD_NUMBER: _ClassVar[int] - INGESTION_TS_START_FIELD_NUMBER: _ClassVar[int] - INGESTION_TS_END_FIELD_NUMBER: _ClassVar[int] - PAGE_TOKEN_FIELD_NUMBER: _ClassVar[int] - ONLY_METRICS_FIELD_NUMBER: _ClassVar[int] - OBJECT_TYPE_FIELD_NUMBER: _ClassVar[int] - page_size: int - state: State - data_type: str - request_id: str - ingestion_ts_start: int - ingestion_ts_end: int - page_token: str - only_metrics: bool - object_type: str - def __init__(self, page_size: _Optional[int] = ..., state: _Optional[_Union[State, str]] = ..., data_type: _Optional[str] = ..., request_id: _Optional[str] = ..., ingestion_ts_start: _Optional[int] = ..., ingestion_ts_end: _Optional[int] = ..., page_token: _Optional[str] = ..., only_metrics: bool = ..., object_type: _Optional[str] = ...) -> None: ... - -class RetrieveIngestionLogsResponse(_message.Message): - __slots__ = ("logs", "metrics", "next_page_token") - LOGS_FIELD_NUMBER: _ClassVar[int] - METRICS_FIELD_NUMBER: _ClassVar[int] - NEXT_PAGE_TOKEN_FIELD_NUMBER: _ClassVar[int] - logs: _containers.RepeatedCompositeFieldContainer[IngestionLog] - metrics: IngestionMetrics - next_page_token: str - def __init__(self, logs: _Optional[_Iterable[_Union[IngestionLog, _Mapping]]] = ..., metrics: _Optional[_Union[IngestionMetrics, _Mapping]] = ..., next_page_token: _Optional[str] = ...) -> None: ... 
- -class RetrieveDeviationsRequest(_message.Message): - __slots__ = ("page_size", "page_token", "ingestion_ts_start", "ingestion_ts_end", "state", "object_type", "branch_id") - PAGE_SIZE_FIELD_NUMBER: _ClassVar[int] - PAGE_TOKEN_FIELD_NUMBER: _ClassVar[int] - INGESTION_TS_START_FIELD_NUMBER: _ClassVar[int] - INGESTION_TS_END_FIELD_NUMBER: _ClassVar[int] - STATE_FIELD_NUMBER: _ClassVar[int] - OBJECT_TYPE_FIELD_NUMBER: _ClassVar[int] - BRANCH_ID_FIELD_NUMBER: _ClassVar[int] - page_size: int - page_token: str - ingestion_ts_start: int - ingestion_ts_end: int - state: _containers.RepeatedScalarFieldContainer[State] - object_type: _containers.RepeatedScalarFieldContainer[str] - branch_id: _containers.RepeatedScalarFieldContainer[str] - def __init__(self, page_size: _Optional[int] = ..., page_token: _Optional[str] = ..., ingestion_ts_start: _Optional[int] = ..., ingestion_ts_end: _Optional[int] = ..., state: _Optional[_Iterable[_Union[State, str]]] = ..., object_type: _Optional[_Iterable[str]] = ..., branch_id: _Optional[_Iterable[str]] = ...) -> None: ... - -class DeviationError(_message.Message): - __slots__ = ("message", "code") - MESSAGE_FIELD_NUMBER: _ClassVar[int] - CODE_FIELD_NUMBER: _ClassVar[int] - message: str - code: int - def __init__(self, message: _Optional[str] = ..., code: _Optional[int] = ...) -> None: ... 
- -class Change(_message.Message): - __slots__ = ("id", "object_type", "object_primary_value", "change_type", "before", "after") - ID_FIELD_NUMBER: _ClassVar[int] - OBJECT_TYPE_FIELD_NUMBER: _ClassVar[int] - OBJECT_PRIMARY_VALUE_FIELD_NUMBER: _ClassVar[int] - CHANGE_TYPE_FIELD_NUMBER: _ClassVar[int] - BEFORE_FIELD_NUMBER: _ClassVar[int] - AFTER_FIELD_NUMBER: _ClassVar[int] - id: str - object_type: str - object_primary_value: str - change_type: str - before: bytes - after: bytes - def __init__(self, id: _Optional[str] = ..., object_type: _Optional[str] = ..., object_primary_value: _Optional[str] = ..., change_type: _Optional[str] = ..., before: _Optional[bytes] = ..., after: _Optional[bytes] = ...) -> None: ... - -class Deviation(_message.Message): - __slots__ = ("id", "ingestion_ts", "last_update_ts", "name", "source", "state", "object_type", "branch_id", "ingested_entity", "error", "changes", "source_ts") - ID_FIELD_NUMBER: _ClassVar[int] - INGESTION_TS_FIELD_NUMBER: _ClassVar[int] - LAST_UPDATE_TS_FIELD_NUMBER: _ClassVar[int] - NAME_FIELD_NUMBER: _ClassVar[int] - SOURCE_FIELD_NUMBER: _ClassVar[int] - STATE_FIELD_NUMBER: _ClassVar[int] - OBJECT_TYPE_FIELD_NUMBER: _ClassVar[int] - BRANCH_ID_FIELD_NUMBER: _ClassVar[int] - INGESTED_ENTITY_FIELD_NUMBER: _ClassVar[int] - ERROR_FIELD_NUMBER: _ClassVar[int] - CHANGES_FIELD_NUMBER: _ClassVar[int] - SOURCE_TS_FIELD_NUMBER: _ClassVar[int] - id: str - ingestion_ts: int - last_update_ts: int - name: str - source: str - state: State - object_type: str - branch_id: str - ingested_entity: _ingester_pb2.Entity - error: DeviationError - changes: _containers.RepeatedCompositeFieldContainer[Change] - source_ts: int - def __init__(self, id: _Optional[str] = ..., ingestion_ts: _Optional[int] = ..., last_update_ts: _Optional[int] = ..., name: _Optional[str] = ..., source: _Optional[str] = ..., state: _Optional[_Union[State, str]] = ..., object_type: _Optional[str] = ..., branch_id: _Optional[str] = ..., ingested_entity: 
_Optional[_Union[_ingester_pb2.Entity, _Mapping]] = ..., error: _Optional[_Union[DeviationError, _Mapping]] = ..., changes: _Optional[_Iterable[_Union[Change, _Mapping]]] = ..., source_ts: _Optional[int] = ...) -> None: ... - -class RetrieveDeviationsResponse(_message.Message): - __slots__ = ("deviations", "next_page_token") - DEVIATIONS_FIELD_NUMBER: _ClassVar[int] - NEXT_PAGE_TOKEN_FIELD_NUMBER: _ClassVar[int] - deviations: _containers.RepeatedCompositeFieldContainer[Deviation] - next_page_token: str - def __init__(self, deviations: _Optional[_Iterable[_Union[Deviation, _Mapping]]] = ..., next_page_token: _Optional[str] = ...) -> None: ... - -class RetrieveDeviationByIDRequest(_message.Message): - __slots__ = ("id",) - ID_FIELD_NUMBER: _ClassVar[int] - id: str - def __init__(self, id: _Optional[str] = ...) -> None: ... - -class RetrieveDeviationByIDResponse(_message.Message): - __slots__ = ("deviation",) - DEVIATION_FIELD_NUMBER: _ClassVar[int] - deviation: Deviation - def __init__(self, deviation: _Optional[_Union[Deviation, _Mapping]] = ...) -> None: ... diff --git a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2_grpc.py b/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2_grpc.py deleted file mode 100644 index 73d37b7..0000000 --- a/netbox_diode_plugin/reconciler/sdk/v1/reconciler_pb2_grpc.py +++ /dev/null @@ -1,138 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc - -from netbox_diode_plugin.reconciler.sdk.v1 import reconciler_pb2 as diode_dot_v1_dot_reconciler__pb2 - - -class ReconcilerServiceStub(object): - """Reconciler service API - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.RetrieveIngestionLogs = channel.unary_unary( - '/diode.v1.ReconcilerService/RetrieveIngestionLogs', - request_serializer=diode_dot_v1_dot_reconciler__pb2.RetrieveIngestionLogsRequest.SerializeToString, - response_deserializer=diode_dot_v1_dot_reconciler__pb2.RetrieveIngestionLogsResponse.FromString, - ) - self.RetrieveDeviations = channel.unary_unary( - '/diode.v1.ReconcilerService/RetrieveDeviations', - request_serializer=diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationsRequest.SerializeToString, - response_deserializer=diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationsResponse.FromString, - ) - self.RetrieveDeviationByID = channel.unary_unary( - '/diode.v1.ReconcilerService/RetrieveDeviationByID', - request_serializer=diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationByIDRequest.SerializeToString, - response_deserializer=diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationByIDResponse.FromString, - ) - - -class ReconcilerServiceServicer(object): - """Reconciler service API - """ - - def RetrieveIngestionLogs(self, request, context): - """Retrieves ingestion logs - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def RetrieveDeviations(self, request, context): - """Retrieve deviations - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def RetrieveDeviationByID(self, request, context): - """Retrieve deviation by ID - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_ReconcilerServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - 'RetrieveIngestionLogs': grpc.unary_unary_rpc_method_handler( - servicer.RetrieveIngestionLogs, - 
request_deserializer=diode_dot_v1_dot_reconciler__pb2.RetrieveIngestionLogsRequest.FromString, - response_serializer=diode_dot_v1_dot_reconciler__pb2.RetrieveIngestionLogsResponse.SerializeToString, - ), - 'RetrieveDeviations': grpc.unary_unary_rpc_method_handler( - servicer.RetrieveDeviations, - request_deserializer=diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationsRequest.FromString, - response_serializer=diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationsResponse.SerializeToString, - ), - 'RetrieveDeviationByID': grpc.unary_unary_rpc_method_handler( - servicer.RetrieveDeviationByID, - request_deserializer=diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationByIDRequest.FromString, - response_serializer=diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationByIDResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'diode.v1.ReconcilerService', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) - - - # This class is part of an EXPERIMENTAL API. 
-class ReconcilerService(object): - """Reconciler service API - """ - - @staticmethod - def RetrieveIngestionLogs(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/diode.v1.ReconcilerService/RetrieveIngestionLogs', - diode_dot_v1_dot_reconciler__pb2.RetrieveIngestionLogsRequest.SerializeToString, - diode_dot_v1_dot_reconciler__pb2.RetrieveIngestionLogsResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) - - @staticmethod - def RetrieveDeviations(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/diode.v1.ReconcilerService/RetrieveDeviations', - diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationsRequest.SerializeToString, - diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationsResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) - - @staticmethod - def RetrieveDeviationByID(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/diode.v1.ReconcilerService/RetrieveDeviationByID', - diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationByIDRequest.SerializeToString, - diode_dot_v1_dot_reconciler__pb2.RetrieveDeviationByIDResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/netbox_diode_plugin/reconciler/sdk/validate/__init__.py 
b/netbox_diode_plugin/reconciler/sdk/validate/__init__.py deleted file mode 100644 index feaaf48..0000000 --- a/netbox_diode_plugin/reconciler/sdk/validate/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode NetBox Plugin - Reconciler - SDK - Validate.""" diff --git a/netbox_diode_plugin/reconciler/sdk/validate/validate_pb2.py b/netbox_diode_plugin/reconciler/sdk/validate/validate_pb2.py deleted file mode 100644 index cb59def..0000000 --- a/netbox_diode_plugin/reconciler/sdk/validate/validate_pb2.py +++ /dev/null @@ -1,76 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: validate/validate.proto -# Protobuf Python Version: 5.26.1 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17validate/validate.proto\x12\x08validate\x1a google/protobuf/descriptor.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xc8\x08\n\nFieldRules\x12\x30\n\x07message\x18\x11 \x01(\x0b\x32\x16.validate.MessageRulesR\x07message\x12,\n\x05\x66loat\x18\x01 \x01(\x0b\x32\x14.validate.FloatRulesH\x00R\x05\x66loat\x12/\n\x06\x64ouble\x18\x02 \x01(\x0b\x32\x15.validate.DoubleRulesH\x00R\x06\x64ouble\x12,\n\x05int32\x18\x03 \x01(\x0b\x32\x14.validate.Int32RulesH\x00R\x05int32\x12,\n\x05int64\x18\x04 
\x01(\x0b\x32\x14.validate.Int64RulesH\x00R\x05int64\x12/\n\x06uint32\x18\x05 \x01(\x0b\x32\x15.validate.UInt32RulesH\x00R\x06uint32\x12/\n\x06uint64\x18\x06 \x01(\x0b\x32\x15.validate.UInt64RulesH\x00R\x06uint64\x12/\n\x06sint32\x18\x07 \x01(\x0b\x32\x15.validate.SInt32RulesH\x00R\x06sint32\x12/\n\x06sint64\x18\x08 \x01(\x0b\x32\x15.validate.SInt64RulesH\x00R\x06sint64\x12\x32\n\x07\x66ixed32\x18\t \x01(\x0b\x32\x16.validate.Fixed32RulesH\x00R\x07\x66ixed32\x12\x32\n\x07\x66ixed64\x18\n \x01(\x0b\x32\x16.validate.Fixed64RulesH\x00R\x07\x66ixed64\x12\x35\n\x08sfixed32\x18\x0b \x01(\x0b\x32\x17.validate.SFixed32RulesH\x00R\x08sfixed32\x12\x35\n\x08sfixed64\x18\x0c \x01(\x0b\x32\x17.validate.SFixed64RulesH\x00R\x08sfixed64\x12)\n\x04\x62ool\x18\r \x01(\x0b\x32\x13.validate.BoolRulesH\x00R\x04\x62ool\x12/\n\x06string\x18\x0e \x01(\x0b\x32\x15.validate.StringRulesH\x00R\x06string\x12,\n\x05\x62ytes\x18\x0f \x01(\x0b\x32\x14.validate.BytesRulesH\x00R\x05\x62ytes\x12)\n\x04\x65num\x18\x10 \x01(\x0b\x32\x13.validate.EnumRulesH\x00R\x04\x65num\x12\x35\n\x08repeated\x18\x12 \x01(\x0b\x32\x17.validate.RepeatedRulesH\x00R\x08repeated\x12&\n\x03map\x18\x13 \x01(\x0b\x32\x12.validate.MapRulesH\x00R\x03map\x12&\n\x03\x61ny\x18\x14 \x01(\x0b\x32\x12.validate.AnyRulesH\x00R\x03\x61ny\x12\x35\n\x08\x64uration\x18\x15 \x01(\x0b\x32\x17.validate.DurationRulesH\x00R\x08\x64uration\x12\x38\n\ttimestamp\x18\x16 \x01(\x0b\x32\x18.validate.TimestampRulesH\x00R\ttimestampB\x06\n\x04type\"\xb0\x01\n\nFloatRules\x12\x14\n\x05\x63onst\x18\x01 \x01(\x02R\x05\x63onst\x12\x0e\n\x02lt\x18\x02 \x01(\x02R\x02lt\x12\x10\n\x03lte\x18\x03 \x01(\x02R\x03lte\x12\x0e\n\x02gt\x18\x04 \x01(\x02R\x02gt\x12\x10\n\x03gte\x18\x05 \x01(\x02R\x03gte\x12\x0e\n\x02in\x18\x06 \x03(\x02R\x02in\x12\x15\n\x06not_in\x18\x07 \x03(\x02R\x05notIn\x12!\n\x0cignore_empty\x18\x08 \x01(\x08R\x0bignoreEmpty\"\xb1\x01\n\x0b\x44oubleRules\x12\x14\n\x05\x63onst\x18\x01 \x01(\x01R\x05\x63onst\x12\x0e\n\x02lt\x18\x02 
\x01(\x01R\x02lt\x12\x10\n\x03lte\x18\x03 \x01(\x01R\x03lte\x12\x0e\n\x02gt\x18\x04 \x01(\x01R\x02gt\x12\x10\n\x03gte\x18\x05 \x01(\x01R\x03gte\x12\x0e\n\x02in\x18\x06 \x03(\x01R\x02in\x12\x15\n\x06not_in\x18\x07 \x03(\x01R\x05notIn\x12!\n\x0cignore_empty\x18\x08 \x01(\x08R\x0bignoreEmpty\"\xb0\x01\n\nInt32Rules\x12\x14\n\x05\x63onst\x18\x01 \x01(\x05R\x05\x63onst\x12\x0e\n\x02lt\x18\x02 \x01(\x05R\x02lt\x12\x10\n\x03lte\x18\x03 \x01(\x05R\x03lte\x12\x0e\n\x02gt\x18\x04 \x01(\x05R\x02gt\x12\x10\n\x03gte\x18\x05 \x01(\x05R\x03gte\x12\x0e\n\x02in\x18\x06 \x03(\x05R\x02in\x12\x15\n\x06not_in\x18\x07 \x03(\x05R\x05notIn\x12!\n\x0cignore_empty\x18\x08 \x01(\x08R\x0bignoreEmpty\"\xb0\x01\n\nInt64Rules\x12\x14\n\x05\x63onst\x18\x01 \x01(\x03R\x05\x63onst\x12\x0e\n\x02lt\x18\x02 \x01(\x03R\x02lt\x12\x10\n\x03lte\x18\x03 \x01(\x03R\x03lte\x12\x0e\n\x02gt\x18\x04 \x01(\x03R\x02gt\x12\x10\n\x03gte\x18\x05 \x01(\x03R\x03gte\x12\x0e\n\x02in\x18\x06 \x03(\x03R\x02in\x12\x15\n\x06not_in\x18\x07 \x03(\x03R\x05notIn\x12!\n\x0cignore_empty\x18\x08 \x01(\x08R\x0bignoreEmpty\"\xb1\x01\n\x0bUInt32Rules\x12\x14\n\x05\x63onst\x18\x01 \x01(\rR\x05\x63onst\x12\x0e\n\x02lt\x18\x02 \x01(\rR\x02lt\x12\x10\n\x03lte\x18\x03 \x01(\rR\x03lte\x12\x0e\n\x02gt\x18\x04 \x01(\rR\x02gt\x12\x10\n\x03gte\x18\x05 \x01(\rR\x03gte\x12\x0e\n\x02in\x18\x06 \x03(\rR\x02in\x12\x15\n\x06not_in\x18\x07 \x03(\rR\x05notIn\x12!\n\x0cignore_empty\x18\x08 \x01(\x08R\x0bignoreEmpty\"\xb1\x01\n\x0bUInt64Rules\x12\x14\n\x05\x63onst\x18\x01 \x01(\x04R\x05\x63onst\x12\x0e\n\x02lt\x18\x02 \x01(\x04R\x02lt\x12\x10\n\x03lte\x18\x03 \x01(\x04R\x03lte\x12\x0e\n\x02gt\x18\x04 \x01(\x04R\x02gt\x12\x10\n\x03gte\x18\x05 \x01(\x04R\x03gte\x12\x0e\n\x02in\x18\x06 \x03(\x04R\x02in\x12\x15\n\x06not_in\x18\x07 \x03(\x04R\x05notIn\x12!\n\x0cignore_empty\x18\x08 \x01(\x08R\x0bignoreEmpty\"\xb1\x01\n\x0bSInt32Rules\x12\x14\n\x05\x63onst\x18\x01 \x01(\x11R\x05\x63onst\x12\x0e\n\x02lt\x18\x02 \x01(\x11R\x02lt\x12\x10\n\x03lte\x18\x03 
\x01(\x11R\x03lte\x12\x0e\n\x02gt\x18\x04 \x01(\x11R\x02gt\x12\x10\n\x03gte\x18\x05 \x01(\x11R\x03gte\x12\x0e\n\x02in\x18\x06 \x03(\x11R\x02in\x12\x15\n\x06not_in\x18\x07 \x03(\x11R\x05notIn\x12!\n\x0cignore_empty\x18\x08 \x01(\x08R\x0bignoreEmpty\"\xb1\x01\n\x0bSInt64Rules\x12\x14\n\x05\x63onst\x18\x01 \x01(\x12R\x05\x63onst\x12\x0e\n\x02lt\x18\x02 \x01(\x12R\x02lt\x12\x10\n\x03lte\x18\x03 \x01(\x12R\x03lte\x12\x0e\n\x02gt\x18\x04 \x01(\x12R\x02gt\x12\x10\n\x03gte\x18\x05 \x01(\x12R\x03gte\x12\x0e\n\x02in\x18\x06 \x03(\x12R\x02in\x12\x15\n\x06not_in\x18\x07 \x03(\x12R\x05notIn\x12!\n\x0cignore_empty\x18\x08 \x01(\x08R\x0bignoreEmpty\"\xb2\x01\n\x0c\x46ixed32Rules\x12\x14\n\x05\x63onst\x18\x01 \x01(\x07R\x05\x63onst\x12\x0e\n\x02lt\x18\x02 \x01(\x07R\x02lt\x12\x10\n\x03lte\x18\x03 \x01(\x07R\x03lte\x12\x0e\n\x02gt\x18\x04 \x01(\x07R\x02gt\x12\x10\n\x03gte\x18\x05 \x01(\x07R\x03gte\x12\x0e\n\x02in\x18\x06 \x03(\x07R\x02in\x12\x15\n\x06not_in\x18\x07 \x03(\x07R\x05notIn\x12!\n\x0cignore_empty\x18\x08 \x01(\x08R\x0bignoreEmpty\"\xb2\x01\n\x0c\x46ixed64Rules\x12\x14\n\x05\x63onst\x18\x01 \x01(\x06R\x05\x63onst\x12\x0e\n\x02lt\x18\x02 \x01(\x06R\x02lt\x12\x10\n\x03lte\x18\x03 \x01(\x06R\x03lte\x12\x0e\n\x02gt\x18\x04 \x01(\x06R\x02gt\x12\x10\n\x03gte\x18\x05 \x01(\x06R\x03gte\x12\x0e\n\x02in\x18\x06 \x03(\x06R\x02in\x12\x15\n\x06not_in\x18\x07 \x03(\x06R\x05notIn\x12!\n\x0cignore_empty\x18\x08 \x01(\x08R\x0bignoreEmpty\"\xb3\x01\n\rSFixed32Rules\x12\x14\n\x05\x63onst\x18\x01 \x01(\x0fR\x05\x63onst\x12\x0e\n\x02lt\x18\x02 \x01(\x0fR\x02lt\x12\x10\n\x03lte\x18\x03 \x01(\x0fR\x03lte\x12\x0e\n\x02gt\x18\x04 \x01(\x0fR\x02gt\x12\x10\n\x03gte\x18\x05 \x01(\x0fR\x03gte\x12\x0e\n\x02in\x18\x06 \x03(\x0fR\x02in\x12\x15\n\x06not_in\x18\x07 \x03(\x0fR\x05notIn\x12!\n\x0cignore_empty\x18\x08 \x01(\x08R\x0bignoreEmpty\"\xb3\x01\n\rSFixed64Rules\x12\x14\n\x05\x63onst\x18\x01 \x01(\x10R\x05\x63onst\x12\x0e\n\x02lt\x18\x02 \x01(\x10R\x02lt\x12\x10\n\x03lte\x18\x03 
\x01(\x10R\x03lte\x12\x0e\n\x02gt\x18\x04 \x01(\x10R\x02gt\x12\x10\n\x03gte\x18\x05 \x01(\x10R\x03gte\x12\x0e\n\x02in\x18\x06 \x03(\x10R\x02in\x12\x15\n\x06not_in\x18\x07 \x03(\x10R\x05notIn\x12!\n\x0cignore_empty\x18\x08 \x01(\x08R\x0bignoreEmpty\"!\n\tBoolRules\x12\x14\n\x05\x63onst\x18\x01 \x01(\x08R\x05\x63onst\"\xd4\x05\n\x0bStringRules\x12\x14\n\x05\x63onst\x18\x01 \x01(\tR\x05\x63onst\x12\x10\n\x03len\x18\x13 \x01(\x04R\x03len\x12\x17\n\x07min_len\x18\x02 \x01(\x04R\x06minLen\x12\x17\n\x07max_len\x18\x03 \x01(\x04R\x06maxLen\x12\x1b\n\tlen_bytes\x18\x14 \x01(\x04R\x08lenBytes\x12\x1b\n\tmin_bytes\x18\x04 \x01(\x04R\x08minBytes\x12\x1b\n\tmax_bytes\x18\x05 \x01(\x04R\x08maxBytes\x12\x18\n\x07pattern\x18\x06 \x01(\tR\x07pattern\x12\x16\n\x06prefix\x18\x07 \x01(\tR\x06prefix\x12\x16\n\x06suffix\x18\x08 \x01(\tR\x06suffix\x12\x1a\n\x08\x63ontains\x18\t \x01(\tR\x08\x63ontains\x12!\n\x0cnot_contains\x18\x17 \x01(\tR\x0bnotContains\x12\x0e\n\x02in\x18\n \x03(\tR\x02in\x12\x15\n\x06not_in\x18\x0b \x03(\tR\x05notIn\x12\x16\n\x05\x65mail\x18\x0c \x01(\x08H\x00R\x05\x65mail\x12\x1c\n\x08hostname\x18\r \x01(\x08H\x00R\x08hostname\x12\x10\n\x02ip\x18\x0e \x01(\x08H\x00R\x02ip\x12\x14\n\x04ipv4\x18\x0f \x01(\x08H\x00R\x04ipv4\x12\x14\n\x04ipv6\x18\x10 \x01(\x08H\x00R\x04ipv6\x12\x12\n\x03uri\x18\x11 \x01(\x08H\x00R\x03uri\x12\x19\n\x07uri_ref\x18\x12 \x01(\x08H\x00R\x06uriRef\x12\x1a\n\x07\x61\x64\x64ress\x18\x15 \x01(\x08H\x00R\x07\x61\x64\x64ress\x12\x14\n\x04uuid\x18\x16 \x01(\x08H\x00R\x04uuid\x12@\n\x10well_known_regex\x18\x18 \x01(\x0e\x32\x14.validate.KnownRegexH\x00R\x0ewellKnownRegex\x12\x1c\n\x06strict\x18\x19 \x01(\x08:\x04trueR\x06strict\x12!\n\x0cignore_empty\x18\x1a \x01(\x08R\x0bignoreEmptyB\x0c\n\nwell_known\"\xe2\x02\n\nBytesRules\x12\x14\n\x05\x63onst\x18\x01 \x01(\x0cR\x05\x63onst\x12\x10\n\x03len\x18\r \x01(\x04R\x03len\x12\x17\n\x07min_len\x18\x02 \x01(\x04R\x06minLen\x12\x17\n\x07max_len\x18\x03 \x01(\x04R\x06maxLen\x12\x18\n\x07pattern\x18\x04 
\x01(\tR\x07pattern\x12\x16\n\x06prefix\x18\x05 \x01(\x0cR\x06prefix\x12\x16\n\x06suffix\x18\x06 \x01(\x0cR\x06suffix\x12\x1a\n\x08\x63ontains\x18\x07 \x01(\x0cR\x08\x63ontains\x12\x0e\n\x02in\x18\x08 \x03(\x0cR\x02in\x12\x15\n\x06not_in\x18\t \x03(\x0cR\x05notIn\x12\x10\n\x02ip\x18\n \x01(\x08H\x00R\x02ip\x12\x14\n\x04ipv4\x18\x0b \x01(\x08H\x00R\x04ipv4\x12\x14\n\x04ipv6\x18\x0c \x01(\x08H\x00R\x04ipv6\x12!\n\x0cignore_empty\x18\x0e \x01(\x08R\x0bignoreEmptyB\x0c\n\nwell_known\"k\n\tEnumRules\x12\x14\n\x05\x63onst\x18\x01 \x01(\x05R\x05\x63onst\x12!\n\x0c\x64\x65\x66ined_only\x18\x02 \x01(\x08R\x0b\x64\x65\x66inedOnly\x12\x0e\n\x02in\x18\x03 \x03(\x05R\x02in\x12\x15\n\x06not_in\x18\x04 \x03(\x05R\x05notIn\">\n\x0cMessageRules\x12\x12\n\x04skip\x18\x01 \x01(\x08R\x04skip\x12\x1a\n\x08required\x18\x02 \x01(\x08R\x08required\"\xb0\x01\n\rRepeatedRules\x12\x1b\n\tmin_items\x18\x01 \x01(\x04R\x08minItems\x12\x1b\n\tmax_items\x18\x02 \x01(\x04R\x08maxItems\x12\x16\n\x06unique\x18\x03 \x01(\x08R\x06unique\x12*\n\x05items\x18\x04 \x01(\x0b\x32\x14.validate.FieldRulesR\x05items\x12!\n\x0cignore_empty\x18\x05 \x01(\x08R\x0bignoreEmpty\"\xdc\x01\n\x08MapRules\x12\x1b\n\tmin_pairs\x18\x01 \x01(\x04R\x08minPairs\x12\x1b\n\tmax_pairs\x18\x02 \x01(\x04R\x08maxPairs\x12\x1b\n\tno_sparse\x18\x03 \x01(\x08R\x08noSparse\x12(\n\x04keys\x18\x04 \x01(\x0b\x32\x14.validate.FieldRulesR\x04keys\x12,\n\x06values\x18\x05 \x01(\x0b\x32\x14.validate.FieldRulesR\x06values\x12!\n\x0cignore_empty\x18\x06 \x01(\x08R\x0bignoreEmpty\"M\n\x08\x41nyRules\x12\x1a\n\x08required\x18\x01 \x01(\x08R\x08required\x12\x0e\n\x02in\x18\x02 \x03(\tR\x02in\x12\x15\n\x06not_in\x18\x03 \x03(\tR\x05notIn\"\xe9\x02\n\rDurationRules\x12\x1a\n\x08required\x18\x01 \x01(\x08R\x08required\x12/\n\x05\x63onst\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationR\x05\x63onst\x12)\n\x02lt\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationR\x02lt\x12+\n\x03lte\x18\x04 
\x01(\x0b\x32\x19.google.protobuf.DurationR\x03lte\x12)\n\x02gt\x18\x05 \x01(\x0b\x32\x19.google.protobuf.DurationR\x02gt\x12+\n\x03gte\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationR\x03gte\x12)\n\x02in\x18\x07 \x03(\x0b\x32\x19.google.protobuf.DurationR\x02in\x12\x30\n\x06not_in\x18\x08 \x03(\x0b\x32\x19.google.protobuf.DurationR\x05notIn\"\xf3\x02\n\x0eTimestampRules\x12\x1a\n\x08required\x18\x01 \x01(\x08R\x08required\x12\x30\n\x05\x63onst\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x05\x63onst\x12*\n\x02lt\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x02lt\x12,\n\x03lte\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x03lte\x12*\n\x02gt\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x02gt\x12,\n\x03gte\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x03gte\x12\x15\n\x06lt_now\x18\x07 \x01(\x08R\x05ltNow\x12\x15\n\x06gt_now\x18\x08 \x01(\x08R\x05gtNow\x12\x31\n\x06within\x18\t \x01(\x0b\x32\x19.google.protobuf.DurationR\x06within*F\n\nKnownRegex\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x14\n\x10HTTP_HEADER_NAME\x10\x01\x12\x15\n\x11HTTP_HEADER_VALUE\x10\x02:<\n\x08\x64isabled\x12\x1f.google.protobuf.MessageOptions\x18\xaf\x08 \x01(\x08R\x08\x64isabled::\n\x07ignored\x12\x1f.google.protobuf.MessageOptions\x18\xb0\x08 \x01(\x08R\x07ignored::\n\x08required\x12\x1d.google.protobuf.OneofOptions\x18\xaf\x08 \x01(\x08R\x08required:J\n\x05rules\x12\x1d.google.protobuf.FieldOptions\x18\xaf\x08 \x01(\x0b\x32\x14.validate.FieldRulesR\x05rulesBP\n\x1aio.envoyproxy.pgv.validateZ2github.com/envoyproxy/protoc-gen-validate/validate') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'validate.validate_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = b'\n\032io.envoyproxy.pgv.validateZ2github.com/envoyproxy/protoc-gen-validate/validate' - 
_globals['_KNOWNREGEX']._serialized_start=5909 - _globals['_KNOWNREGEX']._serialized_end=5979 - _globals['_FIELDRULES']._serialized_start=137 - _globals['_FIELDRULES']._serialized_end=1233 - _globals['_FLOATRULES']._serialized_start=1236 - _globals['_FLOATRULES']._serialized_end=1412 - _globals['_DOUBLERULES']._serialized_start=1415 - _globals['_DOUBLERULES']._serialized_end=1592 - _globals['_INT32RULES']._serialized_start=1595 - _globals['_INT32RULES']._serialized_end=1771 - _globals['_INT64RULES']._serialized_start=1774 - _globals['_INT64RULES']._serialized_end=1950 - _globals['_UINT32RULES']._serialized_start=1953 - _globals['_UINT32RULES']._serialized_end=2130 - _globals['_UINT64RULES']._serialized_start=2133 - _globals['_UINT64RULES']._serialized_end=2310 - _globals['_SINT32RULES']._serialized_start=2313 - _globals['_SINT32RULES']._serialized_end=2490 - _globals['_SINT64RULES']._serialized_start=2493 - _globals['_SINT64RULES']._serialized_end=2670 - _globals['_FIXED32RULES']._serialized_start=2673 - _globals['_FIXED32RULES']._serialized_end=2851 - _globals['_FIXED64RULES']._serialized_start=2854 - _globals['_FIXED64RULES']._serialized_end=3032 - _globals['_SFIXED32RULES']._serialized_start=3035 - _globals['_SFIXED32RULES']._serialized_end=3214 - _globals['_SFIXED64RULES']._serialized_start=3217 - _globals['_SFIXED64RULES']._serialized_end=3396 - _globals['_BOOLRULES']._serialized_start=3398 - _globals['_BOOLRULES']._serialized_end=3431 - _globals['_STRINGRULES']._serialized_start=3434 - _globals['_STRINGRULES']._serialized_end=4158 - _globals['_BYTESRULES']._serialized_start=4161 - _globals['_BYTESRULES']._serialized_end=4515 - _globals['_ENUMRULES']._serialized_start=4517 - _globals['_ENUMRULES']._serialized_end=4624 - _globals['_MESSAGERULES']._serialized_start=4626 - _globals['_MESSAGERULES']._serialized_end=4688 - _globals['_REPEATEDRULES']._serialized_start=4691 - _globals['_REPEATEDRULES']._serialized_end=4867 - 
_globals['_MAPRULES']._serialized_start=4870 - _globals['_MAPRULES']._serialized_end=5090 - _globals['_ANYRULES']._serialized_start=5092 - _globals['_ANYRULES']._serialized_end=5169 - _globals['_DURATIONRULES']._serialized_start=5172 - _globals['_DURATIONRULES']._serialized_end=5533 - _globals['_TIMESTAMPRULES']._serialized_start=5536 - _globals['_TIMESTAMPRULES']._serialized_end=5907 -# @@protoc_insertion_point(module_scope) diff --git a/netbox_diode_plugin/reconciler/sdk/validate/validate_pb2.pyi b/netbox_diode_plugin/reconciler/sdk/validate/validate_pb2.pyi deleted file mode 100644 index 2286693..0000000 --- a/netbox_diode_plugin/reconciler/sdk/validate/validate_pb2.pyi +++ /dev/null @@ -1,494 +0,0 @@ -from google.protobuf import descriptor_pb2 as _descriptor_pb2 -from google.protobuf import duration_pb2 as _duration_pb2 -from google.protobuf import timestamp_pb2 as _timestamp_pb2 -from google.protobuf.internal import containers as _containers -from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union - -DESCRIPTOR: _descriptor.FileDescriptor - -class KnownRegex(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - UNKNOWN: _ClassVar[KnownRegex] - HTTP_HEADER_NAME: _ClassVar[KnownRegex] - HTTP_HEADER_VALUE: _ClassVar[KnownRegex] -UNKNOWN: KnownRegex -HTTP_HEADER_NAME: KnownRegex -HTTP_HEADER_VALUE: KnownRegex -DISABLED_FIELD_NUMBER: _ClassVar[int] -disabled: _descriptor.FieldDescriptor -IGNORED_FIELD_NUMBER: _ClassVar[int] -ignored: _descriptor.FieldDescriptor -REQUIRED_FIELD_NUMBER: _ClassVar[int] -required: _descriptor.FieldDescriptor -RULES_FIELD_NUMBER: _ClassVar[int] -rules: _descriptor.FieldDescriptor - -class FieldRules(_message.Message): - __slots__ = ("message", "float", "double", "int32", 
"int64", "uint32", "uint64", "sint32", "sint64", "fixed32", "fixed64", "sfixed32", "sfixed64", "bool", "string", "bytes", "enum", "repeated", "map", "any", "duration", "timestamp") - MESSAGE_FIELD_NUMBER: _ClassVar[int] - FLOAT_FIELD_NUMBER: _ClassVar[int] - DOUBLE_FIELD_NUMBER: _ClassVar[int] - INT32_FIELD_NUMBER: _ClassVar[int] - INT64_FIELD_NUMBER: _ClassVar[int] - UINT32_FIELD_NUMBER: _ClassVar[int] - UINT64_FIELD_NUMBER: _ClassVar[int] - SINT32_FIELD_NUMBER: _ClassVar[int] - SINT64_FIELD_NUMBER: _ClassVar[int] - FIXED32_FIELD_NUMBER: _ClassVar[int] - FIXED64_FIELD_NUMBER: _ClassVar[int] - SFIXED32_FIELD_NUMBER: _ClassVar[int] - SFIXED64_FIELD_NUMBER: _ClassVar[int] - BOOL_FIELD_NUMBER: _ClassVar[int] - STRING_FIELD_NUMBER: _ClassVar[int] - BYTES_FIELD_NUMBER: _ClassVar[int] - ENUM_FIELD_NUMBER: _ClassVar[int] - REPEATED_FIELD_NUMBER: _ClassVar[int] - MAP_FIELD_NUMBER: _ClassVar[int] - ANY_FIELD_NUMBER: _ClassVar[int] - DURATION_FIELD_NUMBER: _ClassVar[int] - TIMESTAMP_FIELD_NUMBER: _ClassVar[int] - message: MessageRules - float: FloatRules - double: DoubleRules - int32: Int32Rules - int64: Int64Rules - uint32: UInt32Rules - uint64: UInt64Rules - sint32: SInt32Rules - sint64: SInt64Rules - fixed32: Fixed32Rules - fixed64: Fixed64Rules - sfixed32: SFixed32Rules - sfixed64: SFixed64Rules - bool: BoolRules - string: StringRules - bytes: BytesRules - enum: EnumRules - repeated: RepeatedRules - map: MapRules - any: AnyRules - duration: DurationRules - timestamp: TimestampRules - def __init__(self, message: _Optional[_Union[MessageRules, _Mapping]] = ..., float: _Optional[_Union[FloatRules, _Mapping]] = ..., double: _Optional[_Union[DoubleRules, _Mapping]] = ..., int32: _Optional[_Union[Int32Rules, _Mapping]] = ..., int64: _Optional[_Union[Int64Rules, _Mapping]] = ..., uint32: _Optional[_Union[UInt32Rules, _Mapping]] = ..., uint64: _Optional[_Union[UInt64Rules, _Mapping]] = ..., sint32: _Optional[_Union[SInt32Rules, _Mapping]] = ..., sint64: 
_Optional[_Union[SInt64Rules, _Mapping]] = ..., fixed32: _Optional[_Union[Fixed32Rules, _Mapping]] = ..., fixed64: _Optional[_Union[Fixed64Rules, _Mapping]] = ..., sfixed32: _Optional[_Union[SFixed32Rules, _Mapping]] = ..., sfixed64: _Optional[_Union[SFixed64Rules, _Mapping]] = ..., bool: _Optional[_Union[BoolRules, _Mapping]] = ..., string: _Optional[_Union[StringRules, _Mapping]] = ..., bytes: _Optional[_Union[BytesRules, _Mapping]] = ..., enum: _Optional[_Union[EnumRules, _Mapping]] = ..., repeated: _Optional[_Union[RepeatedRules, _Mapping]] = ..., map: _Optional[_Union[MapRules, _Mapping]] = ..., any: _Optional[_Union[AnyRules, _Mapping]] = ..., duration: _Optional[_Union[DurationRules, _Mapping]] = ..., timestamp: _Optional[_Union[TimestampRules, _Mapping]] = ...) -> None: ... - -class FloatRules(_message.Message): - __slots__ = ("const", "lt", "lte", "gt", "gte", "not_in", "ignore_empty") - CONST_FIELD_NUMBER: _ClassVar[int] - LT_FIELD_NUMBER: _ClassVar[int] - LTE_FIELD_NUMBER: _ClassVar[int] - GT_FIELD_NUMBER: _ClassVar[int] - GTE_FIELD_NUMBER: _ClassVar[int] - IN_FIELD_NUMBER: _ClassVar[int] - NOT_IN_FIELD_NUMBER: _ClassVar[int] - IGNORE_EMPTY_FIELD_NUMBER: _ClassVar[int] - const: float - lt: float - lte: float - gt: float - gte: float - not_in: _containers.RepeatedScalarFieldContainer[float] - ignore_empty: bool - def __init__(self, const: _Optional[float] = ..., lt: _Optional[float] = ..., lte: _Optional[float] = ..., gt: _Optional[float] = ..., gte: _Optional[float] = ..., not_in: _Optional[_Iterable[float]] = ..., ignore_empty: bool = ..., **kwargs) -> None: ... 
- -class DoubleRules(_message.Message): - __slots__ = ("const", "lt", "lte", "gt", "gte", "not_in", "ignore_empty") - CONST_FIELD_NUMBER: _ClassVar[int] - LT_FIELD_NUMBER: _ClassVar[int] - LTE_FIELD_NUMBER: _ClassVar[int] - GT_FIELD_NUMBER: _ClassVar[int] - GTE_FIELD_NUMBER: _ClassVar[int] - IN_FIELD_NUMBER: _ClassVar[int] - NOT_IN_FIELD_NUMBER: _ClassVar[int] - IGNORE_EMPTY_FIELD_NUMBER: _ClassVar[int] - const: float - lt: float - lte: float - gt: float - gte: float - not_in: _containers.RepeatedScalarFieldContainer[float] - ignore_empty: bool - def __init__(self, const: _Optional[float] = ..., lt: _Optional[float] = ..., lte: _Optional[float] = ..., gt: _Optional[float] = ..., gte: _Optional[float] = ..., not_in: _Optional[_Iterable[float]] = ..., ignore_empty: bool = ..., **kwargs) -> None: ... - -class Int32Rules(_message.Message): - __slots__ = ("const", "lt", "lte", "gt", "gte", "not_in", "ignore_empty") - CONST_FIELD_NUMBER: _ClassVar[int] - LT_FIELD_NUMBER: _ClassVar[int] - LTE_FIELD_NUMBER: _ClassVar[int] - GT_FIELD_NUMBER: _ClassVar[int] - GTE_FIELD_NUMBER: _ClassVar[int] - IN_FIELD_NUMBER: _ClassVar[int] - NOT_IN_FIELD_NUMBER: _ClassVar[int] - IGNORE_EMPTY_FIELD_NUMBER: _ClassVar[int] - const: int - lt: int - lte: int - gt: int - gte: int - not_in: _containers.RepeatedScalarFieldContainer[int] - ignore_empty: bool - def __init__(self, const: _Optional[int] = ..., lt: _Optional[int] = ..., lte: _Optional[int] = ..., gt: _Optional[int] = ..., gte: _Optional[int] = ..., not_in: _Optional[_Iterable[int]] = ..., ignore_empty: bool = ..., **kwargs) -> None: ... 
- -class Int64Rules(_message.Message): - __slots__ = ("const", "lt", "lte", "gt", "gte", "not_in", "ignore_empty") - CONST_FIELD_NUMBER: _ClassVar[int] - LT_FIELD_NUMBER: _ClassVar[int] - LTE_FIELD_NUMBER: _ClassVar[int] - GT_FIELD_NUMBER: _ClassVar[int] - GTE_FIELD_NUMBER: _ClassVar[int] - IN_FIELD_NUMBER: _ClassVar[int] - NOT_IN_FIELD_NUMBER: _ClassVar[int] - IGNORE_EMPTY_FIELD_NUMBER: _ClassVar[int] - const: int - lt: int - lte: int - gt: int - gte: int - not_in: _containers.RepeatedScalarFieldContainer[int] - ignore_empty: bool - def __init__(self, const: _Optional[int] = ..., lt: _Optional[int] = ..., lte: _Optional[int] = ..., gt: _Optional[int] = ..., gte: _Optional[int] = ..., not_in: _Optional[_Iterable[int]] = ..., ignore_empty: bool = ..., **kwargs) -> None: ... - -class UInt32Rules(_message.Message): - __slots__ = ("const", "lt", "lte", "gt", "gte", "not_in", "ignore_empty") - CONST_FIELD_NUMBER: _ClassVar[int] - LT_FIELD_NUMBER: _ClassVar[int] - LTE_FIELD_NUMBER: _ClassVar[int] - GT_FIELD_NUMBER: _ClassVar[int] - GTE_FIELD_NUMBER: _ClassVar[int] - IN_FIELD_NUMBER: _ClassVar[int] - NOT_IN_FIELD_NUMBER: _ClassVar[int] - IGNORE_EMPTY_FIELD_NUMBER: _ClassVar[int] - const: int - lt: int - lte: int - gt: int - gte: int - not_in: _containers.RepeatedScalarFieldContainer[int] - ignore_empty: bool - def __init__(self, const: _Optional[int] = ..., lt: _Optional[int] = ..., lte: _Optional[int] = ..., gt: _Optional[int] = ..., gte: _Optional[int] = ..., not_in: _Optional[_Iterable[int]] = ..., ignore_empty: bool = ..., **kwargs) -> None: ... 
- -class UInt64Rules(_message.Message): - __slots__ = ("const", "lt", "lte", "gt", "gte", "not_in", "ignore_empty") - CONST_FIELD_NUMBER: _ClassVar[int] - LT_FIELD_NUMBER: _ClassVar[int] - LTE_FIELD_NUMBER: _ClassVar[int] - GT_FIELD_NUMBER: _ClassVar[int] - GTE_FIELD_NUMBER: _ClassVar[int] - IN_FIELD_NUMBER: _ClassVar[int] - NOT_IN_FIELD_NUMBER: _ClassVar[int] - IGNORE_EMPTY_FIELD_NUMBER: _ClassVar[int] - const: int - lt: int - lte: int - gt: int - gte: int - not_in: _containers.RepeatedScalarFieldContainer[int] - ignore_empty: bool - def __init__(self, const: _Optional[int] = ..., lt: _Optional[int] = ..., lte: _Optional[int] = ..., gt: _Optional[int] = ..., gte: _Optional[int] = ..., not_in: _Optional[_Iterable[int]] = ..., ignore_empty: bool = ..., **kwargs) -> None: ... - -class SInt32Rules(_message.Message): - __slots__ = ("const", "lt", "lte", "gt", "gte", "not_in", "ignore_empty") - CONST_FIELD_NUMBER: _ClassVar[int] - LT_FIELD_NUMBER: _ClassVar[int] - LTE_FIELD_NUMBER: _ClassVar[int] - GT_FIELD_NUMBER: _ClassVar[int] - GTE_FIELD_NUMBER: _ClassVar[int] - IN_FIELD_NUMBER: _ClassVar[int] - NOT_IN_FIELD_NUMBER: _ClassVar[int] - IGNORE_EMPTY_FIELD_NUMBER: _ClassVar[int] - const: int - lt: int - lte: int - gt: int - gte: int - not_in: _containers.RepeatedScalarFieldContainer[int] - ignore_empty: bool - def __init__(self, const: _Optional[int] = ..., lt: _Optional[int] = ..., lte: _Optional[int] = ..., gt: _Optional[int] = ..., gte: _Optional[int] = ..., not_in: _Optional[_Iterable[int]] = ..., ignore_empty: bool = ..., **kwargs) -> None: ... 
- -class SInt64Rules(_message.Message): - __slots__ = ("const", "lt", "lte", "gt", "gte", "not_in", "ignore_empty") - CONST_FIELD_NUMBER: _ClassVar[int] - LT_FIELD_NUMBER: _ClassVar[int] - LTE_FIELD_NUMBER: _ClassVar[int] - GT_FIELD_NUMBER: _ClassVar[int] - GTE_FIELD_NUMBER: _ClassVar[int] - IN_FIELD_NUMBER: _ClassVar[int] - NOT_IN_FIELD_NUMBER: _ClassVar[int] - IGNORE_EMPTY_FIELD_NUMBER: _ClassVar[int] - const: int - lt: int - lte: int - gt: int - gte: int - not_in: _containers.RepeatedScalarFieldContainer[int] - ignore_empty: bool - def __init__(self, const: _Optional[int] = ..., lt: _Optional[int] = ..., lte: _Optional[int] = ..., gt: _Optional[int] = ..., gte: _Optional[int] = ..., not_in: _Optional[_Iterable[int]] = ..., ignore_empty: bool = ..., **kwargs) -> None: ... - -class Fixed32Rules(_message.Message): - __slots__ = ("const", "lt", "lte", "gt", "gte", "not_in", "ignore_empty") - CONST_FIELD_NUMBER: _ClassVar[int] - LT_FIELD_NUMBER: _ClassVar[int] - LTE_FIELD_NUMBER: _ClassVar[int] - GT_FIELD_NUMBER: _ClassVar[int] - GTE_FIELD_NUMBER: _ClassVar[int] - IN_FIELD_NUMBER: _ClassVar[int] - NOT_IN_FIELD_NUMBER: _ClassVar[int] - IGNORE_EMPTY_FIELD_NUMBER: _ClassVar[int] - const: int - lt: int - lte: int - gt: int - gte: int - not_in: _containers.RepeatedScalarFieldContainer[int] - ignore_empty: bool - def __init__(self, const: _Optional[int] = ..., lt: _Optional[int] = ..., lte: _Optional[int] = ..., gt: _Optional[int] = ..., gte: _Optional[int] = ..., not_in: _Optional[_Iterable[int]] = ..., ignore_empty: bool = ..., **kwargs) -> None: ... 
- -class Fixed64Rules(_message.Message): - __slots__ = ("const", "lt", "lte", "gt", "gte", "not_in", "ignore_empty") - CONST_FIELD_NUMBER: _ClassVar[int] - LT_FIELD_NUMBER: _ClassVar[int] - LTE_FIELD_NUMBER: _ClassVar[int] - GT_FIELD_NUMBER: _ClassVar[int] - GTE_FIELD_NUMBER: _ClassVar[int] - IN_FIELD_NUMBER: _ClassVar[int] - NOT_IN_FIELD_NUMBER: _ClassVar[int] - IGNORE_EMPTY_FIELD_NUMBER: _ClassVar[int] - const: int - lt: int - lte: int - gt: int - gte: int - not_in: _containers.RepeatedScalarFieldContainer[int] - ignore_empty: bool - def __init__(self, const: _Optional[int] = ..., lt: _Optional[int] = ..., lte: _Optional[int] = ..., gt: _Optional[int] = ..., gte: _Optional[int] = ..., not_in: _Optional[_Iterable[int]] = ..., ignore_empty: bool = ..., **kwargs) -> None: ... - -class SFixed32Rules(_message.Message): - __slots__ = ("const", "lt", "lte", "gt", "gte", "not_in", "ignore_empty") - CONST_FIELD_NUMBER: _ClassVar[int] - LT_FIELD_NUMBER: _ClassVar[int] - LTE_FIELD_NUMBER: _ClassVar[int] - GT_FIELD_NUMBER: _ClassVar[int] - GTE_FIELD_NUMBER: _ClassVar[int] - IN_FIELD_NUMBER: _ClassVar[int] - NOT_IN_FIELD_NUMBER: _ClassVar[int] - IGNORE_EMPTY_FIELD_NUMBER: _ClassVar[int] - const: int - lt: int - lte: int - gt: int - gte: int - not_in: _containers.RepeatedScalarFieldContainer[int] - ignore_empty: bool - def __init__(self, const: _Optional[int] = ..., lt: _Optional[int] = ..., lte: _Optional[int] = ..., gt: _Optional[int] = ..., gte: _Optional[int] = ..., not_in: _Optional[_Iterable[int]] = ..., ignore_empty: bool = ..., **kwargs) -> None: ... 
- -class SFixed64Rules(_message.Message): - __slots__ = ("const", "lt", "lte", "gt", "gte", "not_in", "ignore_empty") - CONST_FIELD_NUMBER: _ClassVar[int] - LT_FIELD_NUMBER: _ClassVar[int] - LTE_FIELD_NUMBER: _ClassVar[int] - GT_FIELD_NUMBER: _ClassVar[int] - GTE_FIELD_NUMBER: _ClassVar[int] - IN_FIELD_NUMBER: _ClassVar[int] - NOT_IN_FIELD_NUMBER: _ClassVar[int] - IGNORE_EMPTY_FIELD_NUMBER: _ClassVar[int] - const: int - lt: int - lte: int - gt: int - gte: int - not_in: _containers.RepeatedScalarFieldContainer[int] - ignore_empty: bool - def __init__(self, const: _Optional[int] = ..., lt: _Optional[int] = ..., lte: _Optional[int] = ..., gt: _Optional[int] = ..., gte: _Optional[int] = ..., not_in: _Optional[_Iterable[int]] = ..., ignore_empty: bool = ..., **kwargs) -> None: ... - -class BoolRules(_message.Message): - __slots__ = ("const",) - CONST_FIELD_NUMBER: _ClassVar[int] - const: bool - def __init__(self, const: bool = ...) -> None: ... - -class StringRules(_message.Message): - __slots__ = ("const", "len", "min_len", "max_len", "len_bytes", "min_bytes", "max_bytes", "pattern", "prefix", "suffix", "contains", "not_contains", "not_in", "email", "hostname", "ip", "ipv4", "ipv6", "uri", "uri_ref", "address", "uuid", "well_known_regex", "strict", "ignore_empty") - CONST_FIELD_NUMBER: _ClassVar[int] - LEN_FIELD_NUMBER: _ClassVar[int] - MIN_LEN_FIELD_NUMBER: _ClassVar[int] - MAX_LEN_FIELD_NUMBER: _ClassVar[int] - LEN_BYTES_FIELD_NUMBER: _ClassVar[int] - MIN_BYTES_FIELD_NUMBER: _ClassVar[int] - MAX_BYTES_FIELD_NUMBER: _ClassVar[int] - PATTERN_FIELD_NUMBER: _ClassVar[int] - PREFIX_FIELD_NUMBER: _ClassVar[int] - SUFFIX_FIELD_NUMBER: _ClassVar[int] - CONTAINS_FIELD_NUMBER: _ClassVar[int] - NOT_CONTAINS_FIELD_NUMBER: _ClassVar[int] - IN_FIELD_NUMBER: _ClassVar[int] - NOT_IN_FIELD_NUMBER: _ClassVar[int] - EMAIL_FIELD_NUMBER: _ClassVar[int] - HOSTNAME_FIELD_NUMBER: _ClassVar[int] - IP_FIELD_NUMBER: _ClassVar[int] - IPV4_FIELD_NUMBER: _ClassVar[int] - IPV6_FIELD_NUMBER: 
_ClassVar[int] - URI_FIELD_NUMBER: _ClassVar[int] - URI_REF_FIELD_NUMBER: _ClassVar[int] - ADDRESS_FIELD_NUMBER: _ClassVar[int] - UUID_FIELD_NUMBER: _ClassVar[int] - WELL_KNOWN_REGEX_FIELD_NUMBER: _ClassVar[int] - STRICT_FIELD_NUMBER: _ClassVar[int] - IGNORE_EMPTY_FIELD_NUMBER: _ClassVar[int] - const: str - len: int - min_len: int - max_len: int - len_bytes: int - min_bytes: int - max_bytes: int - pattern: str - prefix: str - suffix: str - contains: str - not_contains: str - not_in: _containers.RepeatedScalarFieldContainer[str] - email: bool - hostname: bool - ip: bool - ipv4: bool - ipv6: bool - uri: bool - uri_ref: bool - address: bool - uuid: bool - well_known_regex: KnownRegex - strict: bool - ignore_empty: bool - def __init__(self, const: _Optional[str] = ..., len: _Optional[int] = ..., min_len: _Optional[int] = ..., max_len: _Optional[int] = ..., len_bytes: _Optional[int] = ..., min_bytes: _Optional[int] = ..., max_bytes: _Optional[int] = ..., pattern: _Optional[str] = ..., prefix: _Optional[str] = ..., suffix: _Optional[str] = ..., contains: _Optional[str] = ..., not_contains: _Optional[str] = ..., not_in: _Optional[_Iterable[str]] = ..., email: bool = ..., hostname: bool = ..., ip: bool = ..., ipv4: bool = ..., ipv6: bool = ..., uri: bool = ..., uri_ref: bool = ..., address: bool = ..., uuid: bool = ..., well_known_regex: _Optional[_Union[KnownRegex, str]] = ..., strict: bool = ..., ignore_empty: bool = ..., **kwargs) -> None: ... 
- -class BytesRules(_message.Message): - __slots__ = ("const", "len", "min_len", "max_len", "pattern", "prefix", "suffix", "contains", "not_in", "ip", "ipv4", "ipv6", "ignore_empty") - CONST_FIELD_NUMBER: _ClassVar[int] - LEN_FIELD_NUMBER: _ClassVar[int] - MIN_LEN_FIELD_NUMBER: _ClassVar[int] - MAX_LEN_FIELD_NUMBER: _ClassVar[int] - PATTERN_FIELD_NUMBER: _ClassVar[int] - PREFIX_FIELD_NUMBER: _ClassVar[int] - SUFFIX_FIELD_NUMBER: _ClassVar[int] - CONTAINS_FIELD_NUMBER: _ClassVar[int] - IN_FIELD_NUMBER: _ClassVar[int] - NOT_IN_FIELD_NUMBER: _ClassVar[int] - IP_FIELD_NUMBER: _ClassVar[int] - IPV4_FIELD_NUMBER: _ClassVar[int] - IPV6_FIELD_NUMBER: _ClassVar[int] - IGNORE_EMPTY_FIELD_NUMBER: _ClassVar[int] - const: bytes - len: int - min_len: int - max_len: int - pattern: str - prefix: bytes - suffix: bytes - contains: bytes - not_in: _containers.RepeatedScalarFieldContainer[bytes] - ip: bool - ipv4: bool - ipv6: bool - ignore_empty: bool - def __init__(self, const: _Optional[bytes] = ..., len: _Optional[int] = ..., min_len: _Optional[int] = ..., max_len: _Optional[int] = ..., pattern: _Optional[str] = ..., prefix: _Optional[bytes] = ..., suffix: _Optional[bytes] = ..., contains: _Optional[bytes] = ..., not_in: _Optional[_Iterable[bytes]] = ..., ip: bool = ..., ipv4: bool = ..., ipv6: bool = ..., ignore_empty: bool = ..., **kwargs) -> None: ... - -class EnumRules(_message.Message): - __slots__ = ("const", "defined_only", "not_in") - CONST_FIELD_NUMBER: _ClassVar[int] - DEFINED_ONLY_FIELD_NUMBER: _ClassVar[int] - IN_FIELD_NUMBER: _ClassVar[int] - NOT_IN_FIELD_NUMBER: _ClassVar[int] - const: int - defined_only: bool - not_in: _containers.RepeatedScalarFieldContainer[int] - def __init__(self, const: _Optional[int] = ..., defined_only: bool = ..., not_in: _Optional[_Iterable[int]] = ..., **kwargs) -> None: ... 
- -class MessageRules(_message.Message): - __slots__ = ("skip", "required") - SKIP_FIELD_NUMBER: _ClassVar[int] - REQUIRED_FIELD_NUMBER: _ClassVar[int] - skip: bool - required: bool - def __init__(self, skip: bool = ..., required: bool = ...) -> None: ... - -class RepeatedRules(_message.Message): - __slots__ = ("min_items", "max_items", "unique", "items", "ignore_empty") - MIN_ITEMS_FIELD_NUMBER: _ClassVar[int] - MAX_ITEMS_FIELD_NUMBER: _ClassVar[int] - UNIQUE_FIELD_NUMBER: _ClassVar[int] - ITEMS_FIELD_NUMBER: _ClassVar[int] - IGNORE_EMPTY_FIELD_NUMBER: _ClassVar[int] - min_items: int - max_items: int - unique: bool - items: FieldRules - ignore_empty: bool - def __init__(self, min_items: _Optional[int] = ..., max_items: _Optional[int] = ..., unique: bool = ..., items: _Optional[_Union[FieldRules, _Mapping]] = ..., ignore_empty: bool = ...) -> None: ... - -class MapRules(_message.Message): - __slots__ = ("min_pairs", "max_pairs", "no_sparse", "keys", "values", "ignore_empty") - MIN_PAIRS_FIELD_NUMBER: _ClassVar[int] - MAX_PAIRS_FIELD_NUMBER: _ClassVar[int] - NO_SPARSE_FIELD_NUMBER: _ClassVar[int] - KEYS_FIELD_NUMBER: _ClassVar[int] - VALUES_FIELD_NUMBER: _ClassVar[int] - IGNORE_EMPTY_FIELD_NUMBER: _ClassVar[int] - min_pairs: int - max_pairs: int - no_sparse: bool - keys: FieldRules - values: FieldRules - ignore_empty: bool - def __init__(self, min_pairs: _Optional[int] = ..., max_pairs: _Optional[int] = ..., no_sparse: bool = ..., keys: _Optional[_Union[FieldRules, _Mapping]] = ..., values: _Optional[_Union[FieldRules, _Mapping]] = ..., ignore_empty: bool = ...) -> None: ... - -class AnyRules(_message.Message): - __slots__ = ("required", "not_in") - REQUIRED_FIELD_NUMBER: _ClassVar[int] - IN_FIELD_NUMBER: _ClassVar[int] - NOT_IN_FIELD_NUMBER: _ClassVar[int] - required: bool - not_in: _containers.RepeatedScalarFieldContainer[str] - def __init__(self, required: bool = ..., not_in: _Optional[_Iterable[str]] = ..., **kwargs) -> None: ... 
- -class DurationRules(_message.Message): - __slots__ = ("required", "const", "lt", "lte", "gt", "gte", "not_in") - REQUIRED_FIELD_NUMBER: _ClassVar[int] - CONST_FIELD_NUMBER: _ClassVar[int] - LT_FIELD_NUMBER: _ClassVar[int] - LTE_FIELD_NUMBER: _ClassVar[int] - GT_FIELD_NUMBER: _ClassVar[int] - GTE_FIELD_NUMBER: _ClassVar[int] - IN_FIELD_NUMBER: _ClassVar[int] - NOT_IN_FIELD_NUMBER: _ClassVar[int] - required: bool - const: _duration_pb2.Duration - lt: _duration_pb2.Duration - lte: _duration_pb2.Duration - gt: _duration_pb2.Duration - gte: _duration_pb2.Duration - not_in: _containers.RepeatedCompositeFieldContainer[_duration_pb2.Duration] - def __init__(self, required: bool = ..., const: _Optional[_Union[_duration_pb2.Duration, _Mapping]] = ..., lt: _Optional[_Union[_duration_pb2.Duration, _Mapping]] = ..., lte: _Optional[_Union[_duration_pb2.Duration, _Mapping]] = ..., gt: _Optional[_Union[_duration_pb2.Duration, _Mapping]] = ..., gte: _Optional[_Union[_duration_pb2.Duration, _Mapping]] = ..., not_in: _Optional[_Iterable[_Union[_duration_pb2.Duration, _Mapping]]] = ..., **kwargs) -> None: ... 
- -class TimestampRules(_message.Message): - __slots__ = ("required", "const", "lt", "lte", "gt", "gte", "lt_now", "gt_now", "within") - REQUIRED_FIELD_NUMBER: _ClassVar[int] - CONST_FIELD_NUMBER: _ClassVar[int] - LT_FIELD_NUMBER: _ClassVar[int] - LTE_FIELD_NUMBER: _ClassVar[int] - GT_FIELD_NUMBER: _ClassVar[int] - GTE_FIELD_NUMBER: _ClassVar[int] - LT_NOW_FIELD_NUMBER: _ClassVar[int] - GT_NOW_FIELD_NUMBER: _ClassVar[int] - WITHIN_FIELD_NUMBER: _ClassVar[int] - required: bool - const: _timestamp_pb2.Timestamp - lt: _timestamp_pb2.Timestamp - lte: _timestamp_pb2.Timestamp - gt: _timestamp_pb2.Timestamp - gte: _timestamp_pb2.Timestamp - lt_now: bool - gt_now: bool - within: _duration_pb2.Duration - def __init__(self, required: bool = ..., const: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., lt: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., lte: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., gt: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., gte: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., lt_now: bool = ..., gt_now: bool = ..., within: _Optional[_Union[_duration_pb2.Duration, _Mapping]] = ...) -> None: ... diff --git a/netbox_diode_plugin/reconciler/sdk/validate/validate_pb2_grpc.py b/netbox_diode_plugin/reconciler/sdk/validate/validate_pb2_grpc.py deleted file mode 100644 index 2daafff..0000000 --- a/netbox_diode_plugin/reconciler/sdk/validate/validate_pb2_grpc.py +++ /dev/null @@ -1,4 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-"""Client and server classes corresponding to protobuf-defined services.""" -import grpc - diff --git a/netbox_diode_plugin/tables.py b/netbox_diode_plugin/tables.py deleted file mode 100644 index 0d8869d..0000000 --- a/netbox_diode_plugin/tables.py +++ /dev/null @@ -1,142 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode NetBox Plugin - Tables.""" -import datetime - -import django_tables2 as tables -import zoneinfo -from django.conf import settings -from packaging import version - -if version.parse(settings.VERSION).major >= 4: - from core.models import ObjectType as NetBoxType -else: - from django.contrib.contenttypes.models import ContentType as NetBoxType - -from netbox.tables import BaseTable, columns -from utilities.object_types import object_type_identifier, object_type_name - -from netbox_diode_plugin.reconciler.sdk.v1 import reconciler_pb2 - -INGESTION_LOGS_TABLE_ACTIONS_TEMPLATE = """ - -""" - - -class IngestionStateColumn(tables.Column): - """Renders the ingestion state as a human-readable string.""" - - def render(self, value): - """Renders the ingestion state as a human-readable string.""" - if value: - try: - state_name = reconciler_pb2.State.Name(value) - except ValueError: - state_name = reconciler_pb2.State.Name(reconciler_pb2.State.UNSPECIFIED) - return " ".join(state_name.title().split("_")) - return None - - -class TimestampColumn(columns.DateTimeColumn): - """Custom implementation of Timestamp to render an epoch timestamp as a human-readable date.""" - - def render(self, value): - """Renders an epoch timestamp as a human-readable date.""" - if value: - current_tz = zoneinfo.ZoneInfo(settings.TIME_ZONE) - value = datetime.datetime.fromtimestamp(value / 1_000_000_000).astimezone( - current_tz - ) - return f"{value.date().isoformat()} {value.time().isoformat(timespec=self.timespec)}" - return None - - -class DataTypeColumn(columns.ContentTypeColumn): - """Custom implementation of ContentTypeColumn to render a data type 
based on app_label and model.""" - - def render(self, value): - """Renders a data type based on app_label and model.""" - app_label, model_name = value.split(".") - object_content_type = NetBoxType.objects.get_by_natural_key( - app_label, model_name - ) - return object_type_name(object_content_type, include_app=False) - - def value(self, value): - """Returns the value.""" - return value - - -class IngestionLogsTable(BaseTable): - """Ingestion logs table.""" - - ingestion_ts = TimestampColumn( - verbose_name="Ingestion Timestamp", - accessor="ingestion_ts", - orderable=False, - ) - - state = IngestionStateColumn( - verbose_name="State", - accessor="state", - orderable=False, - ) - - object_type = DataTypeColumn( - verbose_name="Data Type", - accessor="data_type", - orderable=False, - ) - - request_id = tables.Column( - verbose_name="Request ID", - accessor="request_id", - orderable=False, - ) - - producer = tables.Column( - verbose_name="Producer", - empty_values=(), - orderable=False, - ) - - sdk = tables.Column( - verbose_name="SDK", - empty_values=(), - orderable=False, - ) - - actions = tables.TemplateColumn( - template_code=INGESTION_LOGS_TABLE_ACTIONS_TEMPLATE, - verbose_name="", - orderable=False, - ) - - class Meta: - """Meta class.""" - - attrs = { - "class": "table table-hover table-striped table-condensed", - "td": {"class": "align-middle"}, - } - fields = ( - "ingestion_ts", - "object_type", - "state", - "producer", - "sdk", - "request_id", - "actions", - ) - empty_text = "No ingestion logs to display" - footer = False - - def render_producer(self, record): - """Renders the producer.""" - return f"{record.producer_app_name}/{record.producer_app_version}" - - def render_sdk(self, record): - """Renders the SDK.""" - return f"{record.sdk_name}/{record.sdk_version}" diff --git a/netbox_diode_plugin/templates/diode/ingestion_logs.html b/netbox_diode_plugin/templates/diode/ingestion_logs.html deleted file mode 100644 index 8b076a3..0000000 --- 
a/netbox_diode_plugin/templates/diode/ingestion_logs.html +++ /dev/null @@ -1,141 +0,0 @@ -{% extends 'generic/_base.html' %} -{% load buttons %} -{% load helpers %} -{% load render_table from django_tables2 %} -{% load i18n %} - -{% block title %}{% trans "Ingestion Logs" %}{% endblock %} - -{% block content %} - -{% if netbox_to_diode_user_error %} - -{% elif ingestion_logs_error %} - -{% else %} -
-
-
-
-
-
- -
Queued
-
-
- -
Changes
-
-
- -
Failed
-
-
- -
No Changes
-
-
- -
Total
-
-
-
-
-
-
-
-
-
- {% render_table ingestion_logs_table 'diode/ingestion_logs_table.html' %} - {% if ingestion_logs_table.data %} - {% include 'diode/ingestion_logs_paginator.html' with next_page_token=next_page_token total_count=total_count %} - {% endif %} -
-
-
-
-
- -{% endif %} - -{% endblock content %} diff --git a/netbox_diode_plugin/templates/diode/ingestion_logs_paginator.html b/netbox_diode_plugin/templates/diode/ingestion_logs_paginator.html deleted file mode 100644 index 99d9379..0000000 --- a/netbox_diode_plugin/templates/diode/ingestion_logs_paginator.html +++ /dev/null @@ -1,17 +0,0 @@ -{% load helpers %} -{% load i18n %} - -{% if next_page_token %} -
- Total: {{ total_count }} - - {% trans "Next Page" %} - -
-{% else %} -
-{% if total_count > 0 %} - Total: {{ total_count }} -{% endif %} -
-{% endif %} diff --git a/netbox_diode_plugin/templates/diode/ingestion_logs_table.html b/netbox_diode_plugin/templates/diode/ingestion_logs_table.html deleted file mode 100644 index b6d9fdf..0000000 --- a/netbox_diode_plugin/templates/diode/ingestion_logs_table.html +++ /dev/null @@ -1,109 +0,0 @@ -{% load django_tables2 %} -{% load i18n %} -{% load diode_filters %} - - -{% if table.show_header %} - - - {% for column in table.columns %} - {% if column.orderable %} - {{ column.header }} - {% else %} - {{ column.header }} - {% endif %} - {% endfor %} - - -{% endif %} - -{% for row in table.page.object_list|default:table.rows %} - - {% for column, cell in row.items %} - {{ cell }} - {% endfor %} - - - -
-
- {% block tabs %} - - {% endblock tabs %} -
-
-
-
-
-
{{ row.record.entity|proto_to_json }}
-
-
-
-
-
-
-
-
- {% if row.record.change_set.data != "" and row.record.change_set.data|length > 0 %} -
{{ row.record.change_set|proto_to_json }}
- {% else %} -
None
- {% endif %} -
-
-
-
-
-
-
-
- {% if row.record.error.message != "" %} -
{{ row.record.error|proto_to_json }}
- {% else %} -
None
- {% endif %} -
-
-
-
-
-
-
- - -{% empty %} -{% if table.empty_text %} - - — {{ table.empty_text }} — - -{% endif %} -{% endfor %} - -{% if table.has_footer %} - - - {% for column in table.columns %} - {{ column.footer }} - {% endfor %} - - -{% endif %} - diff --git a/netbox_diode_plugin/templatetags/__init__.py b/netbox_diode_plugin/templatetags/__init__.py deleted file mode 100644 index 794a126..0000000 --- a/netbox_diode_plugin/templatetags/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode NetBox Plugin - Template Tags.""" diff --git a/netbox_diode_plugin/templatetags/diode_filters.py b/netbox_diode_plugin/templatetags/diode_filters.py deleted file mode 100644 index 466107f..0000000 --- a/netbox_diode_plugin/templatetags/diode_filters.py +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode NetBox Plugin - Template Tags.""" - -import json - -import brotli -from django import template -from google.protobuf.json_format import MessageToJson - -from netbox_diode_plugin.reconciler.sdk.v1 import ingester_pb2, reconciler_pb2 - -register = template.Library() - - -@register.filter("proto_to_json") -def proto_to_json(value): - """Converts a protobuf message to a JSON string.""" - indent = 4 - if isinstance(value, reconciler_pb2.IngestionError) and value.message != "": - return MessageToJson(value, indent=indent) - - if isinstance(value, ingester_pb2.Entity): - return MessageToJson(value, indent=indent) - - if isinstance(value, reconciler_pb2.ChangeSet): - try: - decompressed_data = brotli.decompress(value.data) - decompressed_string = decompressed_data.decode("utf-8") - json_data = json.loads(decompressed_string) - return json.dumps(json_data, indent=indent) - except Exception: - return None - - return None diff --git a/netbox_diode_plugin/tests/test_reconciler_sdk_client.py b/netbox_diode_plugin/tests/test_reconciler_sdk_client.py deleted file mode 100644 index 3820bac..0000000 --- 
a/netbox_diode_plugin/tests/test_reconciler_sdk_client.py +++ /dev/null @@ -1,331 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""NetBox Labs - Tests.""" -from unittest import mock - -import grpc -import pytest -from django.test import TestCase - -import netbox_diode_plugin -from netbox_diode_plugin.reconciler.sdk.client import ( - ReconcilerClient, - ReconcilerMethodClientInterceptor, - _ClientCallDetails, - _load_certs, - parse_target, -) -from netbox_diode_plugin.reconciler.sdk.exceptions import ReconcilerClientError - - -class ReconcilerSDKClientTestCase(TestCase): - """Test case for the Reconciler SDK client.""" - - def test_init(self): - """Check we can initiate a client configuration.""" - client = ReconcilerClient( - target="grpc://localhost:8081", - api_key="foobar", - ) - - plugin_config = netbox_diode_plugin.config - - assert client.target == "localhost:8081" - assert client.name == "reconciler-sdk-python" - assert client.version == "0.0.1" - assert client.app_name == plugin_config.name - assert client.app_version == plugin_config.version - assert client.tls_verify is False - assert client.path == "" - - - def test_client_error(self): - """Check we can raise a client error.""" - with pytest.raises(ReconcilerClientError) as err: - client = ReconcilerClient( - target="grpc://invalid:8089", - api_key="foobar", - ) - client.retrieve_ingestion_logs() - - assert err.value.status_code == grpc.StatusCode.UNAVAILABLE.name - assert "DNS resolution failed for invalid:8089" in err.value.details - - - def test_client_error_non_grpc_status_code(self): - """Check we can raise a client error.""" - with pytest.raises(ReconcilerClientError) as err: - rpc_error = grpc.RpcError() - rpc_error.code = lambda: "NON_GRPC_STATUS_CODE" - rpc_error.details = lambda: "Some details about the error" - raise ReconcilerClientError(rpc_error) - - assert not isinstance(err.value.status_code, grpc.StatusCode) - assert err.value.status_code == "NON_GRPC_STATUS_CODE" - 
assert err.value.details == "Some details about the error" - - - def test_client_error_repr_returns_correct_string(self): - """Check we can return the correct string representation of the error.""" - grpc_error = grpc.RpcError() - grpc_error.code = lambda: grpc.StatusCode.UNAVAILABLE - grpc_error.details = lambda: "Some details about the error" - error = ReconcilerClientError(grpc_error) - error._status_code = grpc.StatusCode.UNAVAILABLE - error._details = "Some details about the error" - assert ( - repr(error) - == "" - ) - - - def test_load_certs_returns_bytes(self): - """Check that _load_certs returns bytes.""" - assert isinstance(_load_certs(), bytes) - - - def test_parse_target_handles_http_prefix(self): - """Check that parse_target raises an error when the target contains http://.""" - with pytest.raises(ValueError): - parse_target("http://localhost:8081") - - - def test_parse_target_handles_https_prefix(self): - """Check that parse_target raises an error when the target contains https://.""" - with pytest.raises(ValueError): - parse_target("https://localhost:8081") - - - def test_parse_target_parses_authority_correctly(self): - """Check that parse_target parses the authority correctly.""" - authority, path, tls_verify = parse_target("grpc://localhost:8081") - assert authority == "localhost:8081" - assert path == "" - assert tls_verify is False - - - def test_parse_target_adds_default_port_if_missing(self): - """Check that parse_target adds the default port if missing.""" - authority, _, _ = parse_target("grpc://localhost") - assert authority == "localhost:443" - - - def test_parse_target_parses_path_correctly(self): - """Check that parse_target parses the path correctly.""" - _, path, _ = parse_target("grpc://localhost:8081/my/path") - assert path == "/my/path" - - - def test_parse_target_handles_no_path(self): - """Check that parse_target handles no path.""" - _, path, _ = parse_target("grpc://localhost:8081") - assert path == "" - - - def 
test_parse_target_parses_tls_verify_correctly(self): - """Check that parse_target parses tls_verify correctly.""" - _, _, tls_verify = parse_target("grpcs://localhost:8081") - assert tls_verify is True - - - def test_client_sets_up_secure_channel_when_grpcs_scheme_is_found_in_target(self): - """Check that ReconcilerClient.__init__() sets up the gRPC secure channel when grpcs:// scheme is found in the target.""" - with ( - mock.patch("grpc.secure_channel") as mock_secure_channel, - mock.patch("logging.Logger.debug") as mock_debug, - ): - _ = ReconcilerClient( - target="grpcs://localhost:8081", - api_key="foobar", - ) - - mock_debug.assert_called_once_with("Setting up gRPC secure channel") - mock_secure_channel.assert_called_once() - - - def test_client_sets_up_insecure_channel_when_grpc_scheme_is_found_in_target(self): - """Check that ReconcilerClient.__init__() sets up the gRPC insecure channel when grpc:// scheme is found in the target.""" - with ( - mock.patch("grpc.insecure_channel") as mock_insecure_channel, - mock.patch("logging.Logger.debug") as mock_debug, - ): - _ = ReconcilerClient( - target="grpc://localhost:8081", - api_key="foobar", - ) - - mock_debug.assert_called_with( - "Setting up gRPC insecure channel", - ) - mock_insecure_channel.assert_called_once() - - - def test_insecure_channel_options_with_primary_user_agent(self): - """Check that ReconcilerClient.__init__() sets the gRPC primary_user_agent option for insecure channel.""" - with mock.patch("grpc.insecure_channel") as mock_insecure_channel: - client = ReconcilerClient( - target="grpc://localhost:8081", - api_key="abcde", - ) - - mock_insecure_channel.assert_called_once() - _, kwargs = mock_insecure_channel.call_args - assert kwargs["options"] == ( - ( - "grpc.primary_user_agent", - f"{client.name}/{client.version} {client.app_name}/{client.app_version}", - ), - ) - - - def test_secure_channel_options_with_primary_user_agent(self): - """Check that ReconcilerClient.__init__() sets the gRPC 
primary_user_agent option for secure channel.""" - with mock.patch("grpc.secure_channel") as mock_secure_channel: - client = ReconcilerClient( - target="grpcs://localhost:8081", - api_key="abcde", - ) - - mock_secure_channel.assert_called_once() - _, kwargs = mock_secure_channel.call_args - assert kwargs["options"] == ( - ( - "grpc.primary_user_agent", - f"{client.name}/{client.version} {client.app_name}/{client.app_version}", - ), - ) - - def test_client_interceptor_setup_with_path(self): - """Check that ReconcilerClient.__init__() sets up the gRPC interceptor when a path is provided.""" - with ( - mock.patch("grpc.intercept_channel") as mock_intercept_channel, - mock.patch("logging.Logger.debug") as mock_debug, - ): - _ = ReconcilerClient( - target="grpcs://localhost:8081/my-path", - api_key="foobar", - ) - - mock_debug.assert_called_with( - "Setting up gRPC interceptor for path: /my-path", - ) - mock_intercept_channel.assert_called_once() - - - def test_client_interceptor_not_setup_without_path(self): - """Check that ReconcilerClient.__init__() does not set up the gRPC interceptor when no path is provided.""" - with ( - mock.patch("grpc.intercept_channel") as mock_intercept_channel, - mock.patch("logging.Logger.debug") as mock_debug, - ): - _ = ReconcilerClient( - target="grpc://localhost:8081", - api_key="foobar", - ) - - mock_debug.assert_called_with( - "Setting up gRPC insecure channel", - ) - mock_intercept_channel.assert_not_called() - - - def test_client_properties_return_expected_values(self): - """Check that ReconcilerClient properties return the expected values.""" - client = ReconcilerClient( - target="grpc://localhost:8081", - api_key="foobar", - ) - - plugin_config = netbox_diode_plugin.config - - assert client.target == "localhost:8081" - assert client.name == "reconciler-sdk-python" - assert client.version == "0.0.1" - assert client.app_name == plugin_config.name - assert client.app_version == plugin_config.version - assert client.tls_verify is 
False - assert client.path == "" - assert isinstance(client.channel, grpc.Channel) - - - def test_client_enter_returns_self(self): - """Check that ReconcilerClient.__enter__() returns self.""" - client = ReconcilerClient( - target="grpc://localhost:8081", - api_key="foobar", - ) - assert client.__enter__() is client - - - def test_client_exit_closes_channel(self): - """Check that ReconcilerClient.__exit__() closes the channel.""" - client = ReconcilerClient( - target="grpc://localhost:8081", - api_key="foobar", - ) - with mock.patch.object(client._channel, "close") as mock_close: - client.__exit__(None, None, None) - mock_close.assert_called_once() - - - def test_client_close_closes_channel(self): - """Check that ReconcilerClient.close() closes the channel.""" - client = ReconcilerClient( - target="grpc://localhost:8081", - api_key="foobar", - ) - with mock.patch.object(client._channel, "close") as mock_close: - client.close() - mock_close.assert_called_once() - - - def test_interceptor_init_sets_subpath(self): - """Check that ReconcilerMethodClientInterceptor.__init__() sets the subpath.""" - interceptor = ReconcilerMethodClientInterceptor("/my/path") - assert interceptor._subpath == "/my/path" - - - def test_interceptor_intercepts_unary_unary_calls(self): - """Check that the interceptor intercepts unary unary calls.""" - interceptor = ReconcilerMethodClientInterceptor("/my/path") - - def continuation(x, _): - return x.method - - client_call_details = _ClientCallDetails( - "/diode.v1.ReconcilerService/RetrieveIngestionLogs", - None, - None, - None, - None, - None, - ) - request = None - assert ( - interceptor.intercept_unary_unary(continuation, client_call_details, request) - == "/my/path/diode.v1.ReconcilerService/RetrieveIngestionLogs" - ) - - - def test_interceptor_intercepts_stream_unary_calls(self): - """Check that ReconcilerMethodClientInterceptor.intercept_stream_unary() intercepts stream unary calls.""" - interceptor = 
ReconcilerMethodClientInterceptor("/my/path") - - def continuation(x, _): - return x.method - - client_call_details = _ClientCallDetails( - "/diode.v1.ReconcilerService/RetrieveIngestionLogs", - None, - None, - None, - None, - None, - ) - request_iterator = None - assert ( - interceptor.intercept_stream_unary( - continuation, client_call_details, request_iterator - ) - == "/my/path/diode.v1.ReconcilerService/RetrieveIngestionLogs" - ) diff --git a/netbox_diode_plugin/tests/test_tables.py b/netbox_diode_plugin/tests/test_tables.py deleted file mode 100644 index 38dd93e..0000000 --- a/netbox_diode_plugin/tests/test_tables.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode NetBox Plugin - Tests.""" - -import datetime - -import zoneinfo -from django.conf import settings -from django.test import TestCase - -from netbox_diode_plugin.reconciler.sdk.v1 import reconciler_pb2 -from netbox_diode_plugin.tables import IngestionLogsTable - - -class IngestionLogsTableTestCase(TestCase): - """Test case for the IngestionLogsTable.""" - - def setUp(self): - """Set up mock data for the table.""" - self.mock_data = [ - reconciler_pb2.IngestionLog( - ingestion_ts=1638316800000000000, # Example timestamp in nanoseconds - state=reconciler_pb2.State.APPLIED, - data_type="dcim.site", - request_id="12345", - producer_app_name="TestApp", - producer_app_version="1.0.0", - sdk_name="TestSDK", - sdk_version="1.0.0", - ), - reconciler_pb2.IngestionLog(), - ] - - def test_ingestion_ts_rendering(self): - """Test rendering of the ingestion_ts column.""" - table = IngestionLogsTable(self.mock_data) - current_tz = zoneinfo.ZoneInfo(settings.TIME_ZONE) - expected_date = ( - datetime.datetime.fromtimestamp( - self.mock_data[0].ingestion_ts / 1_000_000_000 - ) - .astimezone(current_tz) - .date() - .isoformat() - ) - expected_time = ( - datetime.datetime.fromtimestamp( - self.mock_data[0].ingestion_ts / 1_000_000_000 - ) - .astimezone(current_tz) - 
.time() - .isoformat(timespec="seconds") - ) - self.assertEqual( - table.rows[0].get_cell("ingestion_ts"), f"{expected_date} {expected_time}" - ) - self.assertEqual(table.rows[1].get_cell("ingestion_ts"), None) - - def test_state_rendering(self): - """Test rendering of the state column.""" - table = IngestionLogsTable(self.mock_data) - self.assertEqual(table.rows[0].get_cell("state"), "Applied") - self.assertEqual(table.rows[1].get_cell("state"), None) - - def test_data_type_rendering(self): - """Test rendering of the data_type column.""" - table = IngestionLogsTable(self.mock_data) - self.assertEqual(table.rows[0].get_cell("object_type"), "Site") - self.assertEqual(table.rows[1].get_cell("object_type"), table.default) - - def test_producer_rendering(self): - """Test rendering of the producer column.""" - table = IngestionLogsTable(self.mock_data) - self.assertEqual(table.rows[0].get_cell("producer"), "TestApp/1.0.0") - - def test_sdk_rendering(self): - """Test rendering of the sdk column.""" - table = IngestionLogsTable(self.mock_data) - self.assertEqual(table.rows[0].get_cell("sdk"), "TestSDK/1.0.0") - - def test_request_id_rendering(self): - """Test rendering of the request_id column.""" - table = IngestionLogsTable(self.mock_data) - self.assertEqual(table.rows[0].get_cell("request_id"), "12345") diff --git a/netbox_diode_plugin/tests/test_templatetags.py b/netbox_diode_plugin/tests/test_templatetags.py deleted file mode 100644 index c500f4a..0000000 --- a/netbox_diode_plugin/tests/test_templatetags.py +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode NetBox Plugin - Tests.""" - -from django.test import TestCase -from google.protobuf.json_format import MessageToJson - -from netbox_diode_plugin.reconciler.sdk.v1 import ingester_pb2, reconciler_pb2 -from netbox_diode_plugin.templatetags.diode_filters import proto_to_json - - -class TestProtoToJsonTestCase(TestCase): - """Test case for the proto_to_json template 
filter.""" - - def test_ingestion_error_with_message(self): - """Create a mock IngestionError with a message.""" - error = reconciler_pb2.IngestionError(message="Test error") - expected_json = MessageToJson(error, indent=4) - self.assertEqual(proto_to_json(error), expected_json) - - def test_ingestion_error_without_message(self): - """Create a mock IngestionError without a message.""" - error = reconciler_pb2.IngestionError(message="") - self.assertIsNone(proto_to_json(error)) - - def test_entity(self): - """Create a mock Entity.""" - entity = ingester_pb2.Entity( - site=ingester_pb2.Site( - name="Test Site", - ) - ) - expected_json = MessageToJson(entity, indent=4) - self.assertEqual(proto_to_json(entity), expected_json) - - def test_changeset(self): - """Create a mock ChangeSet.""" - changeset = reconciler_pb2.ChangeSet( - id="ac6481de-2351-49b6-9095-75a69fe47b1f", - data=( - b'\x1b\x92\x04\x00dq-\x95\x8fnV\xc0\x9c\xdbX"\\)\xfd1N\xdd\x04\x0e,\xc0\x00\xf8\x17\xb6\x17\x96\x05' - b"4\xa6f\x0b8\x90\xa8\x0b\xc3P\xf3.z\x8e0\xd5\xb9r\xf0\x18?Y\xc4n\xb6\xe7\x82\xfa\x0b\xcb\x9f\x9f_\xf9" - b"m\xbe\xee0Ai\xc1%\xdd\x19\x8d\xf5\x1a\x1d\xd5\x80\xa4\xc8c\xf4%\xd0`\x17\xab\x1e ^\r`:\xdb\t\xeer\xec" - b"\xa9\xea\xa0MA6Dx\xc3c\xe5\xd0\xb0\xb1J\x8aUg\xed+`\xaf\x8d\x07\x13\x9cM\xe0\x81\x80\xa3\nH(\xe9\xed" - b"\xfa~\x89\xb2\x03\x02\x9an\xb20\xfd\xc2C\xb9\x0f\x9fl\xed\x80\x80\xd7\xbb\xf7\x05Lp\xf5\xfc\x9c\xbe" - b"\x95\xb7\xf7\xd746\xbc]\x7f0\xfc\xff\x0br\\\xc5-\xf8\x90\xd8a\xaa\xa6\xc2H\r\x8b\xd6\x84\xb6\x902\xb1" - b"\xda`,\nJ\xab \t;7\x80_z\x9dkn\xb9\x9c\xe0\xb1\x19\x81|\xac\x16\xabO\x16\xa6\xc3:\x1e\xcec3\xddF\xd5" - b"t\xf4-\x0c\xa4*x\xe5\xeeU\xf7\x8f\x9d\xef\xe6P@r\xeddU:\xf7\xadN\xc3\x0e\x1f\xd0\x96VA\xd7\xa44R\xd2" - b"\x8cIyK\xa5G\x1a\xad\x018\xb0\xa2\xe2\xf1\xd1\xf5\x0b\x9bk4/o3\xe4?\x03Ly\x82~Y\x80\t~\xc0\xbe\x8bCW" - b'_\x18=+\x03\xba6\xaa"1+\x8c\x81\x88\xaaM6ZB\x05\xd3k\xab\x8f\x0f\x83_\xf8\xa1\xf1\xbc\xfb\xf8x?/o\xcf' - 
b"\xcap\x062oM\xdf\x8a\x80RtQ\x03U\x00I\xcc\xfb\xcf\xda\xc8\nx\xe5\x97\xeb\xf2\x8d\xdb\xb7wv\x9d\x0f1" - b"\x91\xd2\xc6\xb6\xf2\xc5?" - ), - ) - - decompress = proto_to_json(changeset) - self.assertIsNotNone(decompress) - self.assertIn("ac6481de-2351-49b6-9095-75a69fe47b1f", decompress) - - def test_invalid_type(self): - """Test an invalid type.""" - self.assertIsNone(proto_to_json("invalid")) diff --git a/netbox_diode_plugin/tests/test_views.py b/netbox_diode_plugin/tests/test_views.py index 5c28ce2..aaf54ab 100644 --- a/netbox_diode_plugin/tests/test_views.py +++ b/netbox_diode_plugin/tests/test_views.py @@ -8,204 +8,17 @@ from django.contrib.messages.middleware import MessageMiddleware from django.contrib.messages.storage.fallback import FallbackStorage from django.contrib.sessions.middleware import SessionMiddleware -from django.core.cache import cache from django.test import RequestFactory, TestCase from django.urls import reverse from rest_framework import status from users.models import Token from netbox_diode_plugin.models import Setting -from netbox_diode_plugin.reconciler.sdk.v1 import ingester_pb2, reconciler_pb2 -from netbox_diode_plugin.views import IngestionLogsView, SettingsEditView, SettingsView, SetupView +from netbox_diode_plugin.views import SettingsEditView, SettingsView, SetupView User = get_user_model() -class IngestionLogsViewTestCase(TestCase): - """Test case for the IngestionLogsView.""" - - def setUp(self): - """Setup the test case.""" - self.path = reverse("plugins:netbox_diode_plugin:ingestion_logs") - self.request = RequestFactory().get(self.path) - self.view = IngestionLogsView() - self.view.setup(self.request) - cache.delete("ingestion_metrics") - - def test_returns_200_for_authenticated(self): - """Test that the view returns 200 for an authenticated user.""" - self.request.user = User.objects.create_user("foo", password="pass") - self.request.user.is_staff = True - - response = self.view.get(self.request) - 
self.assertEqual(response.status_code, status.HTTP_200_OK) - - def test_redirects_to_login_page_for_unauthenticated_user(self): - """Test that the view returns 200 for an authenticated user.""" - self.request.user = AnonymousUser() - self.view.setup(self.request) - - response = IngestionLogsView.as_view()(self.request) - - self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(response.url, f"/netbox/login/?next={self.path}") - - def test_ingestion_logs_failed_to_retrieve(self): - """Test that the ingestion logs failed to retrieve throw an error.""" - self.request.user = User.objects.create_user("foo", password="pass") - self.request.user.is_staff = True - - response = self.view.get(self.request) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertIn( - "UNAVAILABLE: failed to connect to all addresses;", str(response.content) - ) - - def test_ingestion_logs_retrieve_logs(self): - """Test that the retrieved ingestion logs are rendered.""" - self.request.user = User.objects.create_user("foo", password="pass") - self.request.user.is_staff = True - - with mock.patch( - "netbox_diode_plugin.reconciler.sdk.client.ReconcilerClient.retrieve_ingestion_logs" - ) as mock_retrieve_ingestion_logs: - mock_retrieve_ingestion_logs.side_effect = ( - reconciler_pb2.RetrieveIngestionLogsResponse( - logs=[ - reconciler_pb2.IngestionLog( - data_type="dcim.site", - state=reconciler_pb2.State.APPLIED, - request_id="c6ecd1ea-b23b-4f98-8593-d01d5a0da012", - ingestion_ts=1725617988, - producer_app_name="diode-test-app", - producer_app_version="0.1.0", - sdk_name="diode-sdk-python", - sdk_version="0.1.0", - entity=ingester_pb2.Entity( - site=ingester_pb2.Site( - name="Test Site", - ), - ), - ) - ], - next_page_token="AAAAMg==", - metrics=reconciler_pb2.IngestionMetrics( - total=1, - ), - ), - reconciler_pb2.RetrieveIngestionLogsResponse( - metrics=reconciler_pb2.IngestionMetrics( - total=1, - ), - ), - ) - - response = 
self.view.get(self.request) - mock_retrieve_ingestion_logs.assert_called() - self.assertEqual(mock_retrieve_ingestion_logs.call_count, 2) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertNotIn("Server Error", str(response.content)) - - def test_cached_metrics(self): - """Test that the cached metrics are used.""" - self.request.user = User.objects.create_user("foo", password="pass") - self.request.user.is_staff = True - - with mock.patch( - "netbox_diode_plugin.reconciler.sdk.client.ReconcilerClient.retrieve_ingestion_logs" - ) as mock_retrieve_ingestion_logs: - mock_retrieve_ingestion_logs.side_effect = ( - reconciler_pb2.RetrieveIngestionLogsResponse( - logs=[ - reconciler_pb2.IngestionLog( - data_type="dcim.site", - state=reconciler_pb2.State.APPLIED, - request_id="c6ecd1ea-b23b-4f98-8593-d01d5a0da012", - ingestion_ts=1725617988, - producer_app_name="diode-test-app", - producer_app_version="0.1.0", - sdk_name="diode-sdk-python", - sdk_version="0.1.0", - entity=ingester_pb2.Entity( - site=ingester_pb2.Site( - name="Test Site", - ), - ), - ) - ], - next_page_token="AAAAMg==", - metrics=reconciler_pb2.IngestionMetrics( - total=1, - ), - ), - ) - - # Set up the cache - cache.set( - "ingestion_metrics", - { - "queued": 10, - "applied": 20, - "failed": 5, - "no_changes": 65, - "total": 1, - }, - timeout=300, - ) - - response = self.view.get(self.request) - mock_retrieve_ingestion_logs.assert_called() - self.assertEqual(mock_retrieve_ingestion_logs.call_count, 1) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertNotIn("Server Error", str(response.content)) - - def test_redirects_to_setup_view_on_missing_diode_user(self): - """Test that we redirect to plugin setup view if the Diode user is missing.""" - self.request.user = User.objects.create_user("foo", password="pass") - self.request.user.is_staff = True - - with ( - mock.patch( - "netbox_diode_plugin.views.get_diode_username_for_user_type" - ) as 
mock_get_diode_username_for_user_type, - mock.patch( - "netbox_diode_plugin.views.get_user_model" - ) as mock_get_user_model, - ): - mock_get_diode_username_for_user_type.return_value = ( - "fake-netbox-to-diode" - ) - mock_get_user_model.return_value.objects.get.side_effect = User.DoesNotExist - - response = self.view.get(self.request) - - self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(response.url, reverse("plugins:netbox_diode_plugin:setup")) - - def test_redirects_to_setup_view_on_missing_diode_user_token(self): - """Test that we redirect to plugin setup view if the Diode user token is missing.""" - self.request.user = User.objects.create_user("foo", password="pass") - self.request.user.is_staff = True - - with ( - mock.patch( - "netbox_diode_plugin.views.get_diode_username_for_user_type" - ) as mock_get_diode_username_for_user_type, - mock.patch( - "netbox_diode_plugin.views.Token.objects.filter" - ) as mock_token_objects_filter, - ): - mock_get_diode_username_for_user_type.return_value = ( - "netbox-to-diode" - ) - mock_token_objects_filter.return_value.exists.return_value = False - - response = self.view.get(self.request) - - self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(response.url, reverse("plugins:netbox_diode_plugin:setup")) - - class SettingsViewTestCase(TestCase): """Test case for the SettingsView.""" diff --git a/netbox_diode_plugin/urls.py b/netbox_diode_plugin/urls.py index 881ffd4..613c231 100644 --- a/netbox_diode_plugin/urls.py +++ b/netbox_diode_plugin/urls.py @@ -7,7 +7,6 @@ from . 
import views urlpatterns = ( - path("ingestion-logs/", views.IngestionLogsView.as_view(), name="ingestion_logs"), path("setup/", views.SetupView.as_view(), name="setup"), path("settings/", views.SettingsView.as_view(), name="settings"), path("settings/edit/", views.SettingsEditView.as_view(), name="settings_edit"), diff --git a/netbox_diode_plugin/views.py b/netbox_diode_plugin/views.py index 75f884b..99403d6 100644 --- a/netbox_diode_plugin/views.py +++ b/netbox_diode_plugin/views.py @@ -5,8 +5,7 @@ from django.conf import settings as netbox_settings from django.contrib import messages -from django.contrib.auth import get_user, get_user_model -from django.core.cache import cache +from django.contrib.auth import get_user_model from django.http import HttpResponseRedirect from django.shortcuts import redirect, render from django.utils.http import url_has_allowed_host_and_scheme @@ -23,9 +22,6 @@ get_diode_username_for_user_type, get_diode_usernames, ) -from netbox_diode_plugin.reconciler.sdk.client import ReconcilerClient -from netbox_diode_plugin.reconciler.sdk.exceptions import ReconcilerClientError -from netbox_diode_plugin.tables import IngestionLogsTable User = get_user_model() @@ -41,90 +37,6 @@ def redirect_to_login(request): return HttpResponseRedirect(redirect_url) -class IngestionLogsView(View): - """Ingestion logs view.""" - - INGESTION_METRICS_CACHE_KEY = "ingestion_metrics" - - def get(self, request): - """Render ingestion logs template.""" - if not request.user.is_authenticated or not request.user.is_staff: - return redirect_to_login(request) - - netbox_to_diode_username = get_diode_username_for_user_type("netbox_to_diode") - try: - user = get_user_model().objects.get(username=netbox_to_diode_username) - except User.DoesNotExist: - return redirect("plugins:netbox_diode_plugin:setup") - - if not Token.objects.filter(user=user).exists(): - return redirect("plugins:netbox_diode_plugin:setup") - - token = Token.objects.get(user=user) - - settings = 
Setting.objects.get() - - diode_target_override = get_plugin_config( - "netbox_diode_plugin", "diode_target_override" - ) - diode_target = diode_target_override or settings.diode_target - - reconciler_client = ReconcilerClient( - target=diode_target, - api_key=token.key, - ) - - page_size = 50 - - try: - ingestion_logs_filters = { - "page_size": page_size, - } - request_page_token = request.GET.get("page_token") - if request_page_token is not None: - ingestion_logs_filters["page_token"] = request_page_token - - resp = reconciler_client.retrieve_ingestion_logs(**ingestion_logs_filters) - table = IngestionLogsTable(resp.logs) - - cached_ingestion_metrics = cache.get(self.INGESTION_METRICS_CACHE_KEY) - if ( - cached_ingestion_metrics is not None - and cached_ingestion_metrics["total"] == resp.metrics.total - ): - metrics = cached_ingestion_metrics - else: - ingestion_metrics = reconciler_client.retrieve_ingestion_logs( - only_metrics=True - ) - metrics = { - "queued": ingestion_metrics.metrics.queued or 0, - "reconciled": ingestion_metrics.metrics.reconciled or 0, - "failed": ingestion_metrics.metrics.failed or 0, - "no_changes": ingestion_metrics.metrics.no_changes or 0, - "total": ingestion_metrics.metrics.total or 0, - } - cache.set( - self.INGESTION_METRICS_CACHE_KEY, - metrics, - timeout=300, - ) - - context = { - "next_page_token": resp.next_page_token, - "ingestion_logs_table": table, - "total_count": resp.metrics.total, - "ingestion_metrics": metrics, - } - - except ReconcilerClientError as error: - context = { - "ingestion_logs_error": error, - } - - return render(request, "diode/ingestion_logs.html", context) - - class SettingsView(View): """Settings view.""" From 9d6dec4f050c6635caf4ec9a939447c7fc8b7d56 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Fri, 14 Feb 2025 15:21:00 +0100 Subject: [PATCH 14/52] fix: retrieve object states using concrete models (#60) * fix: retrieve object states using concrete models Search backend/CachedValue is global 
and doesn't seem to be reliable with branching Signed-off-by: Michal Fiedorowicz * tidy up Signed-off-by: Michal Fiedorowicz * chore: bump netbox min version and netbox-docker Signed-off-by: Michal Fiedorowicz * chore: bump netbox min version Signed-off-by: Michal Fiedorowicz * fix tests Signed-off-by: Michal Fiedorowicz * use clean_fields() and tidy up Signed-off-by: Michal Fiedorowicz * fix: add backwards compatible support for mac_address field (#62) * fix: add backwards compatible support for mac_address field * feat: extract and serialize site from scope Signed-off-by: Michal Fiedorowicz * feat: apply change set - handle scope site Signed-off-by: Michal Fiedorowicz --------- Signed-off-by: Michal Fiedorowicz Co-authored-by: Luke Tucker <64618+ltucker@users.noreply.github.com> --- docker/Dockerfile-diode-netbox-plugin | 2 +- docker/docker-compose.yaml | 2 +- netbox-plugin.yaml | 3 + netbox_diode_plugin/__init__.py | 2 +- netbox_diode_plugin/api/serializers.py | 34 ++- netbox_diode_plugin/api/views.py | 268 +++++++++++++----- .../tests/test_api_apply_change_set.py | 176 +++++++++++- .../tests/test_api_object_state.py | 6 +- 8 files changed, 405 insertions(+), 88 deletions(-) diff --git a/docker/Dockerfile-diode-netbox-plugin b/docker/Dockerfile-diode-netbox-plugin index ec3f9c6..24a73fd 100644 --- a/docker/Dockerfile-diode-netbox-plugin +++ b/docker/Dockerfile-diode-netbox-plugin @@ -1,4 +1,4 @@ -FROM netboxcommunity/netbox:v4.1.11-3.0.2 +FROM netboxcommunity/netbox:v4.2.3-3.1.1 COPY ./netbox/configuration/ /etc/netbox/config/ RUN chmod 755 /etc/netbox/config/* && \ diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index c092668..c1112ab 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -1,7 +1,7 @@ name: diode-netbox-plugin services: netbox: &netbox - image: netboxcommunity/netbox:v4.1.11-3.0.2-diode-netbox-plugin + image: netboxcommunity/netbox:v4.2.3-3.1.1-diode-netbox-plugin build: context: . 
dockerfile: Dockerfile-diode-netbox-plugin diff --git a/netbox-plugin.yaml b/netbox-plugin.yaml index 3046a2a..916c77d 100644 --- a/netbox-plugin.yaml +++ b/netbox-plugin.yaml @@ -1,6 +1,9 @@ version: 0.1 package_name: netboxlabs-diode-netbox-plugin compatibility: + - release: 0.7.0 + netbox_min: 4.2.3 + netbox_max: 4.2.3 - release: 0.6.0 netbox_min: 4.1.0 netbox_max: 4.1.3 diff --git a/netbox_diode_plugin/__init__.py b/netbox_diode_plugin/__init__.py index fa1d860..c5da907 100644 --- a/netbox_diode_plugin/__init__.py +++ b/netbox_diode_plugin/__init__.py @@ -15,7 +15,7 @@ class NetBoxDiodePluginConfig(PluginConfig): description = "Diode plugin for NetBox." version = version_semver() base_url = "diode" - min_version = "3.7.2" + min_version = "4.2.3" default_settings = { # Auto-provision users for Diode plugin "auto_provision_users": False, diff --git a/netbox_diode_plugin/api/serializers.py b/netbox_diode_plugin/api/serializers.py index df0c5fb..838f8d3 100644 --- a/netbox_diode_plugin/api/serializers.py +++ b/netbox_diode_plugin/api/serializers.py @@ -275,14 +275,27 @@ class Meta: class DiodePrefixSerializer(PrefixSerializer): """Diode Prefix Serializer.""" - site = DiodeSiteSerializer() status = serializers.CharField() + site = serializers.SerializerMethodField(read_only=True) class Meta: """Meta class.""" model = PrefixSerializer.Meta.model - fields = PrefixSerializer.Meta.fields + fields = PrefixSerializer.Meta.fields + ["site"] + + def get_site(self, obj): + """Get the site from the instance scope.""" + if obj.scope is None: + return None + + scope_model_meta = obj.scope_type.model_class()._meta + if scope_model_meta.app_label == "dcim" and scope_model_meta.model_name == "site": + serializer = get_serializer_for_model(obj.scope) + context = {'request': self.context['request']} + return serializer(obj.scope, nested=True, context=context).data + + return None class DiodeClusterGroupSerializer(ClusterGroupSerializer): @@ -311,13 +324,26 @@ class 
DiodeClusterSerializer(ClusterSerializer): type = DiodeClusterTypeSerializer() group = DiodeClusterGroupSerializer() status = serializers.CharField() - site = DiodeSiteSerializer() + site = serializers.SerializerMethodField(read_only=True) class Meta: """Meta class.""" model = ClusterSerializer.Meta.model - fields = ClusterSerializer.Meta.fields + fields = ClusterSerializer.Meta.fields + ["site"] + + def get_site(self, obj): + """Get the site from the instance scope.""" + if obj.scope is None: + return None + + scope_model_meta = obj.scope_type.model_class()._meta + if scope_model_meta.app_label == "dcim" and scope_model_meta.model_name == "site": + serializer = get_serializer_for_model(obj.scope) + context = {'request': self.context['request']} + return serializer(obj.scope, nested=True, context=context).data + + return None class DiodeVirtualMachineSerializer(VirtualMachineSerializer): diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index 1768e5a..d2fdd15 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -1,9 +1,9 @@ #!/usr/bin/env python # Copyright 2024 NetBox Labs Inc """Diode NetBox Plugin - API Views.""" - from typing import Any, Dict, Optional +from django.apps import apps from django.conf import settings from packaging import version @@ -11,11 +11,11 @@ from core.models import ObjectType as NetBoxType else: from django.contrib.contenttypes.models import ContentType as NetBoxType + from django.core.exceptions import FieldError -from django.db import transaction +from django.core.exceptions import ValidationError as DjangoValidationError +from django.db import models, transaction from django.db.models import Q -from extras.models import CachedValue -from netbox.search import LookupTypes from rest_framework import status, views from rest_framework.exceptions import ValidationError from rest_framework.permissions import IsAuthenticated @@ -23,10 +23,87 @@ from utilities.api import 
get_serializer_for_model from netbox_diode_plugin.api.permissions import IsDiodeReader, IsDiodeWriter -from netbox_diode_plugin.api.serializers import ( - ApplyChangeSetRequestSerializer, - ObjectStateSerializer, -) +from netbox_diode_plugin.api.serializers import ApplyChangeSetRequestSerializer, ObjectStateSerializer + + +def dynamic_import(name): + """Dynamically import a class from an absolute path string.""" + components = name.split(".") + mod = __import__(components[0]) + for comp in components[1:]: + mod = getattr(mod, comp) + return mod + + +def _get_index_class_fields(object_type): + """ + Given an object type name (e.g., 'dcim.site'), dynamically find and return the corresponding Index class fields. + + :param object_type: Object type name in the format 'app_label.model_name' + :return: The corresponding model and its Index class (e.g., SiteIndex) field names or None. + """ + try: + # Extract app_label and model_name from 'dcim.site' + app_label, model_name = object_type.split('.') + + # Get the model class dynamically + model = apps.get_model(app_label, model_name) + + # Import the module where index classes are defined (adjust if needed) + index_module = dynamic_import(f"{app_label}.search.{model.__name__}Index") + + # Retrieve the index class fields tuple + fields = getattr(index_module, "fields", None) + + # Extract the field names list from the tuple + field_names = [field[0] for field in fields] + + return model, field_names + + except (LookupError, ModuleNotFoundError, AttributeError, ValueError): + return None, None + +def _validate_model_instance_fields(instance, fields, value): + """ + Validate the model instance fields against the value. + + :param instance: The model instance. + :param fields: The fields of the model instance. + :param value: The value to validate against the model instance fields. 
+ :return: fields list passed validation + """ + errors = {} + + # Set provided values to the instance fields + for field in fields: + if hasattr(instance, field): + # get the field type + field_cls = instance._meta.get_field(field).__class__ + + field_value = _convert_field_value(field_cls, value) + setattr(instance, field, field_value) + + # Attempt to validate the instance + try: + instance.clean_fields() + except DjangoValidationError as e: + errors = e.message_dict + return errors + +def _convert_field_value(field_cls, value): + """Return the converted field value based on the field type.""" + if value is None: + return value + + try: + if issubclass(field_cls, (models.FloatField, models.DecimalField)): + return float(value) + if issubclass(field_cls, models.IntegerField): + return int(value) + except (ValueError, TypeError): + pass + + return value class ObjectStateView(views.APIView): @@ -60,49 +137,39 @@ def _get_lookups(self, object_type_model: str) -> tuple: return ("site",) return () - def get(self, request, *args, **kwargs): - """ - Return a JSON with object_type, object_change_id, and object. - - Search for objects according to object type. - If the obj_type parameter is not in the parameters, raise a ValidationError. - When object ID is provided in the request, search using it in the model specified by object type. - If ID is not provided, use the q parameter for searching. 
- Lookup is iexact - """ - object_type = self.request.query_params.get("object_type", None) + def _search_queryset(self, request): + """Search for objects according to object type using search index classes.""" + object_type = request.GET.get("object_type", None) + object_id = request.GET.get("id", None) + query = request.GET.get("q", None) if not object_type: raise ValidationError("object_type parameter is required") - app_label, model_name = object_type.split(".") - object_content_type = NetBoxType.objects.get_by_natural_key( - app_label, model_name - ) - object_type_model = object_content_type.model_class() + if not object_id and not query: + raise ValidationError("id or q parameter is required") - object_id = self.request.query_params.get("id", None) + model, fields = _get_index_class_fields(object_type) if object_id: - queryset = object_type_model.objects.filter(id=object_id) + queryset = model.objects.filter(id=object_id) else: - lookup = LookupTypes.EXACT - search_value = self.request.query_params.get("q", None) - if not search_value: - raise ValidationError("id or q parameter is required") + q = Q() - query_filter = Q(**{f"value__{lookup}": search_value}) - query_filter &= Q(object_type__in=[object_content_type]) + invalid_fields = _validate_model_instance_fields(model(), fields, query) - object_id_in_cached_value = CachedValue.objects.filter( - query_filter - ).values_list("object_id", flat=True) + fields = [field for field in fields if field not in invalid_fields] - queryset = object_type_model.objects.filter( - id__in=object_id_in_cached_value - ) + for field in fields: + q |= Q(**{f"{field}__exact": query}) # Exact match - lookups = self._get_lookups(str(object_type_model).lower()) + try: + queryset = model.objects.filter(q) + except DjangoValidationError: + queryset = model.objects.none() + pass + + lookups = self._get_lookups(str(model).lower()) if lookups: queryset = queryset.prefetch_related(*lookups) @@ -112,16 +179,32 @@ def get(self, request, 
*args, **kwargs): ) if additional_attributes_query_filter: - try: - queryset = queryset.filter(**additional_attributes_query_filter) - except (FieldError, ValueError): - return Response( - {"errors": ["invalid additional attributes provided"]}, - status=status.HTTP_400_BAD_REQUEST, - ) + queryset = queryset.filter(**additional_attributes_query_filter) + + return queryset + + def get(self, request, *args, **kwargs): + """ + Return a JSON with object_type, object_change_id, and object. + + Search for objects according to object type. + If the obj_type parameter is not in the parameters, raise a ValidationError. + When object ID is provided in the request, search using it in the model specified by object type. + If ID is not provided, use the q parameter for searching. + Lookup is iexact + """ + try: + queryset = self._search_queryset(request) + except (FieldError, ValueError): + return Response( + {"errors": ["invalid additional attributes provided"]}, + status=status.HTTP_400_BAD_REQUEST, + ) self.check_object_permissions(request, queryset) + object_type = request.GET.get("object_type", None) + serializer = ObjectStateSerializer( queryset, many=True, @@ -285,17 +368,6 @@ def _get_error_response(change_set_id, error): status=status.HTTP_400_BAD_REQUEST, ) - def _ipaddress_assigned_object(self, change_set: list) -> list: - """Retrieve the IP address assigned object from the change set.""" - ipaddress_assigned_object = [ - change.get("data").get("assigned_object", None) - for change in change_set - if change.get("object_type") == "ipam.ipaddress" - and change.get("data", {}).get("assigned_object", None) - ] - - return ipaddress_assigned_object - def _retrieve_assigned_object_interface_device_lookup_args( self, device: dict ) -> dict: @@ -338,17 +410,17 @@ def _retrieve_assigned_object_interface_device_lookup_args( ) return args - def _handle_ipaddress_assigned_object( - self, object_data: dict, ipaddress_assigned_object: list - ) -> Optional[Dict[str, Any]]: + def 
_handle_ipaddress_assigned_object(self, object_data: dict) -> Optional[Dict[str, Any]]: """Handle IPAM IP address assigned object.""" - if any(ipaddress_assigned_object): - assigned_object_keys = list(ipaddress_assigned_object[0].keys()) + ipaddress_assigned_object = object_data.get("assigned_object", None) + + if ipaddress_assigned_object is not None: + assigned_object_keys = list(ipaddress_assigned_object.keys()) model_name = assigned_object_keys[0] assigned_object_type = self._get_assigned_object_type(model_name) assigned_object_model = self._get_object_type_model(assigned_object_type) assigned_object_properties_dict = dict( - ipaddress_assigned_object[0][model_name].items() + ipaddress_assigned_object[model_name].items() ) if len(assigned_object_properties_dict) == 0: @@ -381,7 +453,7 @@ def _handle_ipaddress_assigned_object( ) except assigned_object_model.DoesNotExist: return { - "assigned_object": f"Assigned object with name {ipaddress_assigned_object[0][model_name]} does not exist" + "assigned_object": f"Assigned object with name {ipaddress_assigned_object[model_name]} does not exist" } object_data.pop("assigned_object") @@ -389,6 +461,57 @@ def _handle_ipaddress_assigned_object( object_data["assigned_object_id"] = assigned_object_instance.id return None + def _handle_interface_mac_address_compat(self, instance, object_type: str, object_data: dict) -> Optional[Dict[str, Any]]: + """Handle interface mac address backward compatibility.""" + # TODO(ltucker): deprecate. 
+ if object_type != "dcim.interface" and object_type != "virtualization.vminterface": + return None + + if object_data.get("mac_address"): + mac_address_value = object_data.pop("mac_address") + mac_address_instance, _ = instance.mac_addresses.get_or_create( + mac_address=mac_address_value, + ) + instance.primary_mac_address = mac_address_instance + instance.save() + return None + + def _handle_scope(self, object_data: dict) -> Optional[Dict[str, Any]]: + """Handle scope object.""" + if object_data.get("site"): + site = object_data.pop("site") + object_data["scope_type"] = "dcim.site" + scope_type_model = self._get_object_type_model("dcim.site") + site_id = site.get("id", None) + if site_id is None: + try: + site = scope_type_model.objects.get( + name=site.get("name") + ) + site_id = site.id + except scope_type_model.DoesNotExist: + return {"site": f"site with name {site.get('name')} does not exist"} + + object_data["scope_id"] = site_id + + return None + + def _transform_object_data(self, object_type: str, object_data: dict) -> Optional[Dict[str, Any]]: + """Transform object data.""" + errors = None + + match object_type: + case "ipam.ipaddress": + errors = self._handle_ipaddress_assigned_object(object_data) + case "ipam.prefix": + errors = self._handle_scope(object_data) + case "virtualization.cluster": + errors = self._handle_scope(object_data) + case _: + pass + + return errors + def post(self, request, *args, **kwargs): """ Create a new change set and apply it to the current state. 
@@ -411,8 +534,6 @@ def post(self, request, *args, **kwargs): change_set = request_serializer.data.get("change_set", None) - ipaddress_assigned_object = self._ipaddress_assigned_object(change_set) - try: with transaction.atomic(): for change in change_set: @@ -422,14 +543,7 @@ def post(self, request, *args, **kwargs): object_data = change.get("data", None) object_id = change.get("object_id", None) - errors = None - if ( - any(ipaddress_assigned_object) - and object_type == "ipam.ipaddress" - ): - errors = self._handle_ipaddress_assigned_object( - object_data, ipaddress_assigned_object - ) + errors = self._transform_object_data(object_type, object_data) if errors is not None: serializer_errors.append({"change_id": change_id, **errors}) @@ -450,6 +564,12 @@ def post(self, request, *args, **kwargs): serializer_errors.append( {"change_id": change_id, **errors_dict} ) + continue + + errors = self._handle_interface_mac_address_compat(serializer.instance, object_type, object_data) + if errors is not None: + serializer_errors.append({"change_id": change_id, **errors}) + continue if len(serializer_errors) > 0: raise ApplyChangeSetException except ApplyChangeSetException: diff --git a/netbox_diode_plugin/tests/test_api_apply_change_set.py b/netbox_diode_plugin/tests/test_api_apply_change_set.py index c8e1232..6bef32d 100644 --- a/netbox_diode_plugin/tests/test_api_apply_change_set.py +++ b/netbox_diode_plugin/tests/test_api_apply_change_set.py @@ -14,12 +14,17 @@ Site, ) from django.contrib.auth import get_user_model -from ipam.models import ASN, RIR, IPAddress +from ipam.models import ASN, RIR, IPAddress, Prefix from netaddr import IPNetwork from rest_framework import status from users.models import Token from utilities.testing import APITestCase -from virtualization.models import Cluster, ClusterType +from virtualization.models import ( + Cluster, + ClusterType, + VirtualMachine, + VMInterface, +) User = get_user_model() @@ -145,6 +150,12 @@ def setUp(self): ) 
IPAddress.objects.bulk_create(self.ip_addresses) + self.virtual_machines = ( + VirtualMachine(name="Virtual Machine 1"), + VirtualMachine(name="Virtual Machine 2"), + ) + VirtualMachine.objects.bulk_create(self.virtual_machines) + self.url = "/netbox/api/plugins/diode/apply-change-set/" def send_request(self, payload, status_code=status.HTTP_200_OK): @@ -982,3 +993,164 @@ def test_add_primary_ip_address_to_device(self): self.assertEqual(response.json().get("result"), "success") self.assertEqual(device_updated.name, self.devices[0].name) self.assertEqual(device_updated.primary_ip4, self.ip_addresses[0]) + + def test_create_and_update_interface_with_compat_mac_address_field(self): + """Test create interface using backward compatible mac_address field.""" + payload = { + "change_set_id": str(uuid.uuid4()), + "change_set": [ + { + "change_id": str(uuid.uuid4()), + "change_type": "create", + "object_version": None, + "object_type": "dcim.interface", + "object_id": None, + "data": { + "name": "Interface 6", + "type": "virtual", + "mac_address": "00:00:00:00:00:01", + "device": { + "id": self.devices[1].pk, + }, + }, + }, + ], + } + + response = self.send_request(payload) + self.assertEqual(response.json().get("result"), "success") + self.assertEqual(Interface.objects.count(), 6) + interface_id = Interface.objects.order_by('-id').first().id + self.assertEqual(Interface.objects.get(id=interface_id).mac_address, "00:00:00:00:00:01") + + payload = { + "change_set_id": str(uuid.uuid4()), + "change_set": [ + { + "change_id": str(uuid.uuid4()), + "change_type": "update", + "object_version": None, + "object_type": "dcim.interface", + "object_id": interface_id, + "data": { + "name": "Interface 6", + "mac_address": "00:00:00:00:00:02", + "type": "virtual", + "device": { + "id": self.devices[1].pk, + }, + }, + }, + ], + } + response = self.send_request(payload) + self.assertEqual(response.json().get("result"), "success") + self.assertEqual(response.json().get("result"), "success") 
+ self.assertEqual(Interface.objects.count(), 6) + self.assertEqual(Interface.objects.get(id=interface_id).mac_address, "00:00:00:00:00:02") + + def test_create_and_update_vminterface_with_compat_mac_address_field(self): + """Test create vminterface using backward compatible mac_address field.""" + payload = { + "change_set_id": str(uuid.uuid4()), + "change_set": [ + { + "change_id": str(uuid.uuid4()), + "change_type": "create", + "object_version": None, + "object_type": "virtualization.vminterface", + "object_id": None, + "data": { + "name": "VM Interface 1", + "mac_address": "00:00:00:00:00:01", + "virtual_machine": { + "id": self.virtual_machines[0].pk, + }, + }, + }, + ], + } + + response = self.send_request(payload) + self.assertEqual(response.json().get("result"), "success") + self.assertEqual(VMInterface.objects.count(), 1) + interface_id = VMInterface.objects.order_by('-id').first().id + self.assertEqual(VMInterface.objects.get(id=interface_id).mac_address, "00:00:00:00:00:01") + + payload = { + "change_set_id": str(uuid.uuid4()), + "change_set": [ + { + "change_id": str(uuid.uuid4()), + "change_type": "update", + "object_version": None, + "object_type": "virtualization.vminterface", + "object_id": interface_id, + "data": { + "name": "VM Interface 1", + "mac_address": "00:00:00:00:00:02", + "virtual_machine": { + "id": self.virtual_machines[0].pk, + }, + }, + }, + ], + } + response = self.send_request(payload) + self.assertEqual(response.json().get("result"), "success") + self.assertEqual(VMInterface.objects.count(), 1) + self.assertEqual(VMInterface.objects.get(id=interface_id).mac_address, "00:00:00:00:00:02") + + def test_create_prefix_with_site_stored_as_scope(self): + """Test create prefix with site stored as scope.""" + payload = { + "change_set_id": str(uuid.uuid4()), + "change_set": [ + { + "change_id": str(uuid.uuid4()), + "change_type": "create", + "object_version": None, + "object_type": "ipam.prefix", + "object_id": None, + "data": { + "prefix": 
"192.168.0.0/24", + "site": { + "name": self.sites[0].name, + }, + }, + }, + ], + } + response = self.send_request(payload) + + self.assertEqual(response.json().get("result"), "success") + self.assertEqual(Prefix.objects.get(prefix="192.168.0.0/24").scope, self.sites[0]) + + def test_create_prefix_with_unknown_site_fails(self): + """Test create prefix with unknown site fails.""" + payload = { + "change_set_id": str(uuid.uuid4()), + "change_set": [ + { + "change_id": str(uuid.uuid4()), + "change_type": "create", + "object_version": None, + "object_type": "ipam.prefix", + "object_id": None, + "data": { + "prefix": "192.168.0.0/24", + "site": { + "name": "unknown site" + }, + }, + }, + ], + } + response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) + + self.assertEqual(response.json().get("result"), "failed") + self.assertIn( + 'site with name unknown site does not exist', + response.json().get("errors")[0].get("site"), + ) + self.assertFalse(Prefix.objects.filter(prefix="192.168.0.0/24").exists()) diff --git a/netbox_diode_plugin/tests/test_api_object_state.py b/netbox_diode_plugin/tests/test_api_object_state.py index 7031549..d13ef35 100644 --- a/netbox_diode_plugin/tests/test_api_object_state.py +++ b/netbox_diode_plugin/tests/test_api_object_state.py @@ -12,7 +12,6 @@ Site, ) from django.contrib.auth import get_user_model -from django.core.management import call_command from ipam.models import IPAddress from netaddr import IPNetwork from rest_framework import status @@ -154,9 +153,6 @@ def setUpClass(cls): ) IPAddress.objects.bulk_create(cls.ip_addresses) - # call_command is because the searching using q parameter uses CachedValue to get the object ID - call_command("reindex") - def setUp(self): """Set up test.""" self.root_user = User.objects.create_user( @@ -182,7 +178,7 @@ def setUp(self): def test_return_object_state_using_id(self): """Test searching using id parameter - Root User.""" - site_id = Site.objects.get(name=self.sites[0]).id 
+ site_id = Site.objects.get(name=self.sites[0].name).id query_parameters = {"id": site_id, "object_type": "dcim.site"} response = self.client.get(self.url, query_parameters, **self.root_header) From dacbfd96e3e1c8e02aedfc0ab36056f3f892e539 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Mon, 17 Feb 2025 22:21:17 +0100 Subject: [PATCH 15/52] fix: apply change set concurrency (#63) * fix: retrieving object states of tags Signed-off-by: Michal Fiedorowicz * fix: skip creating objects if already exist Signed-off-by: Michal Fiedorowicz * chore: refactor apply change set view complexity Signed-off-by: Michal Fiedorowicz * tidy up Signed-off-by: Michal Fiedorowicz --------- Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/views.py | 174 +++++++++++++++++++------------ 1 file changed, 108 insertions(+), 66 deletions(-) diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index d2fdd15..acfee6e 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -49,6 +49,10 @@ def _get_index_class_fields(object_type): # Get the model class dynamically model = apps.get_model(app_label, model_name) + # TagIndex registered in the netbox_diode_plugin + if app_label == "extras" and model_name == "tag": + app_label = "netbox_diode_plugin" + # Import the module where index classes are defined (adjust if needed) index_module = dynamic_import(f"{app_label}.search.{model.__name__}Index") @@ -246,7 +250,7 @@ def _get_object_type_model(object_type: str): object_content_type = NetBoxType.objects.get_by_natural_key( app_label, model_name ) - return object_content_type.model_class() + return object_content_type, object_content_type.model_class() def _get_assigned_object_type(self, model_name: str): """Get the object type model from applied IPAddress assigned object.""" @@ -255,79 +259,114 @@ def _get_assigned_object_type(self, model_name: str): } return assignable_object_types.get(model_name.lower(), None) + def 
_add_nested_opts(self, fields, key, value): + if isinstance(value, dict): + for nested_key, nested_value in value.items(): + self._add_nested_opts(fields, f"{key}__{nested_key}", nested_value) + elif not isinstance(value, list): + fields[key] = value + def _get_serializer( self, change_type: str, object_id: int, object_type: str, object_data: dict, - change_set_id: str, ): """Get the serializer for the object type.""" - object_type_model = self._get_object_type_model(object_type) + object_type_model, object_type_model_class = self._get_object_type_model(object_type) + if change_type == "create": - serializer = get_serializer_for_model(object_type_model)( - data=object_data, context={"request": self.request} - ) - elif change_type == "update": - lookups = () - args = {} + return self._get_serializer_to_create(object_data, object_type, object_type_model, object_type_model_class) - primary_ip_to_set: Optional[dict] = None + if change_type == "update": + return self._get_serializer_to_update(object_data, object_id, object_type, object_type_model_class) + + raise ValidationError("Invalid change_type") + + def _get_serializer_to_create(self, object_data, object_type, object_type_model, object_type_model_class): + # Get object data fields that are not dictionaries or lists + fields = self._get_fields_to_find_existing_objects(object_data, object_type, object_type_model) + # Check if the object already exists + try: + instance = object_type_model_class.objects.get(**fields) + return get_serializer_for_model(object_type_model_class)( + instance, data=object_data, context={"request": self.request, "pk": instance.pk} + ) + except object_type_model_class.DoesNotExist: + pass + serializer = get_serializer_for_model(object_type_model_class)( + data=object_data, context={"request": self.request} + ) + return serializer - if object_id: - args["id"] = object_id - elif object_type == "dcim.device" and any( + def _get_serializer_to_update(self, object_data, object_id, object_type, 
object_type_model_class): + lookups = () + fields = {} + primary_ip_to_set: Optional[dict] = None + if object_id: + fields["id"] = object_id + elif object_type == "dcim.device" and any( object_data.get(attr) for attr in ("primary_ip4", "primary_ip6") - ): + ): + ip_address = self._retrieve_primary_ip_address( + "primary_ip4", object_data + ) + + if ip_address is None: ip_address = self._retrieve_primary_ip_address( - "primary_ip4", object_data + "primary_ip6", object_data ) - if ip_address is None: - ip_address = self._retrieve_primary_ip_address( - "primary_ip6", object_data - ) - - if ip_address is None: - raise ValidationError("primary IP not found") - - if ip_address: - primary_ip_to_set = { - "id": ip_address.id, - "family": ip_address.family, - } + if ip_address is None: + raise ValidationError("primary IP not found") - lookups = ("site",) - args["name"] = object_data.get("name") - args["site__name"] = object_data.get("site").get("name") - else: - raise ValidationError("object_id parameter is required") + if ip_address: + primary_ip_to_set = { + "id": ip_address.id, + "family": ip_address.family, + } - try: - instance = object_type_model.objects.prefetch_related(*lookups).get( - **args - ) - if object_type == "dcim.device" and primary_ip_to_set: - object_data = { - "id": instance.id, - "device_type": instance.device_type.id, - "role": instance.role.id, - "site": instance.site.id, - f'primary_ip{primary_ip_to_set.get("family")}': primary_ip_to_set.get( - "id" - ), - } - except object_type_model.DoesNotExist: - raise ValidationError(f"object with id {object_id} does not exist") - - serializer = get_serializer_for_model(object_type_model)( - instance, data=object_data, context={"request": self.request} - ) + lookups = ("site",) + fields["name"] = object_data.get("name") + fields["site__name"] = object_data.get("site").get("name") else: - raise ValidationError("Invalid change_type") + raise ValidationError("object_id parameter is required") + try: + instance = 
object_type_model_class.objects.prefetch_related(*lookups).get(**fields) + if object_type == "dcim.device" and primary_ip_to_set: + object_data = { + "id": instance.id, + "device_type": instance.device_type.id, + "role": instance.role.id, + "site": instance.site.id, + f'primary_ip{primary_ip_to_set.get("family")}': primary_ip_to_set.get( + "id" + ), + } + except object_type_model_class.DoesNotExist: + raise ValidationError(f"object with id {object_id} does not exist") + serializer = get_serializer_for_model(object_type_model_class)( + instance, data=object_data, context={"request": self.request} + ) return serializer + def _get_fields_to_find_existing_objects(self, object_data, object_type, object_type_model): + fields = {} + for key, value in object_data.items(): + self._add_nested_opts(fields, key, value) + match object_type: + case "dcim.interface" | "virtualization.vminterface": + mac_address = fields.pop("mac_address", None) + if mac_address is not None: + fields["primary_mac_address__mac_address"] = mac_address + case "ipam.ipaddress": + fields.pop("assigned_object_type") + fields["assigned_object_type_id"] = fields.pop("assigned_object_id") + case "ipam.prefix" | "virtualization.cluster": + fields["scope_type"] = object_type_model + return fields + def _retrieve_primary_ip_address(self, primary_ip_attr: str, object_data: dict): """Retrieve the primary IP address object.""" ip_address = object_data.get(primary_ip_attr) @@ -347,8 +386,8 @@ def _retrieve_primary_ip_address(self, primary_ip_attr: str, object_data: dict): interface_device = interface.get("device") if interface_device is None: return None - - ip_address_object = self._get_object_type_model("ipam.ipaddress").objects.get( + object_type_mode, object_type_model_class = self._get_object_type_model("ipam.ipaddress") + ip_address_object = object_type_model_class.objects.get( address=ip_address.get("address"), interface__name=interface.get("name"), interface__device__name=interface_device.get("name"), @@ 
-418,7 +457,7 @@ def _handle_ipaddress_assigned_object(self, object_data: dict) -> Optional[Dict[ assigned_object_keys = list(ipaddress_assigned_object.keys()) model_name = assigned_object_keys[0] assigned_object_type = self._get_assigned_object_type(model_name) - assigned_object_model = self._get_object_type_model(assigned_object_type) + assigned_object_model, object_type_model_class = self._get_object_type_model(assigned_object_type) assigned_object_properties_dict = dict( ipaddress_assigned_object[model_name].items() ) @@ -449,9 +488,9 @@ def _handle_ipaddress_assigned_object(self, object_data: dict) -> Optional[Dict[ return {"assigned_object": error} assigned_object_instance = ( - assigned_object_model.objects.prefetch_related(*lookups).get(**args) + object_type_model_class.objects.prefetch_related(*lookups).get(**args) ) - except assigned_object_model.DoesNotExist: + except object_type_model_class.DoesNotExist: return { "assigned_object": f"Assigned object with name {ipaddress_assigned_object[model_name]} does not exist" } @@ -480,16 +519,17 @@ def _handle_scope(self, object_data: dict) -> Optional[Dict[str, Any]]: """Handle scope object.""" if object_data.get("site"): site = object_data.pop("site") - object_data["scope_type"] = "dcim.site" - scope_type_model = self._get_object_type_model("dcim.site") + scope_type = "dcim.site" + _, object_type_model_class = self._get_object_type_model(scope_type) + object_data["scope_type"] = scope_type site_id = site.get("id", None) if site_id is None: try: - site = scope_type_model.objects.get( + site = object_type_model_class.objects.get( name=site.get("name") ) site_id = site.id - except scope_type_model.DoesNotExist: + except object_type_model_class.DoesNotExist: return {"site": f"site with name {site.get('name')} does not exist"} object_data["scope_id"] = site_id @@ -549,9 +589,11 @@ def post(self, request, *args, **kwargs): serializer_errors.append({"change_id": change_id, **errors}) continue - serializer = 
self._get_serializer( - change_type, object_id, object_type, object_data, change_set_id - ) + serializer = self._get_serializer(change_type, object_id, object_type, object_data) + + # Skip creating an object if it already exists + if change_type == "create" and serializer.context.get("pk"): + continue if serializer.is_valid(): serializer.save() From 93e4c9bde7956a5adbd7b18afa09ef371c528f7e Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Thu, 20 Feb 2025 14:57:55 +0100 Subject: [PATCH 16/52] fix: scope support on apply change set (#64) Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/views.py | 74 ++++++++++++------- .../tests/test_api_apply_change_set.py | 71 +++++++++++++++++- 2 files changed, 117 insertions(+), 28 deletions(-) diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index acfee6e..e791ab8 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -35,7 +35,7 @@ def dynamic_import(name): return mod -def _get_index_class_fields(object_type): +def _get_index_class_fields(object_type: str | NetBoxType): """ Given an object type name (e.g., 'dcim.site'), dynamically find and return the corresponding Index class fields. @@ -43,23 +43,18 @@ def _get_index_class_fields(object_type): :return: The corresponding model and its Index class (e.g., SiteIndex) field names or None. 
""" try: - # Extract app_label and model_name from 'dcim.site' - app_label, model_name = object_type.split('.') + if isinstance(object_type, str): + app_label, model_name = object_type.split('.') + else: + app_label, model_name = object_type.app_label, object_type.model - # Get the model class dynamically model = apps.get_model(app_label, model_name) - # TagIndex registered in the netbox_diode_plugin if app_label == "extras" and model_name == "tag": app_label = "netbox_diode_plugin" - # Import the module where index classes are defined (adjust if needed) index_module = dynamic_import(f"{app_label}.search.{model.__name__}Index") - - # Retrieve the index class fields tuple fields = getattr(index_module, "fields", None) - - # Extract the field names list from the tuple field_names = [field[0] for field in fields] return model, field_names @@ -244,12 +239,13 @@ class ApplyChangeSetView(views.APIView): permission_classes = [IsAuthenticated, IsDiodeWriter] @staticmethod - def _get_object_type_model(object_type: str): + def _get_object_type_model(object_type: str | NetBoxType): """Get the object type model from object_type.""" - app_label, model_name = object_type.split(".") - object_content_type = NetBoxType.objects.get_by_natural_key( - app_label, model_name - ) + if isinstance(object_type, str): + app_label, model_name = object_type.split(".") + object_content_type = NetBoxType.objects.get_by_natural_key(app_label, model_name) + else: + object_content_type = object_type return object_content_type, object_content_type.model_class() def _get_assigned_object_type(self, model_name: str): @@ -274,19 +270,19 @@ def _get_serializer( object_data: dict, ): """Get the serializer for the object type.""" - object_type_model, object_type_model_class = self._get_object_type_model(object_type) + _, object_type_model_class = self._get_object_type_model(object_type) if change_type == "create": - return self._get_serializer_to_create(object_data, object_type, object_type_model, 
object_type_model_class) + return self._get_serializer_to_create(object_data, object_type, object_type_model_class) if change_type == "update": return self._get_serializer_to_update(object_data, object_id, object_type, object_type_model_class) raise ValidationError("Invalid change_type") - def _get_serializer_to_create(self, object_data, object_type, object_type_model, object_type_model_class): + def _get_serializer_to_create(self, object_data, object_type, object_type_model_class): # Get object data fields that are not dictionaries or lists - fields = self._get_fields_to_find_existing_objects(object_data, object_type, object_type_model) + fields = self._get_fields_to_find_existing_objects(object_data, object_type) # Check if the object already exists try: instance = object_type_model_class.objects.get(**fields) @@ -351,10 +347,11 @@ def _get_serializer_to_update(self, object_data, object_id, object_type, object_ ) return serializer - def _get_fields_to_find_existing_objects(self, object_data, object_type, object_type_model): + def _get_fields_to_find_existing_objects(self, object_data, object_type): fields = {} for key, value in object_data.items(): self._add_nested_opts(fields, key, value) + match object_type: case "dcim.interface" | "virtualization.vminterface": mac_address = fields.pop("mac_address", None) @@ -364,7 +361,18 @@ def _get_fields_to_find_existing_objects(self, object_data, object_type, object_ fields.pop("assigned_object_type") fields["assigned_object_type_id"] = fields.pop("assigned_object_id") case "ipam.prefix" | "virtualization.cluster": - fields["scope_type"] = object_type_model + if scope_type := object_data.get("scope_type"): + scope_type_model, _ = self._get_object_type_model(scope_type) + fields["scope_type"] = scope_type_model + case "virtualization.virtualmachine": + if cluster_scope_type := fields.get("cluster__scope_type"): + cluster_scope_type_model, _ = self._get_object_type_model(cluster_scope_type) + fields["cluster__scope_type"] = 
cluster_scope_type_model + case "virtualization.vminterface": + if cluster_scope_type := fields.get("virtual_machine__cluster__scope_type"): + cluster_scope_type_model, _ = self._get_object_type_model(cluster_scope_type) + fields["virtual_machine__cluster__scope_type"] = cluster_scope_type_model + return fields def _retrieve_primary_ip_address(self, primary_ip_attr: str, object_data: dict): @@ -515,13 +523,18 @@ def _handle_interface_mac_address_compat(self, instance, object_type: str, obje instance.save() return None - def _handle_scope(self, object_data: dict) -> Optional[Dict[str, Any]]: + def _handle_scope(self, object_data: dict, is_nested: bool = False) -> Optional[Dict[str, Any]]: """Handle scope object.""" if object_data.get("site"): site = object_data.pop("site") scope_type = "dcim.site" - _, object_type_model_class = self._get_object_type_model(scope_type) - object_data["scope_type"] = scope_type + object_type_model, object_type_model_class = self._get_object_type_model(scope_type) + # Scope type of the nested object happens to be resolved differently than in the top-level object + # and is expected to be a content type object instead of "app_label.model_name" string format + if is_nested: + object_data["scope_type"] = object_type_model + else: + object_data["scope_type"] = scope_type site_id = site.get("id", None) if site_id is None: try: @@ -544,9 +557,18 @@ def _transform_object_data(self, object_type: str, object_data: dict) -> Optiona case "ipam.ipaddress": errors = self._handle_ipaddress_assigned_object(object_data) case "ipam.prefix": - errors = self._handle_scope(object_data) + errors = self._handle_scope(object_data, False) case "virtualization.cluster": - errors = self._handle_scope(object_data) + errors = self._handle_scope(object_data, False) + case "virtualization.virtualmachine": + if cluster_object_data := object_data.get("cluster"): + errors = self._handle_scope(cluster_object_data, True) + object_data["cluster"] = cluster_object_data + 
case "virtualization.vminterface": + cluster_object_data = object_data.get("virtual_machine", {}).get("cluster") + if cluster_object_data is not None: + errors = self._handle_scope(cluster_object_data, True) + object_data["virtual_machine"]["cluster"] = cluster_object_data case _: pass diff --git a/netbox_diode_plugin/tests/test_api_apply_change_set.py b/netbox_diode_plugin/tests/test_api_apply_change_set.py index 6bef32d..62950d4 100644 --- a/netbox_diode_plugin/tests/test_api_apply_change_set.py +++ b/netbox_diode_plugin/tests/test_api_apply_change_set.py @@ -14,6 +14,7 @@ Site, ) from django.contrib.auth import get_user_model +from django.contrib.contenttypes.models import ContentType from ipam.models import ASN, RIR, IPAddress, Prefix from netaddr import IPNetwork from rest_framework import status @@ -101,9 +102,13 @@ def setUp(self): name="Cluster Type 1", slug="cluster-type-1" ) + self.cluster_types = (cluster_type,) + + site_content_type = ContentType.objects.get_for_model(Site) + self.clusters = ( - Cluster(name="Cluster 1", type=cluster_type), - Cluster(name="Cluster 2", type=cluster_type), + Cluster(name="Cluster 1", type=cluster_type, scope_type=site_content_type, scope_id=self.sites[0].id), + Cluster(name="Cluster 2", type=cluster_type, scope_type=site_content_type, scope_id=self.sites[0].id), ) Cluster.objects.bulk_create(self.clusters) @@ -1154,3 +1159,65 @@ def test_create_prefix_with_unknown_site_fails(self): response.json().get("errors")[0].get("site"), ) self.assertFalse(Prefix.objects.filter(prefix="192.168.0.0/24").exists()) + + def test_create_virtualization_cluster_with_site_stored_as_scope(self): + """Test create cluster with site stored as scope.""" + payload = { + "change_set_id": str(uuid.uuid4()), + "change_set": [ + { + "change_id": str(uuid.uuid4()), + "change_type": "create", + "object_version": None, + "object_type": "virtualization.cluster", + "object_id": None, + "data": { + "name": "Cluster 3", + "type": { + "name": 
self.cluster_types[0].name, + }, + "site": { + "name": self.sites[0].name, + }, + }, + }, + ], + } + response = self.send_request(payload) + + self.assertEqual(response.json().get("result"), "success") + self.assertEqual(Cluster.objects.get(name="Cluster 3").scope, self.sites[0]) + + def test_create_virtualmachine_with_cluster_site_stored_as_scope(self): + """Test create virtualmachine with cluster site stored as scope.""" + payload = { + "change_set_id": str(uuid.uuid4()), + "change_set": [ + { + "change_id": str(uuid.uuid4()), + "change_type": "create", + "object_version": None, + "object_type": "virtualization.virtualmachine", + "object_id": None, + "data": { + "name": "VM foobar", + "site": { + "name": self.sites[0].name, + }, + "cluster": { + "name": self.clusters[0].name, + "type": { + "name": self.cluster_types[0].name, + }, + "site": { + "name": self.sites[0].name, + }, + }, + }, + }, + ], + } + response = self.send_request(payload) + + self.assertEqual(response.json().get("result"), "success") + self.assertEqual(VirtualMachine.objects.get(name="VM foobar", site_id=self.sites[0].id).cluster.scope, self.sites[0]) From ce3beeb82fa25f68f6294b18224b38b006074d32 Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Mon, 7 Apr 2025 17:14:59 -0400 Subject: [PATCH 17/52] feat: update data model (#73) * fix: scope support on apply change set (#64) Signed-off-by: Michal Fiedorowicz * wip diff api * set default values and missing slugs Signed-off-by: Michal Fiedorowicz * tidy up setting defaults Signed-off-by: Michal Fiedorowicz * remove unused imports Signed-off-by: Michal Fiedorowicz * fix constructor of object type Signed-off-by: Michal Fiedorowicz * set slugs (if not present) after resolving existing instances Signed-off-by: Michal Fiedorowicz * emit ref_id instead of variable object_id field for new objects * improve entity field mapping coverage * fill in primary value mapping, use primary value for slug * use canonical field 
ordering in change dicts * first pass at certain common circular refs * remove ref id to itself Signed-off-by: Michal Fiedorowicz * tidy up Signed-off-by: Michal Fiedorowicz * add applier Signed-off-by: Michal Fiedorowicz * fix resolve ref before lookup, use field name directly, not field attr * don't query with unresolved references * fix _build_expressions_queryset Signed-off-by: Michal Fiedorowicz * resolve lint issues Signed-off-by: Michal Fiedorowicz * exclude fields with GenericRelation type Signed-off-by: Michal Fiedorowicz * fix sorting dict we may get ints strings etc Signed-off-by: Michal Fiedorowicz * rework applier logic Signed-off-by: Michal Fiedorowicz * applier with content type fields Signed-off-by: Michal Fiedorowicz * fix content type related existing value Signed-off-by: Michal Fiedorowicz * exclude foreign key fields with many to one rel Signed-off-by: Michal Fiedorowicz * fix: support for post create updates eg (primary mac address) (#68) * fix: support for post create updates eg (primary mac address) * use serializers, fix relevent tests * linting * filter fields in the prior state * add some basic smoke tests for diff / diff+apply * Change set validation (#69) * add change set validation Signed-off-by: Michal Fiedorowicz * remove redundant serializers Signed-off-by: Michal Fiedorowicz * remove redundant serializers tests Signed-off-by: Michal Fiedorowicz * tidy up Signed-off-by: Michal Fiedorowicz * adjust tests, lighter validation of refs on diff, handle generic --------- Signed-off-by: Michal Fiedorowicz Co-authored-by: Michal Fiedorowicz * fix: expand support for cycle breaking, add additional logical matchers (#70) * fix: fix error fingerprinting tags (#71) * fix: all noops -> no changes, show noops as only prior state (#72) --------- Signed-off-by: Michal Fiedorowicz Co-authored-by: Michal Fiedorowicz --- Makefile | 5 + docker/netbox/configuration/configuration.py | 9 +- docker/netbox/env/netbox.env | 1 + docker/netbox/local_settings.py 
| 3 +- netbox_diode_plugin/api/applier.py | 122 +++ netbox_diode_plugin/api/common.py | 191 ++++ netbox_diode_plugin/api/differ.py | 193 ++++ netbox_diode_plugin/api/matcher.py | 440 +++++++++ netbox_diode_plugin/api/plugin_utils.py | 906 ++++++++++++++++++ netbox_diode_plugin/api/serializers.py | 368 ------- netbox_diode_plugin/api/supported_models.py | 292 ++++++ netbox_diode_plugin/api/transformer.py | 396 ++++++++ netbox_diode_plugin/api/urls.py | 4 +- netbox_diode_plugin/api/views.py | 739 ++------------ .../tests/test_api_apply_change_set.py | 756 ++++++--------- .../tests/test_api_diff_and_apply.py | 184 ++++ .../tests/test_api_generate_diff.py | 107 +++ .../tests/test_api_object_state.py | 391 -------- .../tests/test_api_serializers.py | 32 - pyproject.toml | 2 + 20 files changed, 3248 insertions(+), 1893 deletions(-) create mode 100644 netbox_diode_plugin/api/applier.py create mode 100644 netbox_diode_plugin/api/common.py create mode 100644 netbox_diode_plugin/api/differ.py create mode 100644 netbox_diode_plugin/api/matcher.py create mode 100644 netbox_diode_plugin/api/plugin_utils.py create mode 100644 netbox_diode_plugin/api/supported_models.py create mode 100644 netbox_diode_plugin/api/transformer.py create mode 100644 netbox_diode_plugin/tests/test_api_diff_and_apply.py create mode 100644 netbox_diode_plugin/tests/test_api_generate_diff.py delete mode 100644 netbox_diode_plugin/tests/test_api_object_state.py delete mode 100644 netbox_diode_plugin/tests/test_api_serializers.py diff --git a/Makefile b/Makefile index 6145666..79e7f75 100644 --- a/Makefile +++ b/Makefile @@ -17,6 +17,11 @@ docker-compose-netbox-plugin-test: -@$(DOCKER_COMPOSE) -f docker/docker-compose.yaml -f docker/docker-compose.test.yaml run -u root --rm netbox ./manage.py test --keepdb netbox_diode_plugin @$(MAKE) docker-compose-netbox-plugin-down +.PHONY: docker-compose-netbox-plugin-test-ff +docker-compose-netbox-plugin-test-ff: + -@$(DOCKER_COMPOSE) -f docker/docker-compose.yaml -f 
docker/docker-compose.test.yaml run -u root --rm netbox ./manage.py test --failfast --keepdb netbox_diode_plugin + @$(MAKE) docker-compose-netbox-plugin-down + .PHONY: docker-compose-netbox-plugin-test-cover docker-compose-netbox-plugin-test-cover: -@$(DOCKER_COMPOSE) -f docker/docker-compose.yaml -f docker/docker-compose.test.yaml run --rm -u root -e COVERAGE_FILE=/opt/netbox/netbox/coverage/.coverage netbox sh -c "coverage run --source=netbox_diode_plugin --omit=*/migrations/* ./manage.py test --keepdb netbox_diode_plugin && coverage xml -o /opt/netbox/netbox/coverage/report.xml && coverage report -m | tee /opt/netbox/netbox/coverage/report.txt" diff --git a/docker/netbox/configuration/configuration.py b/docker/netbox/configuration/configuration.py index cc51c59..d459441 100644 --- a/docker/netbox/configuration/configuration.py +++ b/docker/netbox/configuration/configuration.py @@ -44,9 +44,12 @@ def _environ_get_and_map(variable_name: str, default: str | None = None, return map_fn(env_value) -_AS_BOOL = lambda value: value.lower() == 'true' -_AS_INT = lambda value: int(value) -_AS_LIST = lambda value: list(filter(None, value.split(' '))) +def _AS_BOOL(value): + return value.lower() == 'true' +def _AS_INT(value): + return int(value) +def _AS_LIST(value): + return list(filter(None, value.split(' '))) _BASE_DIR = dirname(dirname(abspath(__file__))) diff --git a/docker/netbox/env/netbox.env b/docker/netbox/env/netbox.env index 45993fc..38a0211 100644 --- a/docker/netbox/env/netbox.env +++ b/docker/netbox/env/netbox.env @@ -41,3 +41,4 @@ DIODE_TO_NETBOX_API_KEY=1368dbad13e418d5a443d93cf255edde03a2a754 NETBOX_TO_DIODE_API_KEY=1e99338b8cab5fc637bc55f390bda1446f619c42 DIODE_API_KEY=5a52c45ee8231156cb620d193b0291912dd15433 BASE_PATH=netbox/ +DEBUG=True \ No newline at end of file diff --git a/docker/netbox/local_settings.py b/docker/netbox/local_settings.py index 6ab2063..0542c56 100644 --- a/docker/netbox/local_settings.py +++ b/docker/netbox/local_settings.py @@ -1,4 
+1,5 @@ from netbox_branching.utilities import DynamicSchemaDict + from .configuration import DATABASE # Wrap DATABASES with DynamicSchemaDict for dynamic schema support @@ -9,4 +10,4 @@ # Employ our custom database router DATABASE_ROUTERS = [ 'netbox_branching.database.BranchAwareRouter', -] \ No newline at end of file +] diff --git a/netbox_diode_plugin/api/applier.py b/netbox_diode_plugin/api/applier.py new file mode 100644 index 0000000..101f30f --- /dev/null +++ b/netbox_diode_plugin/api/applier.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python +# Copyright 2024 NetBox Labs Inc +"""Diode NetBox Plugin - API - Applier.""" + + +import logging + +from django.apps import apps +from django.contrib.contenttypes.models import ContentType +from django.core.exceptions import ObjectDoesNotExist +from django.db import models +from rest_framework.exceptions import ValidationError as ValidationError + +from .common import NON_FIELD_ERRORS, Change, ChangeSet, ChangeSetException, ChangeSetResult, ChangeType +from .plugin_utils import get_object_type_model, legal_fields +from .supported_models import get_serializer_for_model + +logger = logging.getLogger(__name__) + + +def apply_changeset(change_set: ChangeSet) -> ChangeSetResult: + """Apply a change set.""" + _validate_change_set(change_set) + + created = {} + for i, change in enumerate(change_set.changes): + change_type = change.change_type + object_type = change.object_type + + if change_type == ChangeType.NOOP.value: + continue + + try: + model_class = get_object_type_model(object_type) + data = _pre_apply(model_class, change, created) + _apply_change(data, model_class, change, created) + except ValidationError as e: + raise _err_from_validation_error(e, f"changes[{i}]") + except ObjectDoesNotExist: + raise _err(f"{object_type} with id {change.object_id} does not exist", f"changes[{i}]", "object_id") + # ConstraintViolationError ? + # ... 
+ + return ChangeSetResult( + id=change_set.id, + ) + +def _apply_change(data: dict, model_class: models.Model, change: Change, created: dict): + serializer_class = get_serializer_for_model(model_class) + change_type = change.change_type + if change_type == ChangeType.CREATE.value: + serializer = serializer_class(data=data) + serializer.is_valid(raise_exception=True) + instance = serializer.save() + created[change.ref_id] = instance + + elif change_type == ChangeType.UPDATE.value: + if object_id := change.object_id: + instance = model_class.objects.get(id=object_id) + serializer = serializer_class(instance, data=data, partial=True) + serializer.is_valid(raise_exception=True) + serializer.save() + # create and update in a same change set + elif change.ref_id and (instance := created[change.ref_id]): + serializer = serializer_class(instance, data=data, partial=True) + serializer.is_valid(raise_exception=True) + serializer.save() + +def _pre_apply(model_class: models.Model, change: Change, created: dict): + data = change.data.copy() + + # resolve foreign key references to new objects + for ref_field in change.new_refs: + if isinstance(data[ref_field], (list, tuple)): + ref_list = [] + for ref in data[ref_field]: + if isinstance(ref, str): + ref_list.append(created[ref].pk) + elif isinstance(ref, int): + ref_list.append(ref) + data[ref_field] = ref_list + else: + data[ref_field] = created[data[ref_field]].pk + + # ignore? fields that are not in the data model (error?) 
+ allowed_fields = legal_fields(model_class) + for key in list(data.keys()): + if key not in allowed_fields: + logger.warning(f"Field {key} is not in the diode data model, ignoring.") + data.pop(key) + + return data + +def _validate_change_set(change_set: ChangeSet): + if not change_set.id: + raise _err("Change set ID is required", "changeset","id") + if not change_set.changes: + raise _err("Changes are required", "changeset", "changes") + + for i, change in enumerate(change_set.changes): + if change.object_id is None and change.ref_id is None: + raise _err("Object ID or Ref ID must be provided", f"changes[{i}]", NON_FIELD_ERRORS) + if change.change_type not in ChangeType: + raise _err(f"Unsupported change type '{change.change_type}'", f"changes[{i}]", "change_type") + +def _err(message, object_name, field): + return ChangeSetException(message, errors={object_name: {field: [message]}}) + +def _err_from_validation_error(e, object_name): + errors = {} + if e.detail: + if isinstance(e.detail, dict): + errors[object_name] = e.detail + elif isinstance(e.detail, (list, tuple)): + errors[object_name] = { + NON_FIELD_ERRORS: e.detail + } + else: + errors[object_name] = { + NON_FIELD_ERRORS: [e.detail] + } + return ChangeSetException("validation error", errors=errors) diff --git a/netbox_diode_plugin/api/common.py b/netbox_diode_plugin/api/common.py new file mode 100644 index 0000000..9bcb6b2 --- /dev/null +++ b/netbox_diode_plugin/api/common.py @@ -0,0 +1,191 @@ +#!/usr/bin/env python +# Copyright 2025 NetBox Labs Inc +"""Diode NetBox Plugin - API - Common types and utilities.""" + +import logging +import uuid +from collections import defaultdict +from dataclasses import dataclass, field +from enum import Enum + +from django.apps import apps +from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation +from django.contrib.contenttypes.models import ContentType +from django.core.exceptions import ValidationError +from django.db import models +from 
rest_framework import status + +logger = logging.getLogger("netbox.diode_data") + +NON_FIELD_ERRORS = "__all__" + +@dataclass +class UnresolvedReference: + """unresolved reference to an object.""" + + object_type: str + uuid: str + + def __str__(self): + """String representation of the unresolved reference.""" + return f"new_object:{self.object_type}:{self.uuid}" + + def __eq__(self, other): + """Equality operator.""" + if not isinstance(other, UnresolvedReference): + return False + return self.object_type == other.object_type and self.uuid == other.uuid + + def __hash__(self): + """Hash function.""" + return hash((self.object_type, self.uuid)) + + def __lt__(self, other): + """Less than operator.""" + return self.object_type < other.object_type or (self.object_type == other.object_type and self.uuid < other.uuid) + + +class ChangeType(Enum): + """Change type enum.""" + + CREATE = "create" + UPDATE = "update" + NOOP = "noop" + + +@dataclass +class Change: + """A change to a model instance.""" + + change_type: ChangeType + object_type: str + object_id: int | None = field(default=None) + object_primary_value: str | None = field(default=None) + ref_id: str | None = field(default=None) + id: str = field(default_factory=lambda: str(uuid.uuid4())) + before: dict | None = field(default=None) + data: dict | None = field(default=None) + new_refs: list[str] = field(default_factory=list) + + def to_dict(self) -> dict: + """Convert the change to a dictionary.""" + return { + "id": self.id, + "change_type": self.change_type.value, + "object_type": self.object_type, + "object_id": self.object_id, + "ref_id": self.ref_id, + "object_primary_value": self.object_primary_value, + "before": self.before, + "data": self.data, + "new_refs": self.new_refs, + } + + +@dataclass +class ChangeSet: + """A set of changes to a model instance.""" + + id: str = field(default_factory=lambda: str(uuid.uuid4())) + changes: list[Change] = field(default_factory=list) + branch: dict[str, str] | None = 
field(default=None) # {"id": str, "name": str} + + def to_dict(self) -> dict: + """Convert the change set to a dictionary.""" + return { + "id": self.id, + "changes": [change.to_dict() for change in self.changes], + "branch": self.branch, + } + + def validate(self) -> dict[str, list[str]]: + """Validate basics of the change set data.""" + errors = defaultdict(dict) + + for change in self.changes: + model = apps.get_model(change.object_type) + + change_data = change.data.copy() + if change.before: + change_data.update(change.before) + + excluded_relation_fields, rel_errors = self._validate_relations(change_data, model) + if rel_errors: + errors[change.object_type] = rel_errors + + try: + instance = model(**change_data) + instance.clean_fields(exclude=excluded_relation_fields) + except ValidationError as e: + errors[change.object_type].update(e.error_dict) + + return errors or None + + def _validate_relations(self, change_data: dict, model: models.Model) -> tuple[list[str], dict]: + # check that there is some value for every required + # reference field, but don't validate the actual reference. + # the fields are removed from the change_data so that other + # fields can be validated by instantiating the model. + excluded_relation_fields = [] + rel_errors = defaultdict(list) + for f in model._meta.get_fields(): + if isinstance(f, (GenericRelation, GenericForeignKey)): + excluded_relation_fields.append(f.name) + continue + if not f.is_relation: + continue + field_name = f.name + excluded_relation_fields.append(field_name) + + if hasattr(f, "related_model") and f.related_model == ContentType: + change_data.pop(field_name, None) + base_field = field_name[:-5] + excluded_relation_fields.append(base_field + "_id") + value = change_data.pop(base_field + "_id", None) + else: + value = change_data.pop(field_name, None) + + if not f.null and not f.blank and not f.many_to_many: + # this field is a required relation... 
+ if value is None: + rel_errors[f.name].append(f"Field {f.name} is required") + return excluded_relation_fields, rel_errors + + +@dataclass +class ChangeSetResult: + """A result of applying a change set.""" + + id: str | None = field(default_factory=lambda: str(uuid.uuid4())) + change_set: ChangeSet | None = field(default=None) + errors: dict | None = field(default=None) + + def to_dict(self) -> dict: + """Convert the result to a dictionary.""" + if self.change_set: + return self.change_set.to_dict() + + return { + "id": self.id, + "errors": self.errors, + } + + def get_status_code(self) -> int: + """Get the status code for the result.""" + return status.HTTP_200_OK if not self.errors else status.HTTP_400_BAD_REQUEST + + +class ChangeSetException(Exception): + """ChangeSetException is raised when an error occurs while generating or applying a change set.""" + + def __init__(self, message, errors=None): + """Initialize the exception.""" + super().__init__(message) + self.message = message + self.errors = errors or {} + + def __str__(self): + """Return the string representation of the exception.""" + if self.errors: + return f"{self.message}: {self.errors}" + return self.message diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py new file mode 100644 index 0000000..a1721a0 --- /dev/null +++ b/netbox_diode_plugin/api/differ.py @@ -0,0 +1,193 @@ +#!/usr/bin/env python +# Copyright 2025 NetBox Labs Inc +"""Diode NetBox Plugin - API - Differ.""" + +import copy +import logging + +from django.contrib.contenttypes.models import ContentType +from django.core.exceptions import ValidationError +from utilities.data import shallow_compare_dict + +from .common import Change, ChangeSet, ChangeSetException, ChangeSetResult, ChangeType +from .plugin_utils import get_primary_value, legal_fields +from .supported_models import extract_supported_models +from .transformer import cleanup_unresolved_references, transform_proto_json + +logger = 
logging.getLogger(__name__) + +SUPPORTED_MODELS = extract_supported_models() + + +def prechange_data_from_instance(instance) -> dict: # noqa: C901 + """Convert model instance data to a dictionary format for comparison.""" + prechange_data = {} + + if instance is None: + return prechange_data + + model_class = instance.__class__ + object_type = f"{model_class._meta.app_label}.{model_class._meta.model_name}" + + model = SUPPORTED_MODELS.get(object_type) + if not model: + raise ValidationError(f"Model {model_class.__name__} is not supported") + + fields = model.get("fields", {}) + if not fields: + raise ValidationError(f"Model {model_class.__name__} has no fields") + + diode_fields = legal_fields(model_class) + + for field_name, field_info in fields.items(): + # permit only diode fields and the primary key + if field_name not in diode_fields and field_name != "id": + continue + + if not hasattr(instance, field_name): + continue + + if field_info["type"] == "ForeignKey" and field_info.get("is_many_to_one_rel", False): + continue + + value = getattr(instance, field_name) + if hasattr(value, "all"): # Handle many-to-many and many-to-one relationships + # For any relationship that has an 'all' method, get all related objects' primary keys + prechange_data[field_name] = ( + [item.pk for item in value.all()] if value is not None else [] + ) + elif hasattr( + value, "pk" + ): # Handle regular related fields (ForeignKey, OneToOne) + # Handle ContentType fields + if isinstance(value, ContentType): + prechange_data[field_name] = f"{value.app_label}.{value.model}" + else: + # For regular related fields, get the primary key + prechange_data[field_name] = value.pk if value is not None else None + else: + prechange_data[field_name] = value + + return prechange_data + + +def clean_diff_data(data: dict, exclude_empty_values: bool = True) -> dict: + """Clean diff data by removing null values.""" + result = {} + for k, v in data.items(): + if exclude_empty_values: + if v is None: + 
continue + if isinstance(v, list) and len(v) == 0: + continue + if isinstance(v, dict) and len(v) == 0: + continue + if isinstance(v, str) and v == "": + continue + result[k] = v + return result + + +def diff_to_change( + object_type: str, + prechange_data: dict, + postchange_data: dict, + changed_attrs: list[str], + unresolved_references: list[str], +) -> Change: + """Convert a diff to a change.""" + change_type = ChangeType.UPDATE if len(prechange_data) > 0 else ChangeType.CREATE + if change_type == ChangeType.UPDATE and not len(changed_attrs) > 0: + change_type = ChangeType.NOOP + + primary_value = get_primary_value(prechange_data | postchange_data, object_type) + if primary_value is None: + primary_value = "(unnamed)" + + prior_id = prechange_data.get("id") + ref_id = None + if prior_id is None: + ref_id = postchange_data.pop("id", None) + + change = Change( + change_type=change_type, + object_type=object_type, + object_id=prior_id if isinstance(prior_id, int) else None, + ref_id=ref_id, + object_primary_value=primary_value, + new_refs=unresolved_references, + ) + + if change_type != ChangeType.NOOP: + postchange_data_clean = clean_diff_data(postchange_data) + change.data = sort_dict_recursively(postchange_data_clean) + else: + change.data = {} + + if change_type == ChangeType.UPDATE or change_type == ChangeType.NOOP: + prechange_data_clean = clean_diff_data(prechange_data) + change.before = sort_dict_recursively(prechange_data_clean) + + return change + +def sort_dict_recursively(d): + """Recursively sorts a dictionary by keys.""" + if isinstance(d, dict): + return {k: sort_dict_recursively(v) for k, v in sorted(d.items())} + if isinstance(d, list): + # Convert all items to strings for comparison + return sorted([sort_dict_recursively(item) for item in d], key=str) + return d + + +def generate_changeset(entity: dict, object_type: str) -> ChangeSetResult: + """Generate a changeset for an entity.""" + change_set = ChangeSet() + + entities = 
transform_proto_json(entity, object_type, SUPPORTED_MODELS) + by_uuid = {x['_uuid']: x for x in entities} + for entity in entities: + prechange_data = {} + changed_attrs = [] + new_refs = cleanup_unresolved_references(entity) + object_type = entity.pop("_object_type") + _ = entity.pop("_uuid") + instance = entity.pop("_instance", None) + + if instance: + # the prior state is another new object... + if isinstance(instance, str): + prechange_data = copy.deepcopy(by_uuid[instance]) + # prior state is a model instance + else: + prechange_data = prechange_data_from_instance(instance) + + changed_data = shallow_compare_dict( + prechange_data, entity, + ) + changed_attrs = sorted(changed_data.keys()) + change = diff_to_change( + object_type, + prechange_data, + entity, + changed_attrs, + new_refs, + ) + + change_set.changes.append(change) + + has_any_changes = False + for change in change_set.changes: + if change.change_type != ChangeType.NOOP: + has_any_changes = True + break + + if not has_any_changes: + change_set.changes = [] + if errors := change_set.validate(): + raise ChangeSetException("Invalid change set", errors) + + return ChangeSetResult( + id=change_set.id, + change_set=change_set, + ) diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py new file mode 100644 index 0000000..8f11735 --- /dev/null +++ b/netbox_diode_plugin/api/matcher.py @@ -0,0 +1,440 @@ +#!/usr/bin/env python +# Copyright 2024 NetBox Labs Inc +"""Diode NetBox Plugin - API - Object matching utilities.""" + +import copy +import logging +from dataclasses import dataclass +from functools import cache, lru_cache +from typing import Type + +from core.models import ObjectType as NetBoxType +from django.conf import settings +from django.contrib.contenttypes.fields import ContentType +from django.core.exceptions import FieldDoesNotExist +from django.db import models +from django.db.models import F, Value +from django.db.models.lookups import Exact +from 
django.db.models.query_utils import Q + +from .common import UnresolvedReference +from .plugin_utils import content_type_id, get_object_type, get_object_type_model + +logger = logging.getLogger(__name__) + +# +# these matchers are not driven by netbox unique constraints, +# but are logical criteria that may be used to match objects. +# These should represent the likely intent of a user when +# matching existing objects. +# +_LOGICAL_MATCHERS = { + "dcim.macaddress": lambda: [ + ObjectMatchCriteria( + fields=("mac_address", "assigned_object_type", "assigned_object_id"), + name="logical_mac_address_within_parent", + model_class=get_object_type_model("dcim.macaddress"), + condition=Q(assigned_object_id__isnull=False), + ), + ObjectMatchCriteria( + fields=("mac_address", "assigned_object_type", "assigned_object_id"), + name="logical_mac_address_within_parent", + model_class=get_object_type_model("dcim.macaddress"), + condition=Q(assigned_object_id__isnull=True), + ), + ], + "ipam.ipaddress": lambda: [ + ObjectMatchCriteria( + fields=("address", ), + name="logical_ip_address_global_no_vrf", + model_class=get_object_type_model("ipam.ipaddress"), + condition=Q(vrf__isnull=True), + ), + ObjectMatchCriteria( + fields=("address", "assigned_object_type", "assigned_object_id"), + name="logical_ip_address_within_vrf", + model_class=get_object_type_model("ipam.ipaddress"), + condition=Q(vrf__isnull=False) + ), + ], + "ipam.prefix": lambda: [ + ObjectMatchCriteria( + fields=("prefix",), + name="logical_prefix_global_no_vrf", + model_class=get_object_type_model("ipam.prefix"), + condition=Q(vrf__isnull=True), + ), + ObjectMatchCriteria( + fields=("prefix", "vrf_id"), + name="logical_prefix_within_vrf", + model_class=get_object_type_model("ipam.prefix"), + condition=Q(vrf__isnull=False), + ), + ], +} + + +@dataclass +class ObjectMatchCriteria: + """ + Defines criteria for identifying a specific object. + + This matcher expects a fully 'transformed' and resolved + set of fields. 
ie field names are snake case and match + the model fields and any references to another object + specify a specific id in the appropriate field name. + eg device_id=123 etc and for any generic references, + both the type and idshould be specified, eg: + scope_type="dcim.site" and scope_id=123 + """ + + fields: tuple[str] | None = None + expressions: tuple | None = None + condition: Q | None = None + model_class: Type[models.Model] | None = None + name: str | None = None + + def __hash__(self): + """Hash the object match criteria.""" + return hash((self.fields, self.expressions, self.condition, self.model_class.__name__, self.name)) + + def has_required_fields(self, data) -> bool: + """Returns True if the data given contains a value for all fields referenced by the constraint.""" + return all(field in data for field in self.get_refs()) + + @cache + def get_refs(self) -> set[str]: + """Returns a set of all field names referenced by the constraint.""" + refs = set() + if self.fields: + refs.update(self.fields) + elif self.expressions: + for expr in self.expressions: + refs |= _get_refs(expr) + return frozenset(refs) + + @cache + def get_insensitive_refs(self) -> set[str]: + """ + Returns a set of all field names that should be compared in a case insensitive manner. + + best effort, doesn't handle things being nested in a complex way. + """ + refs = set() + if self.expressions: + for expr in self.expressions: + # TODO be more careful here + if expr.__class__.__name__ == "Lower": + for source_expr in getattr(expr, "source_expressions", []): + if hasattr(source_expr, "name"): + refs.add(source_expr.name) + return refs + + def fingerprint(self, data: dict) -> str|None: + """ + Returns a fingerprint of the data based on these criteria. + + These criteria that can be used to determine if two + data structs roughly match. + + This is a best effort based on the referenced fields + and some interrogation of case sensitivity. The + real criteria are potentially complex... 
+ """ + if not self.has_required_fields(data): + return None + + if self.condition: + if not self._check_condition(data): + return None + + # sort the fields by name + sorted_fields = sorted(self.get_refs()) + insensitive = self.get_insensitive_refs() + values = [] + for field in sorted_fields: + value = data[field] + if isinstance(value, dict): + logger.warning(f"unexpected value type for fingerprinting: {value}") + return None + if field in insensitive: + value = value.lower() + values.append(value) + # logger.debug(f"fingerprint {self}: {data} -> values: {tuple(values)}") + + return hash(tuple(values)) + + def _check_condition(self, data) -> bool: + if self.condition is None: + return True + # TODO: handle evaluating complex conditions, + # there are only simple ones currently + if self.condition.connector != Q.AND: + logger.error(f"Unhandled condition {self.condition}") + return False + + if len(self.condition.children) != 1: + logger.error(f"Unhandled condition {self.condition}") + return False + + if len(self.condition.children[0]) != 2: + logger.error(f"Unhandled condition {self.condition}") + return False + + k, v = self.condition.children[0] + result = False + if k.endswith("__isnull"): + k = k[:-8] + result = k not in data or data[k] is None + else: + result = k in data and data[k] == v + + if self.condition.negated: + result = not result + + return result + + def build_queryset(self, data) -> models.QuerySet: + """Builds a queryset for the constraint with the given data.""" + if self.fields and len(self.fields) > 0: + return self._build_fields_queryset(data) + if self.expressions and len(self.expressions) > 0: + return self._build_expressions_queryset(data) + raise ValueError("No fields or expressions to build queryset from") + + def _build_fields_queryset(self, data) -> models.QuerySet: + """Builds a queryset for a simple set-of-fields constraint.""" + data = self._prepare_data(data) + lookup_kwargs = {} + for field_name in self.fields: + field = 
self.model_class._meta.get_field(field_name) + if field_name not in data: + logger.error(f" * cannot build fields queryset for {self.name} (missing field {field_name})") + return None # cannot match, missing field data + lookup_value = data.get(field_name) + if isinstance(lookup_value, UnresolvedReference): + logger.error(f" * cannot build fields queryset for {self.name} ({field_name} is unresolved reference)") + return None # cannot match, missing field data + if isinstance(lookup_value, dict): + logger.error(f" * cannot build fields queryset for {self.name} ({field_name} is dict)") + return None # cannot match, missing field data + lookup_kwargs[field.name] = lookup_value + + # logger.error(f" * query kwargs: {lookup_kwargs}") + qs = self.model_class.objects.filter(**lookup_kwargs) + if self.condition: + qs = qs.filter(self.condition) + return qs + + def _build_expressions_queryset(self, data) -> models.QuerySet: + """Builds a queryset for the constraint with the given data.""" + data = self._prepare_data(data) + replacements = { + F(field): Value(value) if isinstance(value, (str, int, float, bool)) else value + for field, value in data.items() + } + + filters = [] + for expr in self.expressions: + if hasattr(expr, "get_expression_for_validation"): + expr = expr.get_expression_for_validation() + + refs = [F(ref) for ref in _get_refs(expr)] + for ref in refs: + if ref not in replacements: + logger.error(f" * cannot build expr queryset for {self.name} (missing field {ref})") + return None # cannot match, missing field data + if isinstance(replacements[ref], UnresolvedReference): + logger.error(f" * cannot build expr queryset for {self.name} ({ref} is unresolved reference)") + return None # cannot match, missing field data + + rhs = expr.replace_expressions(replacements) + condition = Exact(expr, rhs) + filters.append(condition) + + qs = self.model_class.objects.filter(*filters) + if self.condition: + qs = qs.filter(self.condition) + return qs + + def 
_prepare_data(self, data: dict) -> dict: + prepared = {} + for field_name, value in data.items(): + try: + field = self.model_class._meta.get_field(field_name) + # special handling for object type -> content type id + if field.is_relation and hasattr(field, "related_model") and field.related_model == ContentType: + prepared[field_name] = content_type_id(value) + else: + prepared[field_name] = value + + except FieldDoesNotExist: + continue + # logger.error(f"prepared data: {data} -> {prepared}") + return prepared + +@lru_cache(maxsize=256) +def get_model_matchers(model_class) -> list[ObjectMatchCriteria]: + """Extract unique constraints from a Django model.""" + object_type = get_object_type(model_class) + matchers = _LOGICAL_MATCHERS.get(object_type, lambda: [])() + + # collect single fields that are unique + for field in model_class._meta.fields: + if field.name == "id": + # TODO(ltucker): more django-general detection of pk field? + continue + + if field.unique: + matchers.append( + ObjectMatchCriteria( + model_class=model_class, + fields=(field.name,), + name=f"unique_{field.name}", + ) + ) + + # collect UniqueConstraint constraints + for constraint in model_class._meta.constraints: + if not _is_supported_constraint(constraint, model_class): + continue + if len(constraint.fields) > 0: + matchers.append( + ObjectMatchCriteria( + model_class=model_class, + fields=tuple(constraint.fields), + condition=constraint.condition, + name=constraint.name, + ) + ) + elif len(constraint.expressions) > 0: + matchers.append( + ObjectMatchCriteria( + model_class=model_class, + expressions=tuple(constraint.expressions), + condition=constraint.condition, + name=constraint.name, + ) + ) + else: + logger.error( + f"Constraint {constraint.name} on {model_class.__name__} had no fields or expressions (skipped)" + ) + # (this shouldn't happen / enforced by django) + continue + + return matchers + + +def _is_supported_constraint(constraint, model_class) -> bool: + if not 
isinstance(constraint, models.UniqueConstraint): + return False + + if len(constraint.opclasses) > 0: + logger.warning(f"Constraint {constraint.name} on {model_class.__name__} had opclasses (skipped)") + return False + + if constraint.nulls_distinct is not None and constraint.nulls_distinct is True: + logger.warning(f"Constraint {constraint.name} on {model_class.__name__} had nulls_distinct (skipped)") + return False + + for field_name in constraint.fields: + field = model_class._meta.get_field(field_name) + if field.generated: + logger.warning( + f"Constraint {constraint.name} on {model_class.__name__} had" + f" generated field {field_name} (skipped)" + ) + return False + + return True + +def _get_refs(expr) -> set[str]: + refs = set() + if isinstance(expr, str): + refs.add(expr) + elif isinstance(expr, F): + refs.add(expr.name) + elif hasattr(expr, "get_source_expressions"): + for subexpr in expr.get_source_expressions(): + refs |= _get_refs(subexpr) + else: + logger.warning(f"Unhandled expression type for _get_refs: {type(expr)}") + return refs + +def _fingerprint_all(data: dict) -> str: + """ + Returns a fingerprint of the data based on all fields. + + Data should be a (flattened) dictionary of field values. + This ignores any fields that start with an underscore. + """ + if data is None: + return None + + values = [] + for k, v in sorted(data.items()): + if k.startswith("_"): + continue + values.append(k) + if isinstance(v, (list, tuple)): + values.extend(sorted(v)) + elif isinstance(v, dict): + values.append(_fingerprint_all(v)) + else: + values.append(v) + # logger.error(f"_fingerprint_all: {data} -> values: {tuple(values)}") + + return hash(tuple(values)) + +def fingerprint(data: dict, object_type: str) -> str: + """ + Fingerprint a data structure. + + This uses the first matcher that has all + required fields or else uses all fields. + + TODO: This means there are pathological? 
cases where + the same object is being referenced but by + different unique constraints in the same diff... + this could lead to some unexpected behavior. + """ + if data is None: + return None + + model_class = get_object_type_model(object_type) + # check any known match criteria + for matcher in get_model_matchers(model_class): + fp = matcher.fingerprint(data) + if fp is not None: + return fp + # fall back to fingerprinting all the data + return _fingerprint_all(data) + +def find_existing_object(data: dict, object_type: str): + """ + Find an existing object that matches the given data. + + Uses all object match criteria to look for an existing + object. Returns the first match found. + + Returns the object if found, otherwise None. + """ + logger.error(f"resolving {data}") + model_class = get_object_type_model(object_type) + for matcher in get_model_matchers(model_class): + if not matcher.has_required_fields(data): + logger.error(f" * skipped matcher {matcher.name} (missing fields)") + continue + q = matcher.build_queryset(data) + if q is None: + logger.error(f" * skipped matcher {matcher.name} (no queryset)") + continue + logger.error(f" * trying query {q.query}") + existing = q.order_by('pk').first() + if existing is not None: + logger.error(f" -> Found object {existing} via {matcher.name}") + return existing + logger.error(f" -> No object found for matcher {matcher.name}") + logger.error(" * No matchers found an existing object") + return None diff --git a/netbox_diode_plugin/api/plugin_utils.py b/netbox_diode_plugin/api/plugin_utils.py new file mode 100644 index 0000000..b526a5c --- /dev/null +++ b/netbox_diode_plugin/api/plugin_utils.py @@ -0,0 +1,906 @@ +"""Diode plugin helpers.""" + +# Generated code. DO NOT EDIT. 
+# Timestamp: 2025-04-01 21:05:16Z + +from dataclasses import dataclass +from functools import lru_cache +from typing import Type + +from core.models import ObjectType as NetBoxType +from django.contrib.contenttypes.models import ContentType +from django.db import models + + +@lru_cache(maxsize=256) +def get_object_type_model(object_type: str) -> Type[models.Model]: + """Get the model class for a given object type.""" + app_label, model_name = object_type.split('.') + object_content_type = NetBoxType.objects.get_by_natural_key(app_label, model_name) + return object_content_type.model_class() + +@lru_cache(maxsize=256) +def get_object_type(model_class: Type[models.Model]) -> str: + """Get the object type for a given model class.""" + content_type = ContentType.objects.get_for_model(model_class) + return content_type.app_label + '.' + content_type.model + +@lru_cache(maxsize=256) +def content_type_id(object_type: str) -> int: + """Get the content type id for a given object type.""" + app_label, model_name = object_type.split('.') + object_content_type = NetBoxType.objects.get_by_natural_key(app_label, model_name) + return object_content_type.id + +@dataclass +class RefInfo: + object_type: str + field_name: str + is_generic: bool = False + is_many: bool = False + +_JSON_REF_INFO = { + 'circuits.circuit': { + 'assignments': RefInfo(object_type='circuits.circuitgroupassignment', field_name='assignments', is_many=True), + 'provider': RefInfo(object_type='circuits.provider', field_name='provider'), + 'providerAccount': RefInfo(object_type='circuits.provideraccount', field_name='provider_account'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'type': RefInfo(object_type='circuits.circuittype', field_name='type'), + }, + 'circuits.circuitgroup': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': 
RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'circuits.circuitgroupassignment': { + 'group': RefInfo(object_type='circuits.circuitgroup', field_name='group'), + 'memberCircuit': RefInfo(object_type='circuits.circuit', field_name='member', is_generic=True), + 'memberVirtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='member', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'circuits.circuittermination': { + 'circuit': RefInfo(object_type='circuits.circuit', field_name='circuit'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'terminationLocation': RefInfo(object_type='dcim.location', field_name='termination', is_generic=True), + 'terminationProviderNetwork': RefInfo(object_type='circuits.providernetwork', field_name='termination', is_generic=True), + 'terminationRegion': RefInfo(object_type='dcim.region', field_name='termination', is_generic=True), + 'terminationSite': RefInfo(object_type='dcim.site', field_name='termination', is_generic=True), + 'terminationSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='termination', is_generic=True), + }, + 'circuits.circuittype': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'circuits.provider': { + 'accounts': RefInfo(object_type='circuits.provideraccount', field_name='accounts', is_many=True), + 'asns': RefInfo(object_type='ipam.asn', field_name='asns', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'circuits.provideraccount': { + 'provider': RefInfo(object_type='circuits.provider', field_name='provider'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'circuits.providernetwork': { + 'provider': RefInfo(object_type='circuits.provider', field_name='provider'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 
'circuits.virtualcircuit': { + 'providerAccount': RefInfo(object_type='circuits.provideraccount', field_name='provider_account'), + 'providerNetwork': RefInfo(object_type='circuits.providernetwork', field_name='provider_network'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'type': RefInfo(object_type='circuits.virtualcircuittype', field_name='type'), + }, + 'circuits.virtualcircuittermination': { + 'interface': RefInfo(object_type='dcim.interface', field_name='interface'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'virtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='virtual_circuit'), + }, + 'circuits.virtualcircuittype': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.cable': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'dcim.cabletermination': { + 'cable': RefInfo(object_type='dcim.cable', field_name='cable'), + 'terminationCircuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='termination', is_generic=True), + 'terminationConsolePort': RefInfo(object_type='dcim.consoleport', field_name='termination', is_generic=True), + 'terminationConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='termination', is_generic=True), + 'terminationFrontPort': RefInfo(object_type='dcim.frontport', field_name='termination', is_generic=True), + 'terminationInterface': RefInfo(object_type='dcim.interface', field_name='termination', is_generic=True), + 'terminationPowerFeed': RefInfo(object_type='dcim.powerfeed', field_name='termination', is_generic=True), + 'terminationPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='termination', is_generic=True), + 'terminationPowerPort': 
RefInfo(object_type='dcim.powerport', field_name='termination', is_generic=True), + 'terminationRearPort': RefInfo(object_type='dcim.rearport', field_name='termination', is_generic=True), + }, + 'dcim.consoleport': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.consoleserverport': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.device': { + 'cluster': RefInfo(object_type='virtualization.cluster', field_name='cluster'), + 'deviceType': RefInfo(object_type='dcim.devicetype', field_name='device_type'), + 'location': RefInfo(object_type='dcim.location', field_name='location'), + 'oobIp': RefInfo(object_type='ipam.ipaddress', field_name='oob_ip'), + 'platform': RefInfo(object_type='dcim.platform', field_name='platform'), + 'primaryIp4': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip4'), + 'primaryIp6': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip6'), + 'rack': RefInfo(object_type='dcim.rack', field_name='rack'), + 'role': RefInfo(object_type='dcim.devicerole', field_name='role'), + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'virtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='virtual_chassis'), + }, + 'dcim.devicebay': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'installedDevice': RefInfo(object_type='dcim.device', field_name='installed_device'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.devicerole': { + 'tags': 
RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.devicetype': { + 'defaultPlatform': RefInfo(object_type='dcim.platform', field_name='default_platform'), + 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.frontport': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'rearPort': RefInfo(object_type='dcim.rearport', field_name='rear_port'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.interface': { + 'bridge': RefInfo(object_type='dcim.interface', field_name='bridge'), + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'lag': RefInfo(object_type='dcim.interface', field_name='lag'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'parent': RefInfo(object_type='dcim.interface', field_name='parent'), + 'primaryMacAddress': RefInfo(object_type='dcim.macaddress', field_name='primary_mac_address'), + 'qinqSvlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), + 'taggedVlans': RefInfo(object_type='ipam.vlan', field_name='tagged_vlans', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'untaggedVlan': RefInfo(object_type='ipam.vlan', field_name='untagged_vlan'), + 'vdcs': RefInfo(object_type='dcim.virtualdevicecontext', field_name='vdcs', is_many=True), + 'vlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='vlan_translation_policy'), + 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), + 'wirelessLans': RefInfo(object_type='wireless.wirelesslan', field_name='wireless_lans', is_many=True), + }, + 'dcim.inventoryitem': { + 'componentConsolePort': RefInfo(object_type='dcim.consoleport', field_name='component', is_generic=True), + 
'componentConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='component', is_generic=True), + 'componentFrontPort': RefInfo(object_type='dcim.frontport', field_name='component', is_generic=True), + 'componentInterface': RefInfo(object_type='dcim.interface', field_name='component', is_generic=True), + 'componentPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='component', is_generic=True), + 'componentPowerPort': RefInfo(object_type='dcim.powerport', field_name='component', is_generic=True), + 'componentRearPort': RefInfo(object_type='dcim.rearport', field_name='component', is_generic=True), + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), + 'parent': RefInfo(object_type='dcim.inventoryitem', field_name='parent'), + 'role': RefInfo(object_type='dcim.inventoryitemrole', field_name='role'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.inventoryitemrole': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.location': { + 'parent': RefInfo(object_type='dcim.location', field_name='parent'), + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'dcim.macaddress': { + 'assignedObjectInterface': RefInfo(object_type='dcim.interface', field_name='assigned_object', is_generic=True), + 'assignedObjectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='assigned_object', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.manufacturer': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.module': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), 
+ 'moduleBay': RefInfo(object_type='dcim.modulebay', field_name='module_bay'), + 'moduleType': RefInfo(object_type='dcim.moduletype', field_name='module_type'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.modulebay': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'installedModule': RefInfo(object_type='dcim.module', field_name='installed_module'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.moduletype': { + 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.platform': { + 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.powerfeed': { + 'powerPanel': RefInfo(object_type='dcim.powerpanel', field_name='power_panel'), + 'rack': RefInfo(object_type='dcim.rack', field_name='rack'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'dcim.poweroutlet': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'powerPort': RefInfo(object_type='dcim.powerport', field_name='power_port'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.powerpanel': { + 'location': RefInfo(object_type='dcim.location', field_name='location'), + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.powerport': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', 
field_name='module'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.rack': { + 'location': RefInfo(object_type='dcim.location', field_name='location'), + 'rackType': RefInfo(object_type='dcim.racktype', field_name='rack_type'), + 'role': RefInfo(object_type='dcim.rackrole', field_name='role'), + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'dcim.rackreservation': { + 'rack': RefInfo(object_type='dcim.rack', field_name='rack'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'dcim.rackrole': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.racktype': { + 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.rearport': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.region': { + 'parent': RefInfo(object_type='dcim.region', field_name='parent'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.site': { + 'asns': RefInfo(object_type='ipam.asn', field_name='asns', is_many=True), + 'group': RefInfo(object_type='dcim.sitegroup', field_name='group'), + 'region': RefInfo(object_type='dcim.region', field_name='region'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'dcim.sitegroup': { + 'parent': RefInfo(object_type='dcim.sitegroup', field_name='parent'), + 
'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.virtualchassis': { + 'master': RefInfo(object_type='dcim.device', field_name='master'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.virtualdevicecontext': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'primaryIp4': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip4'), + 'primaryIp6': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip6'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'ipam.aggregate': { + 'rir': RefInfo(object_type='ipam.rir', field_name='rir'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'ipam.asn': { + 'rir': RefInfo(object_type='ipam.rir', field_name='rir'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'ipam.asnrange': { + 'rir': RefInfo(object_type='ipam.rir', field_name='rir'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'ipam.fhrpgroup': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'ipam.fhrpgroupassignment': { + 'group': RefInfo(object_type='ipam.fhrpgroup', field_name='group'), + 'interfaceAsn': RefInfo(object_type='ipam.asn', field_name='interface', is_generic=True), + 'interfaceAsnRange': RefInfo(object_type='ipam.asnrange', field_name='interface', is_generic=True), + 'interfaceAggregate': RefInfo(object_type='ipam.aggregate', field_name='interface', is_generic=True), + 'interfaceCable': RefInfo(object_type='dcim.cable', field_name='interface', is_generic=True), + 
'interfaceCablePath': RefInfo(object_type='dcim.cablepath', field_name='interface', is_generic=True), + 'interfaceCableTermination': RefInfo(object_type='dcim.cabletermination', field_name='interface', is_generic=True), + 'interfaceCircuit': RefInfo(object_type='circuits.circuit', field_name='interface', is_generic=True), + 'interfaceCircuitGroup': RefInfo(object_type='circuits.circuitgroup', field_name='interface', is_generic=True), + 'interfaceCircuitGroupAssignment': RefInfo(object_type='circuits.circuitgroupassignment', field_name='interface', is_generic=True), + 'interfaceCircuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='interface', is_generic=True), + 'interfaceCircuitType': RefInfo(object_type='circuits.circuittype', field_name='interface', is_generic=True), + 'interfaceCluster': RefInfo(object_type='virtualization.cluster', field_name='interface', is_generic=True), + 'interfaceClusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='interface', is_generic=True), + 'interfaceClusterType': RefInfo(object_type='virtualization.clustertype', field_name='interface', is_generic=True), + 'interfaceConsolePort': RefInfo(object_type='dcim.consoleport', field_name='interface', is_generic=True), + 'interfaceConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='interface', is_generic=True), + 'interfaceContact': RefInfo(object_type='tenancy.contact', field_name='interface', is_generic=True), + 'interfaceContactAssignment': RefInfo(object_type='tenancy.contactassignment', field_name='interface', is_generic=True), + 'interfaceContactGroup': RefInfo(object_type='tenancy.contactgroup', field_name='interface', is_generic=True), + 'interfaceContactRole': RefInfo(object_type='tenancy.contactrole', field_name='interface', is_generic=True), + 'interfaceDevice': RefInfo(object_type='dcim.device', field_name='interface', is_generic=True), + 'interfaceDeviceBay': RefInfo(object_type='dcim.devicebay', 
field_name='interface', is_generic=True), + 'interfaceDeviceRole': RefInfo(object_type='dcim.devicerole', field_name='interface', is_generic=True), + 'interfaceDeviceType': RefInfo(object_type='dcim.devicetype', field_name='interface', is_generic=True), + 'interfaceFhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='interface', is_generic=True), + 'interfaceFhrpGroupAssignment': RefInfo(object_type='ipam.fhrpgroupassignment', field_name='interface', is_generic=True), + 'interfaceFrontPort': RefInfo(object_type='dcim.frontport', field_name='interface', is_generic=True), + 'interfaceIkePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='interface', is_generic=True), + 'interfaceIkeProposal': RefInfo(object_type='vpn.ikeproposal', field_name='interface', is_generic=True), + 'interfaceIpAddress': RefInfo(object_type='ipam.ipaddress', field_name='interface', is_generic=True), + 'interfaceIpRange': RefInfo(object_type='ipam.iprange', field_name='interface', is_generic=True), + 'interfaceIpSecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='interface', is_generic=True), + 'interfaceIpSecProfile': RefInfo(object_type='vpn.ipsecprofile', field_name='interface', is_generic=True), + 'interfaceIpSecProposal': RefInfo(object_type='vpn.ipsecproposal', field_name='interface', is_generic=True), + 'interfaceInterface': RefInfo(object_type='dcim.interface', field_name='interface', is_generic=True), + 'interfaceInventoryItem': RefInfo(object_type='dcim.inventoryitem', field_name='interface', is_generic=True), + 'interfaceInventoryItemRole': RefInfo(object_type='dcim.inventoryitemrole', field_name='interface', is_generic=True), + 'interfaceL2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='interface', is_generic=True), + 'interfaceL2VpnTermination': RefInfo(object_type='vpn.l2vpntermination', field_name='interface', is_generic=True), + 'interfaceLocation': RefInfo(object_type='dcim.location', field_name='interface', is_generic=True), + 
'interfaceMacAddress': RefInfo(object_type='dcim.macaddress', field_name='interface', is_generic=True), + 'interfaceManufacturer': RefInfo(object_type='dcim.manufacturer', field_name='interface', is_generic=True), + 'interfaceModule': RefInfo(object_type='dcim.module', field_name='interface', is_generic=True), + 'interfaceModuleBay': RefInfo(object_type='dcim.modulebay', field_name='interface', is_generic=True), + 'interfaceModuleType': RefInfo(object_type='dcim.moduletype', field_name='interface', is_generic=True), + 'interfacePlatform': RefInfo(object_type='dcim.platform', field_name='interface', is_generic=True), + 'interfacePowerFeed': RefInfo(object_type='dcim.powerfeed', field_name='interface', is_generic=True), + 'interfacePowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='interface', is_generic=True), + 'interfacePowerPanel': RefInfo(object_type='dcim.powerpanel', field_name='interface', is_generic=True), + 'interfacePowerPort': RefInfo(object_type='dcim.powerport', field_name='interface', is_generic=True), + 'interfacePrefix': RefInfo(object_type='ipam.prefix', field_name='interface', is_generic=True), + 'interfaceProvider': RefInfo(object_type='circuits.provider', field_name='interface', is_generic=True), + 'interfaceProviderAccount': RefInfo(object_type='circuits.provideraccount', field_name='interface', is_generic=True), + 'interfaceProviderNetwork': RefInfo(object_type='circuits.providernetwork', field_name='interface', is_generic=True), + 'interfaceRir': RefInfo(object_type='ipam.rir', field_name='interface', is_generic=True), + 'interfaceRack': RefInfo(object_type='dcim.rack', field_name='interface', is_generic=True), + 'interfaceRackReservation': RefInfo(object_type='dcim.rackreservation', field_name='interface', is_generic=True), + 'interfaceRackRole': RefInfo(object_type='dcim.rackrole', field_name='interface', is_generic=True), + 'interfaceRackType': RefInfo(object_type='dcim.racktype', field_name='interface', is_generic=True), + 
'interfaceRearPort': RefInfo(object_type='dcim.rearport', field_name='interface', is_generic=True), + 'interfaceRegion': RefInfo(object_type='dcim.region', field_name='interface', is_generic=True), + 'interfaceRole': RefInfo(object_type='ipam.role', field_name='interface', is_generic=True), + 'interfaceRouteTarget': RefInfo(object_type='ipam.routetarget', field_name='interface', is_generic=True), + 'interfaceService': RefInfo(object_type='ipam.service', field_name='interface', is_generic=True), + 'interfaceSite': RefInfo(object_type='dcim.site', field_name='interface', is_generic=True), + 'interfaceSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='interface', is_generic=True), + 'interfaceTag': RefInfo(object_type='extras.tag', field_name='interface', is_generic=True), + 'interfaceTenant': RefInfo(object_type='tenancy.tenant', field_name='interface', is_generic=True), + 'interfaceTenantGroup': RefInfo(object_type='tenancy.tenantgroup', field_name='interface', is_generic=True), + 'interfaceTunnel': RefInfo(object_type='vpn.tunnel', field_name='interface', is_generic=True), + 'interfaceTunnelGroup': RefInfo(object_type='vpn.tunnelgroup', field_name='interface', is_generic=True), + 'interfaceTunnelTermination': RefInfo(object_type='vpn.tunneltermination', field_name='interface', is_generic=True), + 'interfaceVlan': RefInfo(object_type='ipam.vlan', field_name='interface', is_generic=True), + 'interfaceVlanGroup': RefInfo(object_type='ipam.vlangroup', field_name='interface', is_generic=True), + 'interfaceVlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='interface', is_generic=True), + 'interfaceVlanTranslationRule': RefInfo(object_type='ipam.vlantranslationrule', field_name='interface', is_generic=True), + 'interfaceVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='interface', is_generic=True), + 'interfaceVrf': RefInfo(object_type='ipam.vrf', field_name='interface', is_generic=True), + 
'interfaceVirtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='interface', is_generic=True), + 'interfaceVirtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='interface', is_generic=True), + 'interfaceVirtualCircuitTermination': RefInfo(object_type='circuits.virtualcircuittermination', field_name='interface', is_generic=True), + 'interfaceVirtualCircuitType': RefInfo(object_type='circuits.virtualcircuittype', field_name='interface', is_generic=True), + 'interfaceVirtualDeviceContext': RefInfo(object_type='dcim.virtualdevicecontext', field_name='interface', is_generic=True), + 'interfaceVirtualDisk': RefInfo(object_type='virtualization.virtualdisk', field_name='interface', is_generic=True), + 'interfaceVirtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='interface', is_generic=True), + 'interfaceWirelessLan': RefInfo(object_type='wireless.wirelesslan', field_name='interface', is_generic=True), + 'interfaceWirelessLanGroup': RefInfo(object_type='wireless.wirelesslangroup', field_name='interface', is_generic=True), + 'interfaceWirelessLink': RefInfo(object_type='wireless.wirelesslink', field_name='interface', is_generic=True), + }, + 'ipam.ipaddress': { + 'assignedObjectFhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='assigned_object', is_generic=True), + 'assignedObjectInterface': RefInfo(object_type='dcim.interface', field_name='assigned_object', is_generic=True), + 'assignedObjectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='assigned_object', is_generic=True), + 'natInside': RefInfo(object_type='ipam.ipaddress', field_name='nat_inside'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), + }, + 'ipam.iprange': { + 'role': RefInfo(object_type='ipam.role', field_name='role'), + 'tags': 
RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), + }, + 'ipam.prefix': { + 'role': RefInfo(object_type='ipam.role', field_name='role'), + 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), + 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), + 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), + 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'vlan': RefInfo(object_type='ipam.vlan', field_name='vlan'), + 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), + }, + 'ipam.rir': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'ipam.role': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'ipam.routetarget': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'ipam.service': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'ipaddresses': RefInfo(object_type='ipam.ipaddress', field_name='ipaddresses', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'virtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), + }, + 'ipam.vlan': { + 'group': RefInfo(object_type='ipam.vlangroup', field_name='group'), + 'qinqSvlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), + 'role': RefInfo(object_type='ipam.role', field_name='role'), + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'tags': 
RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'ipam.vlangroup': { + 'scopeCluster': RefInfo(object_type='virtualization.cluster', field_name='scope', is_generic=True), + 'scopeClusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='scope', is_generic=True), + 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), + 'scopeRack': RefInfo(object_type='dcim.rack', field_name='scope', is_generic=True), + 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), + 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), + 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'ipam.vlantranslationrule': { + 'policy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='policy'), + }, + 'ipam.vrf': { + 'exportTargets': RefInfo(object_type='ipam.routetarget', field_name='export_targets', is_many=True), + 'importTargets': RefInfo(object_type='ipam.routetarget', field_name='import_targets', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'tenancy.contact': { + 'group': RefInfo(object_type='tenancy.contactgroup', field_name='group'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'tenancy.contactassignment': { + 'contact': RefInfo(object_type='tenancy.contact', field_name='contact'), + 'objectAsn': RefInfo(object_type='ipam.asn', field_name='object', is_generic=True), + 'objectAsnRange': RefInfo(object_type='ipam.asnrange', field_name='object', is_generic=True), + 'objectAggregate': RefInfo(object_type='ipam.aggregate', field_name='object', is_generic=True), + 
'objectCable': RefInfo(object_type='dcim.cable', field_name='object', is_generic=True), + 'objectCablePath': RefInfo(object_type='dcim.cablepath', field_name='object', is_generic=True), + 'objectCableTermination': RefInfo(object_type='dcim.cabletermination', field_name='object', is_generic=True), + 'objectCircuit': RefInfo(object_type='circuits.circuit', field_name='object', is_generic=True), + 'objectCircuitGroup': RefInfo(object_type='circuits.circuitgroup', field_name='object', is_generic=True), + 'objectCircuitGroupAssignment': RefInfo(object_type='circuits.circuitgroupassignment', field_name='object', is_generic=True), + 'objectCircuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='object', is_generic=True), + 'objectCircuitType': RefInfo(object_type='circuits.circuittype', field_name='object', is_generic=True), + 'objectCluster': RefInfo(object_type='virtualization.cluster', field_name='object', is_generic=True), + 'objectClusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='object', is_generic=True), + 'objectClusterType': RefInfo(object_type='virtualization.clustertype', field_name='object', is_generic=True), + 'objectConsolePort': RefInfo(object_type='dcim.consoleport', field_name='object', is_generic=True), + 'objectConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='object', is_generic=True), + 'objectContact': RefInfo(object_type='tenancy.contact', field_name='object', is_generic=True), + 'objectContactAssignment': RefInfo(object_type='tenancy.contactassignment', field_name='object', is_generic=True), + 'objectContactGroup': RefInfo(object_type='tenancy.contactgroup', field_name='object', is_generic=True), + 'objectContactRole': RefInfo(object_type='tenancy.contactrole', field_name='object', is_generic=True), + 'objectDevice': RefInfo(object_type='dcim.device', field_name='object', is_generic=True), + 'objectDeviceBay': RefInfo(object_type='dcim.devicebay', 
field_name='object', is_generic=True), + 'objectDeviceRole': RefInfo(object_type='dcim.devicerole', field_name='object', is_generic=True), + 'objectDeviceType': RefInfo(object_type='dcim.devicetype', field_name='object', is_generic=True), + 'objectFhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='object', is_generic=True), + 'objectFhrpGroupAssignment': RefInfo(object_type='ipam.fhrpgroupassignment', field_name='object', is_generic=True), + 'objectFrontPort': RefInfo(object_type='dcim.frontport', field_name='object', is_generic=True), + 'objectIkePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='object', is_generic=True), + 'objectIkeProposal': RefInfo(object_type='vpn.ikeproposal', field_name='object', is_generic=True), + 'objectIpAddress': RefInfo(object_type='ipam.ipaddress', field_name='object', is_generic=True), + 'objectIpRange': RefInfo(object_type='ipam.iprange', field_name='object', is_generic=True), + 'objectIpSecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='object', is_generic=True), + 'objectIpSecProfile': RefInfo(object_type='vpn.ipsecprofile', field_name='object', is_generic=True), + 'objectIpSecProposal': RefInfo(object_type='vpn.ipsecproposal', field_name='object', is_generic=True), + 'objectInterface': RefInfo(object_type='dcim.interface', field_name='object', is_generic=True), + 'objectInventoryItem': RefInfo(object_type='dcim.inventoryitem', field_name='object', is_generic=True), + 'objectInventoryItemRole': RefInfo(object_type='dcim.inventoryitemrole', field_name='object', is_generic=True), + 'objectL2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='object', is_generic=True), + 'objectL2VpnTermination': RefInfo(object_type='vpn.l2vpntermination', field_name='object', is_generic=True), + 'objectLocation': RefInfo(object_type='dcim.location', field_name='object', is_generic=True), + 'objectMacAddress': RefInfo(object_type='dcim.macaddress', field_name='object', is_generic=True), + 'objectManufacturer': 
RefInfo(object_type='dcim.manufacturer', field_name='object', is_generic=True), + 'objectModule': RefInfo(object_type='dcim.module', field_name='object', is_generic=True), + 'objectModuleBay': RefInfo(object_type='dcim.modulebay', field_name='object', is_generic=True), + 'objectModuleType': RefInfo(object_type='dcim.moduletype', field_name='object', is_generic=True), + 'objectPlatform': RefInfo(object_type='dcim.platform', field_name='object', is_generic=True), + 'objectPowerFeed': RefInfo(object_type='dcim.powerfeed', field_name='object', is_generic=True), + 'objectPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='object', is_generic=True), + 'objectPowerPanel': RefInfo(object_type='dcim.powerpanel', field_name='object', is_generic=True), + 'objectPowerPort': RefInfo(object_type='dcim.powerport', field_name='object', is_generic=True), + 'objectPrefix': RefInfo(object_type='ipam.prefix', field_name='object', is_generic=True), + 'objectProvider': RefInfo(object_type='circuits.provider', field_name='object', is_generic=True), + 'objectProviderAccount': RefInfo(object_type='circuits.provideraccount', field_name='object', is_generic=True), + 'objectProviderNetwork': RefInfo(object_type='circuits.providernetwork', field_name='object', is_generic=True), + 'objectRir': RefInfo(object_type='ipam.rir', field_name='object', is_generic=True), + 'objectRack': RefInfo(object_type='dcim.rack', field_name='object', is_generic=True), + 'objectRackReservation': RefInfo(object_type='dcim.rackreservation', field_name='object', is_generic=True), + 'objectRackRole': RefInfo(object_type='dcim.rackrole', field_name='object', is_generic=True), + 'objectRackType': RefInfo(object_type='dcim.racktype', field_name='object', is_generic=True), + 'objectRearPort': RefInfo(object_type='dcim.rearport', field_name='object', is_generic=True), + 'objectRegion': RefInfo(object_type='dcim.region', field_name='object', is_generic=True), + 'objectRole': RefInfo(object_type='ipam.role', 
field_name='object', is_generic=True), + 'objectRouteTarget': RefInfo(object_type='ipam.routetarget', field_name='object', is_generic=True), + 'objectService': RefInfo(object_type='ipam.service', field_name='object', is_generic=True), + 'objectSite': RefInfo(object_type='dcim.site', field_name='object', is_generic=True), + 'objectSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='object', is_generic=True), + 'objectTag': RefInfo(object_type='extras.tag', field_name='object', is_generic=True), + 'objectTenant': RefInfo(object_type='tenancy.tenant', field_name='object', is_generic=True), + 'objectTenantGroup': RefInfo(object_type='tenancy.tenantgroup', field_name='object', is_generic=True), + 'objectTunnel': RefInfo(object_type='vpn.tunnel', field_name='object', is_generic=True), + 'objectTunnelGroup': RefInfo(object_type='vpn.tunnelgroup', field_name='object', is_generic=True), + 'objectTunnelTermination': RefInfo(object_type='vpn.tunneltermination', field_name='object', is_generic=True), + 'objectVlan': RefInfo(object_type='ipam.vlan', field_name='object', is_generic=True), + 'objectVlanGroup': RefInfo(object_type='ipam.vlangroup', field_name='object', is_generic=True), + 'objectVlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='object', is_generic=True), + 'objectVlanTranslationRule': RefInfo(object_type='ipam.vlantranslationrule', field_name='object', is_generic=True), + 'objectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='object', is_generic=True), + 'objectVrf': RefInfo(object_type='ipam.vrf', field_name='object', is_generic=True), + 'objectVirtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='object', is_generic=True), + 'objectVirtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='object', is_generic=True), + 'objectVirtualCircuitTermination': RefInfo(object_type='circuits.virtualcircuittermination', field_name='object', is_generic=True), + 
'objectVirtualCircuitType': RefInfo(object_type='circuits.virtualcircuittype', field_name='object', is_generic=True), + 'objectVirtualDeviceContext': RefInfo(object_type='dcim.virtualdevicecontext', field_name='object', is_generic=True), + 'objectVirtualDisk': RefInfo(object_type='virtualization.virtualdisk', field_name='object', is_generic=True), + 'objectVirtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='object', is_generic=True), + 'objectWirelessLan': RefInfo(object_type='wireless.wirelesslan', field_name='object', is_generic=True), + 'objectWirelessLanGroup': RefInfo(object_type='wireless.wirelesslangroup', field_name='object', is_generic=True), + 'objectWirelessLink': RefInfo(object_type='wireless.wirelesslink', field_name='object', is_generic=True), + 'role': RefInfo(object_type='tenancy.contactrole', field_name='role'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'tenancy.contactgroup': { + 'parent': RefInfo(object_type='tenancy.contactgroup', field_name='parent'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'tenancy.contactrole': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'tenancy.tenant': { + 'group': RefInfo(object_type='tenancy.tenantgroup', field_name='group'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'tenancy.tenantgroup': { + 'parent': RefInfo(object_type='tenancy.tenantgroup', field_name='parent'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'virtualization.cluster': { + 'group': RefInfo(object_type='virtualization.clustergroup', field_name='group'), + 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), + 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), + 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', 
is_generic=True), + 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'type': RefInfo(object_type='virtualization.clustertype', field_name='type'), + }, + 'virtualization.clustergroup': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'virtualization.clustertype': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'virtualization.virtualdisk': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'virtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), + }, + 'virtualization.virtualmachine': { + 'cluster': RefInfo(object_type='virtualization.cluster', field_name='cluster'), + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'platform': RefInfo(object_type='dcim.platform', field_name='platform'), + 'primaryIp4': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip4'), + 'primaryIp6': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip6'), + 'role': RefInfo(object_type='dcim.devicerole', field_name='role'), + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'virtualization.vminterface': { + 'bridge': RefInfo(object_type='virtualization.vminterface', field_name='bridge'), + 'parent': RefInfo(object_type='virtualization.vminterface', field_name='parent'), + 'primaryMacAddress': RefInfo(object_type='dcim.macaddress', field_name='primary_mac_address'), + 'qinqSvlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), + 'taggedVlans': RefInfo(object_type='ipam.vlan', field_name='tagged_vlans', is_many=True), + 'tags': 
RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'untaggedVlan': RefInfo(object_type='ipam.vlan', field_name='untagged_vlan'), + 'virtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), + 'vlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='vlan_translation_policy'), + 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), + }, + 'vpn.ikepolicy': { + 'proposals': RefInfo(object_type='vpn.ikeproposal', field_name='proposals', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'vpn.ikeproposal': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'vpn.ipsecpolicy': { + 'proposals': RefInfo(object_type='vpn.ipsecproposal', field_name='proposals', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'vpn.ipsecprofile': { + 'ikePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='ike_policy'), + 'ipsecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='ipsec_policy'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'vpn.ipsecproposal': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'vpn.l2vpn': { + 'exportTargets': RefInfo(object_type='ipam.routetarget', field_name='export_targets', is_many=True), + 'importTargets': RefInfo(object_type='ipam.routetarget', field_name='import_targets', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'vpn.l2vpntermination': { + 'assignedObjectInterface': RefInfo(object_type='dcim.interface', field_name='assigned_object', is_generic=True), + 'assignedObjectVlan': RefInfo(object_type='ipam.vlan', field_name='assigned_object', is_generic=True), + 'assignedObjectVmInterface': 
RefInfo(object_type='virtualization.vminterface', field_name='assigned_object', is_generic=True), + 'l2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='l2vpn'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'vpn.tunnel': { + 'group': RefInfo(object_type='vpn.tunnelgroup', field_name='group'), + 'ipsecProfile': RefInfo(object_type='vpn.ipsecprofile', field_name='ipsec_profile'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'vpn.tunnelgroup': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'vpn.tunneltermination': { + 'outsideIp': RefInfo(object_type='ipam.ipaddress', field_name='outside_ip'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'terminationAsn': RefInfo(object_type='ipam.asn', field_name='termination', is_generic=True), + 'terminationAsnRange': RefInfo(object_type='ipam.asnrange', field_name='termination', is_generic=True), + 'terminationAggregate': RefInfo(object_type='ipam.aggregate', field_name='termination', is_generic=True), + 'terminationCable': RefInfo(object_type='dcim.cable', field_name='termination', is_generic=True), + 'terminationCablePath': RefInfo(object_type='dcim.cablepath', field_name='termination', is_generic=True), + 'terminationCableTermination': RefInfo(object_type='dcim.cabletermination', field_name='termination', is_generic=True), + 'terminationCircuit': RefInfo(object_type='circuits.circuit', field_name='termination', is_generic=True), + 'terminationCircuitGroup': RefInfo(object_type='circuits.circuitgroup', field_name='termination', is_generic=True), + 'terminationCircuitGroupAssignment': RefInfo(object_type='circuits.circuitgroupassignment', field_name='termination', is_generic=True), + 'terminationCircuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='termination', 
is_generic=True), + 'terminationCircuitType': RefInfo(object_type='circuits.circuittype', field_name='termination', is_generic=True), + 'terminationCluster': RefInfo(object_type='virtualization.cluster', field_name='termination', is_generic=True), + 'terminationClusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='termination', is_generic=True), + 'terminationClusterType': RefInfo(object_type='virtualization.clustertype', field_name='termination', is_generic=True), + 'terminationConsolePort': RefInfo(object_type='dcim.consoleport', field_name='termination', is_generic=True), + 'terminationConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='termination', is_generic=True), + 'terminationContact': RefInfo(object_type='tenancy.contact', field_name='termination', is_generic=True), + 'terminationContactAssignment': RefInfo(object_type='tenancy.contactassignment', field_name='termination', is_generic=True), + 'terminationContactGroup': RefInfo(object_type='tenancy.contactgroup', field_name='termination', is_generic=True), + 'terminationContactRole': RefInfo(object_type='tenancy.contactrole', field_name='termination', is_generic=True), + 'terminationDevice': RefInfo(object_type='dcim.device', field_name='termination', is_generic=True), + 'terminationDeviceBay': RefInfo(object_type='dcim.devicebay', field_name='termination', is_generic=True), + 'terminationDeviceRole': RefInfo(object_type='dcim.devicerole', field_name='termination', is_generic=True), + 'terminationDeviceType': RefInfo(object_type='dcim.devicetype', field_name='termination', is_generic=True), + 'terminationFhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='termination', is_generic=True), + 'terminationFhrpGroupAssignment': RefInfo(object_type='ipam.fhrpgroupassignment', field_name='termination', is_generic=True), + 'terminationFrontPort': RefInfo(object_type='dcim.frontport', field_name='termination', is_generic=True), + 'terminationIkePolicy': 
RefInfo(object_type='vpn.ikepolicy', field_name='termination', is_generic=True), + 'terminationIkeProposal': RefInfo(object_type='vpn.ikeproposal', field_name='termination', is_generic=True), + 'terminationIpAddress': RefInfo(object_type='ipam.ipaddress', field_name='termination', is_generic=True), + 'terminationIpRange': RefInfo(object_type='ipam.iprange', field_name='termination', is_generic=True), + 'terminationIpSecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='termination', is_generic=True), + 'terminationIpSecProfile': RefInfo(object_type='vpn.ipsecprofile', field_name='termination', is_generic=True), + 'terminationIpSecProposal': RefInfo(object_type='vpn.ipsecproposal', field_name='termination', is_generic=True), + 'terminationInterface': RefInfo(object_type='dcim.interface', field_name='termination', is_generic=True), + 'terminationInventoryItem': RefInfo(object_type='dcim.inventoryitem', field_name='termination', is_generic=True), + 'terminationInventoryItemRole': RefInfo(object_type='dcim.inventoryitemrole', field_name='termination', is_generic=True), + 'terminationL2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='termination', is_generic=True), + 'terminationL2VpnTermination': RefInfo(object_type='vpn.l2vpntermination', field_name='termination', is_generic=True), + 'terminationLocation': RefInfo(object_type='dcim.location', field_name='termination', is_generic=True), + 'terminationMacAddress': RefInfo(object_type='dcim.macaddress', field_name='termination', is_generic=True), + 'terminationManufacturer': RefInfo(object_type='dcim.manufacturer', field_name='termination', is_generic=True), + 'terminationModule': RefInfo(object_type='dcim.module', field_name='termination', is_generic=True), + 'terminationModuleBay': RefInfo(object_type='dcim.modulebay', field_name='termination', is_generic=True), + 'terminationModuleType': RefInfo(object_type='dcim.moduletype', field_name='termination', is_generic=True), + 'terminationPlatform': 
RefInfo(object_type='dcim.platform', field_name='termination', is_generic=True), + 'terminationPowerFeed': RefInfo(object_type='dcim.powerfeed', field_name='termination', is_generic=True), + 'terminationPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='termination', is_generic=True), + 'terminationPowerPanel': RefInfo(object_type='dcim.powerpanel', field_name='termination', is_generic=True), + 'terminationPowerPort': RefInfo(object_type='dcim.powerport', field_name='termination', is_generic=True), + 'terminationPrefix': RefInfo(object_type='ipam.prefix', field_name='termination', is_generic=True), + 'terminationProvider': RefInfo(object_type='circuits.provider', field_name='termination', is_generic=True), + 'terminationProviderAccount': RefInfo(object_type='circuits.provideraccount', field_name='termination', is_generic=True), + 'terminationProviderNetwork': RefInfo(object_type='circuits.providernetwork', field_name='termination', is_generic=True), + 'terminationRir': RefInfo(object_type='ipam.rir', field_name='termination', is_generic=True), + 'terminationRack': RefInfo(object_type='dcim.rack', field_name='termination', is_generic=True), + 'terminationRackReservation': RefInfo(object_type='dcim.rackreservation', field_name='termination', is_generic=True), + 'terminationRackRole': RefInfo(object_type='dcim.rackrole', field_name='termination', is_generic=True), + 'terminationRackType': RefInfo(object_type='dcim.racktype', field_name='termination', is_generic=True), + 'terminationRearPort': RefInfo(object_type='dcim.rearport', field_name='termination', is_generic=True), + 'terminationRegion': RefInfo(object_type='dcim.region', field_name='termination', is_generic=True), + 'terminationRole': RefInfo(object_type='ipam.role', field_name='termination', is_generic=True), + 'terminationRouteTarget': RefInfo(object_type='ipam.routetarget', field_name='termination', is_generic=True), + 'terminationService': RefInfo(object_type='ipam.service', 
field_name='termination', is_generic=True), + 'terminationSite': RefInfo(object_type='dcim.site', field_name='termination', is_generic=True), + 'terminationSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='termination', is_generic=True), + 'terminationTag': RefInfo(object_type='extras.tag', field_name='termination', is_generic=True), + 'terminationTenant': RefInfo(object_type='tenancy.tenant', field_name='termination', is_generic=True), + 'terminationTenantGroup': RefInfo(object_type='tenancy.tenantgroup', field_name='termination', is_generic=True), + 'terminationTunnel': RefInfo(object_type='vpn.tunnel', field_name='termination', is_generic=True), + 'terminationTunnelGroup': RefInfo(object_type='vpn.tunnelgroup', field_name='termination', is_generic=True), + 'terminationTunnelTermination': RefInfo(object_type='vpn.tunneltermination', field_name='termination', is_generic=True), + 'terminationVlan': RefInfo(object_type='ipam.vlan', field_name='termination', is_generic=True), + 'terminationVlanGroup': RefInfo(object_type='ipam.vlangroup', field_name='termination', is_generic=True), + 'terminationVlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='termination', is_generic=True), + 'terminationVlanTranslationRule': RefInfo(object_type='ipam.vlantranslationrule', field_name='termination', is_generic=True), + 'terminationVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='termination', is_generic=True), + 'terminationVrf': RefInfo(object_type='ipam.vrf', field_name='termination', is_generic=True), + 'terminationVirtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='termination', is_generic=True), + 'terminationVirtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='termination', is_generic=True), + 'terminationVirtualCircuitTermination': RefInfo(object_type='circuits.virtualcircuittermination', field_name='termination', is_generic=True), + 
'terminationVirtualCircuitType': RefInfo(object_type='circuits.virtualcircuittype', field_name='termination', is_generic=True), + 'terminationVirtualDeviceContext': RefInfo(object_type='dcim.virtualdevicecontext', field_name='termination', is_generic=True), + 'terminationVirtualDisk': RefInfo(object_type='virtualization.virtualdisk', field_name='termination', is_generic=True), + 'terminationVirtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='termination', is_generic=True), + 'terminationWirelessLan': RefInfo(object_type='wireless.wirelesslan', field_name='termination', is_generic=True), + 'terminationWirelessLanGroup': RefInfo(object_type='wireless.wirelesslangroup', field_name='termination', is_generic=True), + 'terminationWirelessLink': RefInfo(object_type='wireless.wirelesslink', field_name='termination', is_generic=True), + 'tunnel': RefInfo(object_type='vpn.tunnel', field_name='tunnel'), + }, + 'wireless.wirelesslan': { + 'group': RefInfo(object_type='wireless.wirelesslangroup', field_name='group'), + 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), + 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), + 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), + 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'vlan': RefInfo(object_type='ipam.vlan', field_name='vlan'), + }, + 'wireless.wirelesslangroup': { + 'parent': RefInfo(object_type='wireless.wirelesslangroup', field_name='parent'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'wireless.wirelesslink': { + 'interfaceA': RefInfo(object_type='dcim.interface', field_name='interface_a'), + 'interfaceB': RefInfo(object_type='dcim.interface', 
field_name='interface_b'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, +} + +def get_json_ref_info(object_type: str|Type[models.Model], json_field_name: str) -> RefInfo|None: + if not isinstance(object_type, str): + object_type = get_object_type(object_type) + return _JSON_REF_INFO.get(object_type, {}).get(json_field_name) + +_LEGAL_FIELDS = { + 'circuits.circuit': frozenset(['assignments', 'cid', 'comments', 'commit_rate', 'custom_fields', 'description', 'distance', 'distance_unit', 'install_date', 'provider', 'provider_account', 'status', 'tags', 'tenant', 'termination_date', 'type']), + 'circuits.circuitgroup': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags', 'tenant']), + 'circuits.circuitgroupassignment': frozenset(['group', 'member_id', 'member_type', 'priority', 'tags']), + 'circuits.circuittermination': frozenset(['circuit', 'custom_fields', 'description', 'mark_connected', 'port_speed', 'pp_info', 'tags', 'term_side', 'termination_id', 'termination_type', 'upstream_speed', 'xconnect_id']), + 'circuits.circuittype': frozenset(['color', 'custom_fields', 'description', 'name', 'slug', 'tags']), + 'circuits.provider': frozenset(['accounts', 'asns', 'comments', 'custom_fields', 'description', 'name', 'slug', 'tags']), + 'circuits.provideraccount': frozenset(['account', 'comments', 'custom_fields', 'description', 'name', 'provider', 'tags']), + 'circuits.providernetwork': frozenset(['comments', 'custom_fields', 'description', 'name', 'provider', 'service_id', 'tags']), + 'circuits.virtualcircuit': frozenset(['cid', 'comments', 'custom_fields', 'description', 'provider_account', 'provider_network', 'status', 'tags', 'tenant', 'type']), + 'circuits.virtualcircuittermination': frozenset(['custom_fields', 'description', 'interface', 'role', 'tags', 'virtual_circuit']), + 'circuits.virtualcircuittype': frozenset(['color', 'custom_fields', 
'description', 'name', 'slug', 'tags']), + 'dcim.cable': frozenset(['a_terminations', 'b_terminations', 'color', 'comments', 'custom_fields', 'description', 'label', 'length', 'length_unit', 'status', 'tags', 'tenant', 'type']), + 'dcim.cablepath': frozenset(['is_active', 'is_complete', 'is_split']), + 'dcim.cabletermination': frozenset(['cable', 'cable_end', 'termination_id', 'termination_type']), + 'dcim.consoleport': frozenset(['custom_fields', 'description', 'device', 'label', 'mark_connected', 'module', 'name', 'speed', 'tags', 'type']), + 'dcim.consoleserverport': frozenset(['custom_fields', 'description', 'device', 'label', 'mark_connected', 'module', 'name', 'speed', 'tags', 'type']), + 'dcim.device': frozenset(['airflow', 'asset_tag', 'cluster', 'comments', 'custom_fields', 'description', 'device_type', 'face', 'latitude', 'location', 'longitude', 'name', 'oob_ip', 'platform', 'position', 'primary_ip4', 'primary_ip6', 'rack', 'role', 'serial', 'site', 'status', 'tags', 'tenant', 'vc_position', 'vc_priority', 'virtual_chassis']), + 'dcim.devicebay': frozenset(['custom_fields', 'description', 'device', 'installed_device', 'label', 'name', 'tags']), + 'dcim.devicerole': frozenset(['color', 'custom_fields', 'description', 'name', 'slug', 'tags', 'vm_role']), + 'dcim.devicetype': frozenset(['airflow', 'comments', 'custom_fields', 'default_platform', 'description', 'exclude_from_utilization', 'is_full_depth', 'manufacturer', 'model', 'part_number', 'slug', 'subdevice_role', 'tags', 'u_height', 'weight', 'weight_unit']), + 'dcim.frontport': frozenset(['color', 'custom_fields', 'description', 'device', 'label', 'mark_connected', 'module', 'name', 'rear_port', 'rear_port_position', 'tags', 'type']), + 'dcim.interface': frozenset(['bridge', 'custom_fields', 'description', 'device', 'duplex', 'enabled', 'label', 'lag', 'mark_connected', 'mgmt_only', 'mode', 'module', 'mtu', 'name', 'parent', 'poe_mode', 'poe_type', 'primary_mac_address', 'qinq_svlan', 'rf_channel', 
'rf_channel_frequency', 'rf_channel_width', 'rf_role', 'speed', 'tagged_vlans', 'tags', 'tx_power', 'type', 'untagged_vlan', 'vdcs', 'vlan_translation_policy', 'vrf', 'wireless_lans', 'wwn']), + 'dcim.inventoryitem': frozenset(['asset_tag', 'component_id', 'component_type', 'custom_fields', 'description', 'device', 'discovered', 'label', 'manufacturer', 'name', 'parent', 'part_id', 'role', 'serial', 'status', 'tags']), + 'dcim.inventoryitemrole': frozenset(['color', 'custom_fields', 'description', 'name', 'slug', 'tags']), + 'dcim.location': frozenset(['custom_fields', 'description', 'facility', 'name', 'parent', 'site', 'slug', 'status', 'tags', 'tenant']), + 'dcim.macaddress': frozenset(['assigned_object_id', 'assigned_object_type', 'comments', 'custom_fields', 'description', 'mac_address', 'tags']), + 'dcim.manufacturer': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags']), + 'dcim.module': frozenset(['asset_tag', 'comments', 'custom_fields', 'description', 'device', 'module_bay', 'module_type', 'serial', 'status', 'tags']), + 'dcim.modulebay': frozenset(['custom_fields', 'description', 'device', 'installed_module', 'label', 'module', 'name', 'position', 'tags']), + 'dcim.moduletype': frozenset(['airflow', 'comments', 'custom_fields', 'description', 'manufacturer', 'model', 'part_number', 'tags', 'weight', 'weight_unit']), + 'dcim.platform': frozenset(['custom_fields', 'description', 'manufacturer', 'name', 'slug', 'tags']), + 'dcim.powerfeed': frozenset(['amperage', 'comments', 'custom_fields', 'description', 'mark_connected', 'max_utilization', 'name', 'phase', 'power_panel', 'rack', 'status', 'supply', 'tags', 'tenant', 'type', 'voltage']), + 'dcim.poweroutlet': frozenset(['color', 'custom_fields', 'description', 'device', 'feed_leg', 'label', 'mark_connected', 'module', 'name', 'power_port', 'tags', 'type']), + 'dcim.powerpanel': frozenset(['comments', 'custom_fields', 'description', 'location', 'name', 'site', 'tags']), + 'dcim.powerport': 
frozenset(['allocated_draw', 'custom_fields', 'description', 'device', 'label', 'mark_connected', 'maximum_draw', 'module', 'name', 'tags', 'type']), + 'dcim.rack': frozenset(['airflow', 'asset_tag', 'comments', 'custom_fields', 'desc_units', 'description', 'facility_id', 'form_factor', 'location', 'max_weight', 'mounting_depth', 'name', 'outer_depth', 'outer_unit', 'outer_width', 'rack_type', 'role', 'serial', 'site', 'starting_unit', 'status', 'tags', 'tenant', 'u_height', 'weight', 'weight_unit', 'width']), + 'dcim.rackreservation': frozenset(['comments', 'custom_fields', 'description', 'rack', 'tags', 'tenant', 'units']), + 'dcim.rackrole': frozenset(['color', 'custom_fields', 'description', 'name', 'slug', 'tags']), + 'dcim.racktype': frozenset(['comments', 'custom_fields', 'desc_units', 'description', 'form_factor', 'manufacturer', 'max_weight', 'model', 'mounting_depth', 'outer_depth', 'outer_unit', 'outer_width', 'slug', 'starting_unit', 'tags', 'u_height', 'weight', 'weight_unit', 'width']), + 'dcim.rearport': frozenset(['color', 'custom_fields', 'description', 'device', 'label', 'mark_connected', 'module', 'name', 'positions', 'tags', 'type']), + 'dcim.region': frozenset(['custom_fields', 'description', 'name', 'parent', 'slug', 'tags']), + 'dcim.site': frozenset(['asns', 'comments', 'custom_fields', 'description', 'facility', 'group', 'latitude', 'longitude', 'name', 'physical_address', 'region', 'shipping_address', 'slug', 'status', 'tags', 'tenant', 'time_zone']), + 'dcim.sitegroup': frozenset(['custom_fields', 'description', 'name', 'parent', 'slug', 'tags']), + 'dcim.virtualchassis': frozenset(['comments', 'custom_fields', 'description', 'domain', 'master', 'name', 'tags']), + 'dcim.virtualdevicecontext': frozenset(['comments', 'custom_fields', 'description', 'device', 'identifier', 'name', 'primary_ip4', 'primary_ip6', 'status', 'tags', 'tenant']), + 'extras.tag': frozenset(['color', 'name', 'slug']), + 'ipam.aggregate': frozenset(['comments', 
'custom_fields', 'date_added', 'description', 'prefix', 'rir', 'tags', 'tenant']), + 'ipam.asn': frozenset(['asn', 'comments', 'custom_fields', 'description', 'rir', 'tags', 'tenant']), + 'ipam.asnrange': frozenset(['custom_fields', 'description', 'end', 'name', 'rir', 'slug', 'start', 'tags', 'tenant']), + 'ipam.fhrpgroup': frozenset(['auth_key', 'auth_type', 'comments', 'custom_fields', 'description', 'group_id', 'name', 'protocol', 'tags']), + 'ipam.fhrpgroupassignment': frozenset(['group', 'interface_id', 'interface_type', 'priority']), + 'ipam.ipaddress': frozenset(['address', 'assigned_object_id', 'assigned_object_type', 'comments', 'custom_fields', 'description', 'dns_name', 'nat_inside', 'role', 'status', 'tags', 'tenant', 'vrf']), + 'ipam.iprange': frozenset(['comments', 'custom_fields', 'description', 'end_address', 'mark_utilized', 'role', 'start_address', 'status', 'tags', 'tenant', 'vrf']), + 'ipam.prefix': frozenset(['comments', 'custom_fields', 'description', 'is_pool', 'mark_utilized', 'prefix', 'role', 'scope_id', 'scope_type', 'status', 'tags', 'tenant', 'vlan', 'vrf']), + 'ipam.rir': frozenset(['custom_fields', 'description', 'is_private', 'name', 'slug', 'tags']), + 'ipam.role': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags', 'weight']), + 'ipam.routetarget': frozenset(['comments', 'custom_fields', 'description', 'name', 'tags', 'tenant']), + 'ipam.service': frozenset(['comments', 'custom_fields', 'description', 'device', 'ipaddresses', 'name', 'ports', 'protocol', 'tags', 'virtual_machine']), + 'ipam.vlan': frozenset(['comments', 'custom_fields', 'description', 'group', 'name', 'qinq_role', 'qinq_svlan', 'role', 'site', 'status', 'tags', 'tenant', 'vid']), + 'ipam.vlangroup': frozenset(['custom_fields', 'description', 'name', 'scope_id', 'scope_type', 'slug', 'tags', 'vid_ranges']), + 'ipam.vlantranslationpolicy': frozenset(['description', 'name']), + 'ipam.vlantranslationrule': frozenset(['description', 'local_vid', 
'policy', 'remote_vid']), + 'ipam.vrf': frozenset(['comments', 'custom_fields', 'description', 'enforce_unique', 'export_targets', 'import_targets', 'name', 'rd', 'tags', 'tenant']), + 'tenancy.contact': frozenset(['address', 'comments', 'custom_fields', 'description', 'email', 'group', 'link', 'name', 'phone', 'tags', 'title']), + 'tenancy.contactassignment': frozenset(['contact', 'custom_fields', 'object_id', 'object_type', 'priority', 'role', 'tags']), + 'tenancy.contactgroup': frozenset(['custom_fields', 'description', 'name', 'parent', 'slug', 'tags']), + 'tenancy.contactrole': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags']), + 'tenancy.tenant': frozenset(['comments', 'custom_fields', 'description', 'group', 'name', 'slug', 'tags']), + 'tenancy.tenantgroup': frozenset(['custom_fields', 'description', 'name', 'parent', 'slug', 'tags']), + 'virtualization.cluster': frozenset(['comments', 'custom_fields', 'description', 'group', 'name', 'scope_id', 'scope_type', 'status', 'tags', 'tenant', 'type']), + 'virtualization.clustergroup': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags']), + 'virtualization.clustertype': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags']), + 'virtualization.virtualdisk': frozenset(['custom_fields', 'description', 'name', 'size', 'tags', 'virtual_machine']), + 'virtualization.virtualmachine': frozenset(['cluster', 'comments', 'custom_fields', 'description', 'device', 'disk', 'memory', 'name', 'platform', 'primary_ip4', 'primary_ip6', 'role', 'serial', 'site', 'status', 'tags', 'tenant', 'vcpus']), + 'virtualization.vminterface': frozenset(['bridge', 'custom_fields', 'description', 'enabled', 'mode', 'mtu', 'name', 'parent', 'primary_mac_address', 'qinq_svlan', 'tagged_vlans', 'tags', 'untagged_vlan', 'virtual_machine', 'vlan_translation_policy', 'vrf']), + 'vpn.ikepolicy': frozenset(['comments', 'custom_fields', 'description', 'mode', 'name', 'preshared_key', 'proposals', 'tags', 
'version']), + 'vpn.ikeproposal': frozenset(['authentication_algorithm', 'authentication_method', 'comments', 'custom_fields', 'description', 'encryption_algorithm', 'group', 'name', 'sa_lifetime', 'tags']), + 'vpn.ipsecpolicy': frozenset(['comments', 'custom_fields', 'description', 'name', 'pfs_group', 'proposals', 'tags']), + 'vpn.ipsecprofile': frozenset(['comments', 'custom_fields', 'description', 'ike_policy', 'ipsec_policy', 'mode', 'name', 'tags']), + 'vpn.ipsecproposal': frozenset(['authentication_algorithm', 'comments', 'custom_fields', 'description', 'encryption_algorithm', 'name', 'sa_lifetime_data', 'sa_lifetime_seconds', 'tags']), + 'vpn.l2vpn': frozenset(['comments', 'custom_fields', 'description', 'export_targets', 'identifier', 'import_targets', 'name', 'slug', 'tags', 'tenant', 'type']), + 'vpn.l2vpntermination': frozenset(['assigned_object_id', 'assigned_object_type', 'custom_fields', 'l2vpn', 'tags']), + 'vpn.tunnel': frozenset(['comments', 'custom_fields', 'description', 'encapsulation', 'group', 'ipsec_profile', 'name', 'status', 'tags', 'tenant', 'tunnel_id']), + 'vpn.tunnelgroup': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags']), + 'vpn.tunneltermination': frozenset(['custom_fields', 'outside_ip', 'role', 'tags', 'termination_id', 'termination_type', 'tunnel']), + 'wireless.wirelesslan': frozenset(['auth_cipher', 'auth_psk', 'auth_type', 'comments', 'custom_fields', 'description', 'group', 'scope_id', 'scope_type', 'ssid', 'status', 'tags', 'tenant', 'vlan']), + 'wireless.wirelesslangroup': frozenset(['custom_fields', 'description', 'name', 'parent', 'slug', 'tags']), + 'wireless.wirelesslink': frozenset(['auth_cipher', 'auth_psk', 'auth_type', 'comments', 'custom_fields', 'description', 'distance', 'distance_unit', 'interface_a', 'interface_b', 'ssid', 'status', 'tags', 'tenant']), +} + +def legal_fields(object_type: str|Type[models.Model]) -> frozenset[str]: + if not isinstance(object_type, str): + object_type = 
get_object_type(object_type) + return _LEGAL_FIELDS.get(object_type, frozenset()) + +_OBJECT_TYPE_PRIMARY_VALUE_FIELD_MAP = { + 'ipam.asn': 'asn', + 'dcim.devicetype': 'model', + 'circuits.circuit': 'cid', + 'ipam.ipaddress': 'address', + 'dcim.macaddress': 'mac_address', + 'dcim.moduletype': 'model', + 'ipam.prefix': 'prefix', + 'dcim.racktype': 'model', + 'circuits.virtualcircuit': 'cid', + 'wireless.wirelesslan': 'ssid', +} + +def get_primary_value(data: dict, object_type: str) -> str|None: + field = _OBJECT_TYPE_PRIMARY_VALUE_FIELD_MAP.get(object_type, 'name') + return data.get(field) \ No newline at end of file diff --git a/netbox_diode_plugin/api/serializers.py b/netbox_diode_plugin/api/serializers.py index 838f8d3..60e2860 100644 --- a/netbox_diode_plugin/api/serializers.py +++ b/netbox_diode_plugin/api/serializers.py @@ -2,131 +2,10 @@ # Copyright 2024 NetBox Labs Inc """Diode NetBox Plugin - Serializers.""" -import logging - -from dcim.api.serializers import ( - DeviceRoleSerializer, - DeviceSerializer, - DeviceTypeSerializer, - InterfaceSerializer, - ManufacturerSerializer, - PlatformSerializer, - SiteSerializer, -) -from django.conf import settings from netbox.api.serializers import NetBoxModelSerializer -from packaging import version from netbox_diode_plugin.models import Setting -if version.parse(version.parse(settings.VERSION).base_version) >= version.parse("4.1"): - from core.models import ObjectChange -else: - from extras.models import ObjectChange -from ipam.api.serializers import IPAddressSerializer, PrefixSerializer -from rest_framework import serializers -from utilities.api import get_serializer_for_model -from virtualization.api.serializers import ( - ClusterGroupSerializer, - ClusterSerializer, - ClusterTypeSerializer, - VirtualDiskSerializer, - VirtualMachineSerializer, - VMInterfaceSerializer, -) - -logger = logging.getLogger("netbox.netbox_diode_plugin.api.serializers") - - -def dynamic_import(name): - """Dynamically import a class from an 
absolute path string.""" - components = name.split(".") - mod = __import__(components[0]) - for comp in components[1:]: - mod = getattr(mod, comp) - return mod - - -def get_diode_serializer(instance): - """Get the Diode serializer based on instance model.""" - serializer = get_serializer_for_model(instance) - - serializer_name = f"netbox_diode_plugin.api.serializers.Diode{serializer.__name__}" - - try: - serializer = dynamic_import(serializer_name) - except AttributeError: - logger.warning(f"Could not find serializer for {serializer_name}") - pass - - return serializer - - -class ObjectStateSerializer(serializers.Serializer): - """Object State Serializer.""" - - object_type = serializers.SerializerMethodField(read_only=True) - object_change_id = serializers.SerializerMethodField(read_only=True) - object = serializers.SerializerMethodField(read_only=True) - - def get_object_type(self, instance): - """ - Get the object type from context sent from view. - - Return a string with the format "app.model". - """ - return self.context.get("object_type") - - def get_object_change_id(self, instance): - """ - Get the object changed based on instance ID. - - Return the ID of last change. - """ - object_changed = ( - ObjectChange.objects.filter(changed_object_id=instance.id) - .order_by("-id") - .values_list("id", flat=True) - ) - return object_changed[0] if len(object_changed) > 0 else None - - def get_object(self, instance): - """ - Get the serializer based on instance model. - - Get the data from the model according to its ID. - Return the object according to serializer defined in the NetBox. 
- """ - serializer = get_diode_serializer(instance) - - object_data = instance.__class__.objects.filter(id=instance.id) - - context = {"request": self.context.get("request")} - - data = serializer(object_data, context=context, many=True).data[0] - - return data - - -class ChangeSerialiazer(serializers.Serializer): - """ChangeSet Serializer.""" - - change_id = serializers.UUIDField(required=True) - change_type = serializers.CharField(required=True) - object_version = serializers.IntegerField(required=False, allow_null=True) - object_type = serializers.CharField(required=True) - object_id = serializers.IntegerField(required=False, allow_null=True) - data = serializers.DictField(required=True) - - -class ApplyChangeSetRequestSerializer(serializers.Serializer): - """ApplyChangeSet request Serializer.""" - - change_set_id = serializers.UUIDField(required=True) - change_set = serializers.ListField( - child=ChangeSerialiazer(), required=True, allow_empty=False - ) - class SettingSerializer(NetBoxModelSerializer): """Setting Serializer.""" @@ -142,250 +21,3 @@ class Meta: "created", "last_updated", ) - - -class DiodeIPAddressSerializer(IPAddressSerializer): - """Diode IP Address Serializer.""" - - class Meta: - """Meta class.""" - - model = IPAddressSerializer.Meta.model - fields = IPAddressSerializer.Meta.fields - - def get_assigned_object(self, obj): - """Get the assigned object based on the instance model.""" - if obj.assigned_object is None: - return None - - serializer = get_diode_serializer(obj.assigned_object) - - context = {"request": self.context["request"]} - assigned_object = serializer(obj.assigned_object, context=context).data - - if assigned_object.get("device"): - device_serializer = get_diode_serializer(obj.assigned_object.device) - device = device_serializer(obj.assigned_object.device, context=context).data - assigned_object["device"] = device - - if serializer.__name__.endswith("InterfaceSerializer"): - assigned_object = {"interface": assigned_object} - - 
return assigned_object - - -class DiodeSiteSerializer(SiteSerializer): - """Diode Site Serializer.""" - - status = serializers.CharField() - - class Meta: - """Meta class.""" - - model = SiteSerializer.Meta.model - fields = SiteSerializer.Meta.fields - - -class DiodeDeviceRoleSerializer(DeviceRoleSerializer): - """Diode Device Role Serializer.""" - - class Meta: - """Meta class.""" - - model = DeviceRoleSerializer.Meta.model - fields = DeviceRoleSerializer.Meta.fields - - -class DiodeManufacturerSerializer(ManufacturerSerializer): - """Diode Manufacturer Serializer.""" - - class Meta: - """Meta class.""" - - model = ManufacturerSerializer.Meta.model - fields = ManufacturerSerializer.Meta.fields - - -class DiodePlatformSerializer(PlatformSerializer): - """Diode Platform Serializer.""" - - manufacturer = DiodeManufacturerSerializer(required=False, allow_null=True) - - class Meta: - """Meta class.""" - - model = PlatformSerializer.Meta.model - fields = PlatformSerializer.Meta.fields - - -class DiodeDeviceTypeSerializer(DeviceTypeSerializer): - """Diode Device Type Serializer.""" - - default_platform = DiodePlatformSerializer(required=False, allow_null=True) - manufacturer = DiodeManufacturerSerializer(required=False, allow_null=True) - - class Meta: - """Meta class.""" - - model = DeviceTypeSerializer.Meta.model - fields = DeviceTypeSerializer.Meta.fields - - -class DiodeDeviceSerializer(DeviceSerializer): - """Diode Device Serializer.""" - - site = DiodeSiteSerializer() - device_type = DiodeDeviceTypeSerializer() - role = DiodeDeviceRoleSerializer() - platform = DiodePlatformSerializer(required=False, allow_null=True) - status = serializers.CharField() - - class Meta: - """Meta class.""" - - model = DeviceSerializer.Meta.model - fields = DeviceSerializer.Meta.fields - - -class DiodeNestedInterfaceSerializer(InterfaceSerializer): - """Diode Nested Interface Serializer.""" - - class Meta: - """Meta class.""" - - model = InterfaceSerializer.Meta.model - fields = 
InterfaceSerializer.Meta.fields - - -class DiodeInterfaceSerializer(InterfaceSerializer): - """Diode Interface Serializer.""" - - device = DiodeDeviceSerializer() - parent = DiodeNestedInterfaceSerializer() - type = serializers.CharField() - mode = serializers.CharField() - - class Meta: - """Meta class.""" - - model = InterfaceSerializer.Meta.model - fields = InterfaceSerializer.Meta.fields - - -class DiodePrefixSerializer(PrefixSerializer): - """Diode Prefix Serializer.""" - - status = serializers.CharField() - site = serializers.SerializerMethodField(read_only=True) - - class Meta: - """Meta class.""" - - model = PrefixSerializer.Meta.model - fields = PrefixSerializer.Meta.fields + ["site"] - - def get_site(self, obj): - """Get the site from the instance scope.""" - if obj.scope is None: - return None - - scope_model_meta = obj.scope_type.model_class()._meta - if scope_model_meta.app_label == "dcim" and scope_model_meta.model_name == "site": - serializer = get_serializer_for_model(obj.scope) - context = {'request': self.context['request']} - return serializer(obj.scope, nested=True, context=context).data - - return None - - -class DiodeClusterGroupSerializer(ClusterGroupSerializer): - """Diode Cluster Group Serializer.""" - - class Meta: - """Meta class.""" - - model = ClusterGroupSerializer.Meta.model - fields = ClusterGroupSerializer.Meta.fields - - -class DiodeClusterTypeSerializer(ClusterTypeSerializer): - """Diode Cluster Type Serializer.""" - - class Meta: - """Meta class.""" - - model = ClusterTypeSerializer.Meta.model - fields = ClusterTypeSerializer.Meta.fields - - -class DiodeClusterSerializer(ClusterSerializer): - """Diode Cluster Serializer.""" - - type = DiodeClusterTypeSerializer() - group = DiodeClusterGroupSerializer() - status = serializers.CharField() - site = serializers.SerializerMethodField(read_only=True) - - class Meta: - """Meta class.""" - - model = ClusterSerializer.Meta.model - fields = ClusterSerializer.Meta.fields + ["site"] - - def 
get_site(self, obj): - """Get the site from the instance scope.""" - if obj.scope is None: - return None - - scope_model_meta = obj.scope_type.model_class()._meta - if scope_model_meta.app_label == "dcim" and scope_model_meta.model_name == "site": - serializer = get_serializer_for_model(obj.scope) - context = {'request': self.context['request']} - return serializer(obj.scope, nested=True, context=context).data - - return None - - -class DiodeVirtualMachineSerializer(VirtualMachineSerializer): - """Diode Virtual Machine Serializer.""" - - status = serializers.CharField() - site = DiodeSiteSerializer() - cluster = DiodeClusterSerializer() - device = DiodeDeviceSerializer() - role = DiodeDeviceRoleSerializer() - tenant = serializers.CharField() - platform = DiodePlatformSerializer() - primary_ip = DiodeIPAddressSerializer() - primary_ip4 = DiodeIPAddressSerializer() - primary_ip6 = DiodeIPAddressSerializer() - - class Meta: - """Meta class.""" - - model = VirtualMachineSerializer.Meta.model - fields = VirtualMachineSerializer.Meta.fields - - -class DiodeVirtualDiskSerializer(VirtualDiskSerializer): - """Diode Virtual Disk Serializer.""" - - virtual_machine = DiodeVirtualMachineSerializer() - - class Meta: - """Meta class.""" - - model = VirtualDiskSerializer.Meta.model - fields = VirtualDiskSerializer.Meta.fields - - -class DiodeVMInterfaceSerializer(VMInterfaceSerializer): - """Diode VM Interface Serializer.""" - - virtual_machine = DiodeVirtualMachineSerializer() - - class Meta: - """Meta class.""" - - model = VMInterfaceSerializer.Meta.model - fields = VMInterfaceSerializer.Meta.fields diff --git a/netbox_diode_plugin/api/supported_models.py b/netbox_diode_plugin/api/supported_models.py new file mode 100644 index 0000000..3ec47ce --- /dev/null +++ b/netbox_diode_plugin/api/supported_models.py @@ -0,0 +1,292 @@ +#!/usr/bin/env python +# Copyright 2025 NetBox Labs Inc +"""NetBox Diode Data - API supported models.""" + +import importlib +import logging +import time 
+from functools import lru_cache +from typing import List, Type + +from django.apps import apps +from django.db import models +from django.db.models import ManyToOneRel +from django.db.models.fields import NOT_PROVIDED +from rest_framework import serializers +from utilities.api import get_serializer_for_model as netbox_get_serializer_for_model + +logger = logging.getLogger(__name__) + +# Supported apps +SUPPORTED_APPS = [ + "circuits", + "dcim", + "extras", + "ipam", + "virtualization", + "vpn", + "wireless", + "tenancy", +] + +# Models that are not supported +EXCLUDED_MODELS = [ + "TaggedItem", + "Subscription", + "ScriptModule", + "Dashboard", + "Notification", +] + + +def extract_supported_models() -> dict[str, dict]: + """Extract supported models from NetBox.""" + supported_models = discover_models(SUPPORTED_APPS) + + logger.debug(f"Supported models: {supported_models}") + + models_to_process = supported_models + extracted_models: dict[str, dict] = {} + + start_ts = time.time() + while models_to_process: + model = models_to_process.pop() + try: + fields, related_models = get_model_fields(model) + if not fields: + continue + + prerequisites = get_prerequisites(model, fields) + object_type = f"{model._meta.app_label}.{model._meta.model_name}" + extracted_models[object_type] = { + "fields": fields, + "prerequisites": prerequisites, + } + for related_model in related_models: + related_object_type = f"{related_model._meta.app_label}.{related_model._meta.model_name}" + if ( + related_object_type not in extracted_models + and related_object_type not in models_to_process + ): + models_to_process.append(related_model) + except Exception as e: + logger.error(f"extract_supported_models: {model.__name__} error: {e}") + + finish_ts = time.time() + lapsed_millis = (finish_ts - start_ts) * 1000 + logger.info( + f"done extracting supported models in {lapsed_millis:.2f} milliseconds - extracted_models: {len(extracted_models)}" + ) + + return extracted_models + + +def 
get_prerequisites(model_class, fields) -> List[dict[str, str]]: + """Get the prerequisite models for the model.""" + prerequisites: List[dict[str, str]] = [] + prerequisite_models = getattr(model_class, "prerequisite_models", []) + + for prereq in prerequisite_models: + prereq_model = apps.get_model(prereq) + + for field_name, field_info in fields.items(): + related_model = field_info.get("related_model") + prerequisite_info = { + "field_name": field_name, + "prerequisite_model": prereq_model, + } + if ( + prerequisite_info not in prerequisites + and related_model + and related_model.get("model_class_name") == prereq_model.__name__ + ): + prerequisites.append(prerequisite_info) + break + + return prerequisites + + +@lru_cache(maxsize=128) +def get_model_fields(model_class) -> tuple[dict, list]: + """Get the fields for the model ordered as they are in the serializer.""" + related_models_to_process = [] + + # Skip unsupported apps and excluded models + if ( + model_class._meta.app_label not in SUPPORTED_APPS + or model_class.__name__ in EXCLUDED_MODELS + ): + return {}, [] + + try: + # Get serializer fields to maintain order + serializer_class = get_serializer_for_model(model_class) + serializer_fields = serializer_class().get_fields() + serializer_fields_names = list(serializer_fields.keys()) + except Exception as e: + logger.error(f"Error getting serializer fields for model {model_class}: {e}") + return {}, [] + + # Get all model fields + model_fields = { + field.name: field + for field in model_class._meta.get_fields() + if field.__class__.__name__ not in ["CounterCacheField", "GenericRelation"] + } + + # Reorder fields to match serializer order + ordered_fields = { + field_name: model_fields[field_name] + for field_name in serializer_fields_names + if field_name in model_fields + } + + # Add remaining fields + ordered_fields.update( + { + field_name: field + for field_name, field in model_fields.items() + if field_name not in ordered_fields + } + ) + + 
fields_info = {} + + for field_name, field in ordered_fields.items(): + field_info = { + "type": field.get_internal_type(), + "required": not field.null and not field.blank, + "is_many_to_one_rel": isinstance(field, ManyToOneRel), + "is_numeric": field.get_internal_type() + in [ + "IntegerField", + "FloatField", + "DecimalField", + "PositiveIntegerField", + "PositiveSmallIntegerField", + "SmallIntegerField", + "BigIntegerField", + ], + } + + # Handle default values + default_value = None + if hasattr(field, "default"): + default_value = ( + field.default if field.default not in (NOT_PROVIDED, dict) else None + ) + field_info["default"] = default_value + + # Handle related fields + if field.is_relation: + related_model = field.related_model + if related_model: + related_model_key = ( + f"{related_model._meta.app_label}.{related_model._meta.model_name}" + ) + related_model_info = { + "app_label": related_model._meta.app_label, + "model_name": related_model._meta.model_name, + "model_class_name": related_model.__name__, + "object_type": related_model_key, + "filters": get_field_filters(model_class, field_name), + } + field_info["related_model"] = related_model_info + if ( + related_model.__name__ not in EXCLUDED_MODELS + and related_model not in related_models_to_process + ): + related_models_to_process.append(related_model) + + fields_info[field_name] = field_info + + return fields_info, related_models_to_process + + +@lru_cache(maxsize=128) +def get_field_filters(model_class, field_name): + """Get filters for a field.""" + if hasattr(model_class, "_netbox_private"): + return None + + try: + filterset_name = f"{model_class.__name__}FilterSet" + filterset_module = importlib.import_module( + f"{model_class._meta.app_label}.filtersets" + ) + filterset_class = getattr(filterset_module, filterset_name) + + _filters = set() + field_filters = [] + for filter_name, filter_instance in filterset_class.get_filters().items(): + filter_by = getattr(filter_instance, "field_name", 
None) + filter_field_extra = getattr(filter_instance, "extra", None) + + if not filter_name.startswith(field_name) or filter_by.endswith("_id"): + continue + + if filter_by and filter_by not in _filters: + _filters.add(filter_by) + field_filters.append( + { + "filter_by": filter_by, + "filter_to_field_name": ( + filter_field_extra.get("to_field_name", None) + if filter_field_extra + else None + ), + } + ) + return list(field_filters) if field_filters else None + except Exception as e: + logger.error( + f"Error getting field filters for model {model_class.__name__} and field {field_name}: {e}" + ) + return None + + +@lru_cache(maxsize=128) +def get_serializer_for_model(model, prefix=""): + """Cached wrapper for NetBox's get_serializer_for_model function.""" + return netbox_get_serializer_for_model(model, prefix) + + +def discover_models(root_packages: List[str]) -> list[Type[models.Model]]: + """Discovers all model classes in specified root packages.""" + discovered_models = [] + + # Look through all modules that might contain serializers + module_names = [ + "api.serializers", + ] + + for root_package in root_packages: + logger.debug(f"Searching in root package: {root_package}") + + for module_name in module_names: + full_module_path = f"{root_package}.{module_name}" + try: + module = __import__(full_module_path, fromlist=["*"]) + except ImportError: + logger.error(f"Could not import {full_module_path}") + continue + + # Find all serializer classes in the module + for serializer_name in dir(module): + serializer = getattr(module, serializer_name) + if ( + isinstance(serializer, type) + and issubclass(serializer, serializers.Serializer) + and serializer != serializers.Serializer + and serializer != serializers.ModelSerializer + and hasattr(serializer, "Meta") + and hasattr(serializer.Meta, "model") + ): + model = serializer.Meta.model + if model not in discovered_models: + discovered_models.append(model) + logger.debug( + f"Discovered model: 
{model.__module__}.{model.__name__}" + ) + + return discovered_models diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py new file mode 100644 index 0000000..12e3518 --- /dev/null +++ b/netbox_diode_plugin/api/transformer.py @@ -0,0 +1,396 @@ +#!/usr/bin/env python +# Copyright 2024 NetBox Labs Inc +"""Diode NetBox Plugin - API - Object resolution for diffing.""" + +import copy +import json +import logging +import re +from collections import defaultdict +from functools import lru_cache +from uuid import uuid4 + +import graphlib +from django.core.exceptions import ValidationError +from django.utils.text import slugify + +from .common import ChangeSetException, UnresolvedReference +from .matcher import find_existing_object, fingerprint +from .plugin_utils import get_json_ref_info, get_primary_value + +logger = logging.getLogger("netbox.diode_data") + +@lru_cache(maxsize=128) +def _camel_to_snake_case(name): + """Convert camelCase string to snake_case.""" + name = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name) + return re.sub("([a-z0-9])([A-Z])", r"\1_\2", name).lower() + + +# these are implied values pushed down to referenced objects. 
+_NESTED_CONTEXT = { + "dcim.interface": { + # interface.primary_mac_address -> mac_address.assigned_object = interface + "primary_mac_address": lambda object_type, uuid: { + "assigned_object_type": object_type, + "assigned_object_id": UnresolvedReference(object_type=object_type, uuid=uuid), + }, + }, + "virtualization.vminterface": { + # interface.primary_mac_address -> mac_address.assigned_object = vinterface + "primary_mac_address": lambda object_type, uuid: { + "assigned_object_type": object_type, + "assigned_object_id": UnresolvedReference(object_type=object_type, uuid=uuid), + }, + }, +} + +def _no_context(object_type, uuid): + return None + +def _nested_context(object_type, uuid, field_name): + return _NESTED_CONTEXT.get(object_type, {}).get(field_name, _no_context)(object_type, uuid) + +_IS_CIRCULAR_REFERENCE = { + "dcim.interface": frozenset(["primary_mac_address"]), + "virtualization.vminterface": frozenset(["primary_mac_address"]), + "dcim.device": frozenset(["primary_ip4", "primary_ip6"]), + "dcim.virtualdevicecontext": frozenset(["primary_ip4", "primary_ip6"]), + "virtualization.virtualmachine": frozenset(["primary_ip4", "primary_ip6"]), +} + +def _is_circular_reference(object_type, field_name): + return field_name in _IS_CIRCULAR_REFERENCE.get(object_type, frozenset()) + +def transform_proto_json(proto_json: dict, object_type: str, supported_models: dict) -> list[dict]: + """ + Transform keys of proto json dict to flattened dictionaries with model field keys. + + This also handles placing `_type` fields for generic references, + a certain form of deduplication and resolution of existing objects. 
+ """ + entities = _transform_proto_json_1(proto_json, object_type) + logger.error(f"_transform_proto_json_1 entities: {json.dumps(entities, default=lambda o: str(o), indent=4)}") + entities = _topo_sort(entities) + logger.error(f"_topo_sort: {json.dumps(entities, default=lambda o: str(o), indent=4)}") + deduplicated = _fingerprint_dedupe(entities) + logger.error(f"_fingerprint_dedupe: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") + deduplicated = _topo_sort(deduplicated) + logger.error(f"_topo_sort: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") + _set_slugs(deduplicated, supported_models) + logger.error(f"_set_slugs: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") + resolved = _resolve_existing_references(deduplicated) + logger.error(f"_resolve_references: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") + _set_defaults(resolved, supported_models) + logger.error(f"_set_defaults: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") + + # handle post-create steps + output = _handle_post_creates(resolved) + logger.error(f"_handle_post_creates: {json.dumps(output, default=lambda o: str(o), indent=4)}") + + _check_unresolved_refs(output) + for entity in output: + entity.pop('_refs', None) + + return output + +def _transform_proto_json_1(proto_json: dict, object_type: str, context=None) -> list[dict]: # noqa: C901 + uuid = str(uuid4()) + node = { + "_object_type": object_type, + "_uuid": uuid, + "_refs": set(), + } + + # context pushed down from parent nodes + if context is not None: + for k, v in context.items(): + node[k] = v + if isinstance(v, UnresolvedReference): + node['_refs'].add(v.uuid) + + nodes = [node] + post_create = None + + for key, value in proto_json.items(): + ref_info = get_json_ref_info(object_type, key) + if ref_info is None: + node[_camel_to_snake_case(key)] = copy.deepcopy(value) + continue + + nested_context = _nested_context(object_type, uuid, ref_info.field_name) + 
field_name = ref_info.field_name + is_circular = _is_circular_reference(object_type, field_name) + + if ref_info.is_generic: + node[field_name + "_type"] = ref_info.object_type + field_name = field_name + "_id" + + refs = [] + ref_value = None + if isinstance(value, list): + ref_value = [] + for item in value: + nested = _transform_proto_json_1(item, ref_info.object_type, nested_context) + nodes += nested + ref_uuid = nested[0]['_uuid'] + ref_value.append(UnresolvedReference( + object_type=ref_info.object_type, + uuid=ref_uuid, + )) + refs.append(ref_uuid) + else: + nested = _transform_proto_json_1(value, ref_info.object_type, nested_context) + nodes += nested + ref_uuid = nested[0]['_uuid'] + ref_value = UnresolvedReference( + object_type=ref_info.object_type, + uuid=ref_uuid, + ) + refs.append(ref_uuid) + + if is_circular: + if post_create is None: + post_create = { + "_uuid": str(uuid4()), + "_object_type": object_type, + "_refs": set(), + "_instance": node['_uuid'], + "_is_post_create": True, + } + post_create[field_name] = ref_value + post_create['_refs'].update(refs) + post_create['_refs'].add(node['_uuid']) + continue + + node[field_name] = ref_value + node['_refs'].update(refs) + + if post_create: + nodes.append(post_create) + + return nodes + + +def _topo_sort(entities: list[dict]) -> list[dict]: + """Topologically sort entities by reference.""" + by_uuid = {e['_uuid']: e for e in entities} + graph = defaultdict(set) + for entity in entities: + graph[entity['_uuid']] = entity['_refs'].copy() + + try: + ts = graphlib.TopologicalSorter(graph) + order = tuple(ts.static_order()) + return [by_uuid[uuid] for uuid in order] + except graphlib.CycleError as e: + # TODO the cycle error references the cycle here ... 
+ raise ChangeSetException(f"Circular reference in entities: {e}", errors={ + "__all__": { + "message": "Unable to resolve circular reference in entities", + } + }) + + +def _set_defaults(entities: list[dict], supported_models: dict): + for entity in entities: + model_fields = supported_models.get(entity['_object_type']) + if model_fields is None: + raise ValidationError(f"Model for object type {entity['_object_type']} is not supported") + + for field_name, field_info in model_fields.get('fields', {}).items(): + if entity.get(field_name) is None and field_info.get("default") is not None: + entity[field_name] = field_info["default"] + +def _set_slugs(entities: list[dict], supported_models: dict): + for entity in entities: + model_fields = supported_models.get(entity['_object_type']) + if model_fields is None: + raise ValidationError(f"Model for object type {entity['_object_type']} is not supported") + + for field_name, field_info in model_fields.get('fields', {}).items(): + if field_info["type"] == "SlugField" and entity.get(field_name) is None: + entity[field_name] = _generate_slug(entity['_object_type'], entity) + +def _generate_slug(object_type, data): + """Generate a slug for a model instance.""" + source_value = get_primary_value(data, object_type) + if source_value is not None: + return slugify(str(source_value)) + return None + +def _fingerprint_dedupe(entities: list[dict]) -> list[dict]: + """ + Deduplicates/merges entities by fingerprint. 
+ + *list must be in topo order by reference already* + """ + by_fp = {} + deduplicated = [] + new_refs = {} # uuid -> uuid + + for entity in entities: + if entity.get('_is_post_create'): + fp = entity['_uuid'] + existing = None + else: + fp = fingerprint(entity, entity['_object_type']) + existing = by_fp.get(fp) + + if existing is None: + logger.debug(" * entity is new.") + new_entity = copy.deepcopy(entity) + _update_unresolved_refs(new_entity, new_refs) + by_fp[fp] = new_entity + deduplicated.append(fp) + else: + logger.debug(" * entity already exists.") + new_refs[entity['_uuid']] = existing['_uuid'] + merged = _merge_nodes(existing, entity) + _update_unresolved_refs(merged, new_refs) + by_fp[fp] = merged + + return [by_fp[fp] for fp in deduplicated] + +def _merge_nodes(a: dict, b: dict) -> dict: + """ + Merges two nodes. + + If there are any conflicts, an error is raised. + Ignores conflicts in fields that start with an underscore, + preferring a's value. + """ + merged = copy.deepcopy(a) + merged['_refs'] = a['_refs'] | b['_refs'] + + for k, v in b.items(): + if k.startswith("_"): + continue + if k in merged and merged[k] != v: + raise ValueError(f"Conflict merging {a} and {b} on {k}: {merged[k]} and {v}") + merged[k] = v + return merged + + +def _update_unresolved_refs(entity, new_refs): + if entity.get('_is_post_create'): + instance_uuid = entity['_instance'] + entity['_instance'] = new_refs.get(instance_uuid, instance_uuid) + + entity['_refs'] = {new_refs.get(r,r) for r in entity['_refs']} + + for k, v in entity.items(): + if isinstance(v, UnresolvedReference) and v.uuid in new_refs: + v.uuid = new_refs[v.uuid] + elif isinstance(v, (list, tuple)): + for item in v: + if isinstance(item, UnresolvedReference) and item.uuid in new_refs: + item.uuid = new_refs[item.uuid] + # TODO maps ... 
+ +def _resolve_existing_references(entities: list[dict]) -> list[dict]: + seen = {} + new_refs = {} + resolved = [] + + for data in entities: + object_type = data['_object_type'] + data = copy.deepcopy(data) + _update_resolved_refs(data, new_refs) + + existing = find_existing_object(data, object_type) + if existing is not None: + logger.error(f"existing {data} -> {existing}") + fp = (object_type, existing.id) + if fp in seen: + logger.warning(f"objects resolved to the same existing id after deduplication: {seen[fp]} and {data}") + else: + seen[fp] = data + data['id'] = existing.id + data['_instance'] = existing + new_refs[data['_uuid']] = existing.id + resolved.append(data) + else: + data['id'] = UnresolvedReference(object_type, data['_uuid']) + _update_resolved_refs(data, new_refs) + resolved.append(data) + return resolved + +def _update_resolved_refs(data, new_refs): + for k, v in data.items(): + if isinstance(v, UnresolvedReference) and v.uuid in new_refs: + data[k] = new_refs[v.uuid] + elif isinstance(v, (list, tuple)): + new_items = [] + for item in v: + if isinstance(item, UnresolvedReference) and item.uuid in new_refs: + new_items.append(new_refs[item.uuid]) + else: + new_items.append(item) + data[k] = new_items + # TODO maps ... 
+ +def cleanup_unresolved_references(data: dict) -> list[str]: + """Find and stringify unresolved references in fields.""" + unresolved = set() + for k, v in data.items(): + if isinstance(v, UnresolvedReference): + if k != 'id': + unresolved.add(k) + data[k] = str(v) + elif isinstance(v, (list, tuple)): + items = [] + for item in v: + if isinstance(item, UnresolvedReference): + unresolved.add(k) + items.append(str(item)) + else: + items.append(item) + data[k] = items + # TODO maps + return sorted(unresolved) + +def _handle_post_creates(entities: list[dict]) -> list[str]: + """Merges any unnecessary post-create steps for existing objects.""" + by_uuid = {e['_uuid']: (i, e) for i, e in enumerate(entities)} + out = [] + for entity in entities: + is_post_create = entity.pop('_is_post_create', False) + if not is_post_create: + out.append(entity) + continue + + instance = entity.get('_instance') + prior_index, prior_entity = by_uuid[instance] + + # a post create can be merged whenever the entities it relies on + # already exist (were resolved) or there are no dependencies between + # the object being updated and the post-create. 
+ can_merge = all( + by_uuid[r][1].get('_instance') is not None + for r in entity['_refs'] + ) or sorted(by_uuid[r][0] for r in entity['_refs'])[-1] == prior_index + + if can_merge: + prior_entity.update([x for x in entity.items() if not x[0].startswith('_')]) + else: + entity['id'] = prior_entity['id'] + out.append(entity) + + return out + +def _check_unresolved_refs(entities: list[dict]) -> list[str]: + seen = set() + for e in entities: + seen.add((e['_object_type'], e['_uuid'])) + for k, v in e.items(): + if isinstance(v, UnresolvedReference): + if (v.object_type, v.uuid) not in seen: + raise ChangeSetException( + f"Unresolved reference {v} in {e} does not refer to a prior created object (circular reference?)", + errors={ + e['_object_type']: { + k: ["unable to resolve reference"], + } + } + ) diff --git a/netbox_diode_plugin/api/urls.py b/netbox_diode_plugin/api/urls.py index 9fff272..cb6b3d4 100644 --- a/netbox_diode_plugin/api/urls.py +++ b/netbox_diode_plugin/api/urls.py @@ -5,12 +5,12 @@ from django.urls import include, path from netbox.api.routers import NetBoxRouter -from .views import ApplyChangeSetView, ObjectStateView +from .views import ApplyChangeSetView, GenerateDiffView router = NetBoxRouter() urlpatterns = [ - path("object-state/", ObjectStateView.as_view()), path("apply-change-set/", ApplyChangeSetView.as_view()), + path("generate-diff/", GenerateDiffView.as_view()), path("", include(router.urls)), ] diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index e791ab8..5f6d004 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -1,236 +1,99 @@ #!/usr/bin/env python # Copyright 2024 NetBox Labs Inc """Diode NetBox Plugin - API Views.""" -from typing import Any, Dict, Optional +import json +import logging +import re from django.apps import apps -from django.conf import settings -from packaging import version - -if version.parse(settings.VERSION).major >= 4: - from core.models import 
ObjectType as NetBoxType -else: - from django.contrib.contenttypes.models import ContentType as NetBoxType - -from django.core.exceptions import FieldError -from django.core.exceptions import ValidationError as DjangoValidationError -from django.db import models, transaction -from django.db.models import Q -from rest_framework import status, views +from django.db import transaction +from rest_framework import views from rest_framework.exceptions import ValidationError from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response -from utilities.api import get_serializer_for_model - -from netbox_diode_plugin.api.permissions import IsDiodeReader, IsDiodeWriter -from netbox_diode_plugin.api.serializers import ApplyChangeSetRequestSerializer, ObjectStateSerializer - - -def dynamic_import(name): - """Dynamically import a class from an absolute path string.""" - components = name.split(".") - mod = __import__(components[0]) - for comp in components[1:]: - mod = getattr(mod, comp) - return mod - - -def _get_index_class_fields(object_type: str | NetBoxType): - """ - Given an object type name (e.g., 'dcim.site'), dynamically find and return the corresponding Index class fields. - - :param object_type: Object type name in the format 'app_label.model_name' - :return: The corresponding model and its Index class (e.g., SiteIndex) field names or None. 
- """ - try: - if isinstance(object_type, str): - app_label, model_name = object_type.split('.') - else: - app_label, model_name = object_type.app_label, object_type.model - - model = apps.get_model(app_label, model_name) - - if app_label == "extras" and model_name == "tag": - app_label = "netbox_diode_plugin" - - index_module = dynamic_import(f"{app_label}.search.{model.__name__}Index") - fields = getattr(index_module, "fields", None) - field_names = [field[0] for field in fields] - - return model, field_names - - except (LookupError, ModuleNotFoundError, AttributeError, ValueError): - return None, None - -def _validate_model_instance_fields(instance, fields, value): - """ - Validate the model instance fields against the value. - - :param instance: The model instance. - :param fields: The fields of the model instance. - :param value: The value to validate against the model instance fields. - :return: fields list passed validation - """ - errors = {} - - # Set provided values to the instance fields - for field in fields: - if hasattr(instance, field): - # get the field type - field_cls = instance._meta.get_field(field).__class__ - field_value = _convert_field_value(field_cls, value) - setattr(instance, field, field_value) +from netbox_diode_plugin.api.applier import apply_changeset +from netbox_diode_plugin.api.common import Change, ChangeSet, ChangeSetException, ChangeSetResult +from netbox_diode_plugin.api.differ import generate_changeset +from netbox_diode_plugin.api.permissions import IsDiodeWriter - # Attempt to validate the instance - try: - instance.clean_fields() - except DjangoValidationError as e: - errors = e.message_dict - return errors +logger = logging.getLogger("netbox.diode_data") -def _convert_field_value(field_cls, value): - """Return the converted field value based on the field type.""" - if value is None: - return value +# Try to import Branch model at module level +Branch = None +try: + if apps.is_installed("netbox_branching"): + from 
netbox_branching.models import Branch +except ImportError: + logger.warning( + "netbox_branching plugin is installed but models could not be imported" + ) - try: - if issubclass(field_cls, (models.FloatField, models.DecimalField)): - return float(value) - if issubclass(field_cls, models.IntegerField): - return int(value) - except (ValueError, TypeError): - pass - return value +def get_entity_key(model_name): + """Get the entity key for a model name.""" + s = re.sub(r'([A-Z0-9]{2,})([A-Z])([a-z])', r'\1_\2\3', model_name) + s = re.sub(r'([a-z])([A-Z])', r'\1_\2', s) + s = re.sub(r'_+', '_', s.lower()) # snake + s = ''.join([word.capitalize() for word in s.split("_")]) # upperCamelCase + return s[0].lower() + s[1:] # lowerCamelCase -class ObjectStateView(views.APIView): - """ObjectState view.""" +class GenerateDiffView(views.APIView): + """GenerateDiff view.""" - permission_classes = [IsAuthenticated, IsDiodeReader] - - def _get_lookups(self, object_type_model: str) -> tuple: - """ - This method returns a tuple of related object lookups based on the provided object type model. - - Args: - ---- - object_type_model (str): The name of the object type model. - - Returns: - ------- - tuple: A tuple of related object lookups. The tuple is empty if the object type model does not match any - of the specified models. 
- - """ - if "'ipam.models.ip.ipaddress'" in object_type_model: - return ( - "assigned_object", - "assigned_object__device", - "assigned_object__device__site", - ) - if "'dcim.models.device_components.interface'" in object_type_model: - return "device", "device__site" - if "'dcim.models.devices.device'" in object_type_model: - return ("site",) - return () - - def _search_queryset(self, request): - """Search for objects according to object type using search index classes.""" - object_type = request.GET.get("object_type", None) - object_id = request.GET.get("id", None) - query = request.GET.get("q", None) + permission_classes = [IsAuthenticated, IsDiodeWriter] + def post(self, request, *args, **kwargs): + """Generate diff for entity.""" + try: + return self._post(request, *args, **kwargs) + except Exception: + import traceback + traceback.print_exc() + raise + + def _post(self, request, *args, **kwargs): + entity = request.data.get("entity") + object_type = request.data.get("object_type") + + if not entity: + raise ValidationError("Entity is required") if not object_type: - raise ValidationError("object_type parameter is required") - - if not object_id and not query: - raise ValidationError("id or q parameter is required") - - model, fields = _get_index_class_fields(object_type) - - if object_id: - queryset = model.objects.filter(id=object_id) - else: - q = Q() + raise ValidationError("Object type is required") - invalid_fields = _validate_model_instance_fields(model(), fields, query) + app_label, model_name = object_type.split(".") + model_class = apps.get_model(app_label, model_name) - fields = [field for field in fields if field not in invalid_fields] - - for field in fields: - q |= Q(**{f"{field}__exact": query}) # Exact match - - try: - queryset = model.objects.filter(q) - except DjangoValidationError: - queryset = model.objects.none() - pass + # Convert model name to lowerCamelCase for entity lookup + entity_key = get_entity_key(model_class.__name__) + 
original_entity_data = entity.get(entity_key) - lookups = self._get_lookups(str(model).lower()) - - if lookups: - queryset = queryset.prefetch_related(*lookups) - - additional_attributes_query_filter = ( - self._additional_attributes_query_filter() + if original_entity_data is None: + raise ValidationError( + f"No data found for {entity_key} in entity got: {entity.keys()}" ) - if additional_attributes_query_filter: - queryset = queryset.filter(**additional_attributes_query_filter) - - return queryset - - def get(self, request, *args, **kwargs): - """ - Return a JSON with object_type, object_change_id, and object. - - Search for objects according to object type. - If the obj_type parameter is not in the parameters, raise a ValidationError. - When object ID is provided in the request, search using it in the model specified by object type. - If ID is not provided, use the q parameter for searching. - Lookup is iexact - """ try: - queryset = self._search_queryset(request) - except (FieldError, ValueError): - return Response( - {"errors": ["invalid additional attributes provided"]}, - status=status.HTTP_400_BAD_REQUEST, + result = generate_changeset(original_entity_data, object_type) + except ChangeSetException as e: + logger.error(f"Error generating change set: {e}") + result = ChangeSetResult( + errors=e.errors, ) + return Response(result.to_dict(), status=result.get_status_code()) - self.check_object_permissions(request, queryset) + branch_id = request.headers.get("X-NetBox-Branch") - object_type = request.GET.get("object_type", None) - - serializer = ObjectStateSerializer( - queryset, - many=True, - context={ - "request": request, - "object_type": f"{object_type}", - }, - ) - - try: - if len(serializer.data) > 0: - return Response(serializer.data[0]) - return Response({}) - except AttributeError as e: - return Response( - {"errors": [f"Serializer error: {e.args[0]}"]}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def _additional_attributes_query_filter(self): - 
"""Get the additional attributes query filter.""" - additional_attributes = {} - for attr in self.request.query_params: - if attr not in ["object_type", "id", "q", "_branch"]: - additional_attributes[attr] = self.request.query_params.get(attr) + # If branch ID is provided and branching plugin is installed, get branch name + if branch_id and Branch is not None: + try: + branch = Branch.objects.get(id=branch_id) + result.branch = {"id": branch.id, "name": branch.name} + except Branch.DoesNotExist: + logger.warning(f"Branch with ID {branch_id} does not exist") - return dict(additional_attributes.items()) + return Response(result.to_dict(), status=result.get_status_code()) class ApplyChangeSetView(views.APIView): @@ -238,438 +101,44 @@ class ApplyChangeSetView(views.APIView): permission_classes = [IsAuthenticated, IsDiodeWriter] - @staticmethod - def _get_object_type_model(object_type: str | NetBoxType): - """Get the object type model from object_type.""" - if isinstance(object_type, str): - app_label, model_name = object_type.split(".") - object_content_type = NetBoxType.objects.get_by_natural_key(app_label, model_name) - else: - object_content_type = object_type - return object_content_type, object_content_type.model_class() - - def _get_assigned_object_type(self, model_name: str): - """Get the object type model from applied IPAddress assigned object.""" - assignable_object_types = { - "interface": "dcim.interface", - } - return assignable_object_types.get(model_name.lower(), None) - - def _add_nested_opts(self, fields, key, value): - if isinstance(value, dict): - for nested_key, nested_value in value.items(): - self._add_nested_opts(fields, f"{key}__{nested_key}", nested_value) - elif not isinstance(value, list): - fields[key] = value - - def _get_serializer( - self, - change_type: str, - object_id: int, - object_type: str, - object_data: dict, - ): - """Get the serializer for the object type.""" - _, object_type_model_class = 
self._get_object_type_model(object_type) - - if change_type == "create": - return self._get_serializer_to_create(object_data, object_type, object_type_model_class) - - if change_type == "update": - return self._get_serializer_to_update(object_data, object_id, object_type, object_type_model_class) - - raise ValidationError("Invalid change_type") - - def _get_serializer_to_create(self, object_data, object_type, object_type_model_class): - # Get object data fields that are not dictionaries or lists - fields = self._get_fields_to_find_existing_objects(object_data, object_type) - # Check if the object already exists - try: - instance = object_type_model_class.objects.get(**fields) - return get_serializer_for_model(object_type_model_class)( - instance, data=object_data, context={"request": self.request, "pk": instance.pk} - ) - except object_type_model_class.DoesNotExist: - pass - serializer = get_serializer_for_model(object_type_model_class)( - data=object_data, context={"request": self.request} - ) - return serializer - - def _get_serializer_to_update(self, object_data, object_id, object_type, object_type_model_class): - lookups = () - fields = {} - primary_ip_to_set: Optional[dict] = None - if object_id: - fields["id"] = object_id - elif object_type == "dcim.device" and any( - object_data.get(attr) for attr in ("primary_ip4", "primary_ip6") - ): - ip_address = self._retrieve_primary_ip_address( - "primary_ip4", object_data - ) - - if ip_address is None: - ip_address = self._retrieve_primary_ip_address( - "primary_ip6", object_data - ) - - if ip_address is None: - raise ValidationError("primary IP not found") - - if ip_address: - primary_ip_to_set = { - "id": ip_address.id, - "family": ip_address.family, - } - - lookups = ("site",) - fields["name"] = object_data.get("name") - fields["site__name"] = object_data.get("site").get("name") - else: - raise ValidationError("object_id parameter is required") + def post(self, request, *args, **kwargs): + """Apply change set for 
entity.""" try: - instance = object_type_model_class.objects.prefetch_related(*lookups).get(**fields) - if object_type == "dcim.device" and primary_ip_to_set: - object_data = { - "id": instance.id, - "device_type": instance.device_type.id, - "role": instance.role.id, - "site": instance.site.id, - f'primary_ip{primary_ip_to_set.get("family")}': primary_ip_to_set.get( - "id" - ), - } - except object_type_model_class.DoesNotExist: - raise ValidationError(f"object with id {object_id} does not exist") - serializer = get_serializer_for_model(object_type_model_class)( - instance, data=object_data, context={"request": self.request} + return self._post(request, *args, **kwargs) + except Exception: + import traceback + + traceback.print_exc() + raise + + def _post(self, request, *args, **kwargs): + data = request.data.copy() + + changes = [] + if 'changes' in data: + changes = [ + Change( + change_type=change.get('change_type'), + object_type=change.get('object_type'), + object_id=change.get('object_id'), + ref_id=change.get('ref_id'), + data=change.get('data'), + before=change.get('before'), + new_refs=change.get('new_refs', []), + ) for change in data['changes'] + ] + change_set = ChangeSet( + id=data.get('id'), + changes=changes, ) - return serializer - - def _get_fields_to_find_existing_objects(self, object_data, object_type): - fields = {} - for key, value in object_data.items(): - self._add_nested_opts(fields, key, value) - - match object_type: - case "dcim.interface" | "virtualization.vminterface": - mac_address = fields.pop("mac_address", None) - if mac_address is not None: - fields["primary_mac_address__mac_address"] = mac_address - case "ipam.ipaddress": - fields.pop("assigned_object_type") - fields["assigned_object_type_id"] = fields.pop("assigned_object_id") - case "ipam.prefix" | "virtualization.cluster": - if scope_type := object_data.get("scope_type"): - scope_type_model, _ = self._get_object_type_model(scope_type) - fields["scope_type"] = scope_type_model - 
case "virtualization.virtualmachine": - if cluster_scope_type := fields.get("cluster__scope_type"): - cluster_scope_type_model, _ = self._get_object_type_model(cluster_scope_type) - fields["cluster__scope_type"] = cluster_scope_type_model - case "virtualization.vminterface": - if cluster_scope_type := fields.get("virtual_machine__cluster__scope_type"): - cluster_scope_type_model, _ = self._get_object_type_model(cluster_scope_type) - fields["virtual_machine__cluster__scope_type"] = cluster_scope_type_model - - return fields - - def _retrieve_primary_ip_address(self, primary_ip_attr: str, object_data: dict): - """Retrieve the primary IP address object.""" - ip_address = object_data.get(primary_ip_attr) - if ip_address is None: - return None - - ipaddress_assigned_object = object_data.get(primary_ip_attr, {}).get( - "assigned_object", None - ) - if ipaddress_assigned_object is None: - return None - - interface = ipaddress_assigned_object.get("interface") - if interface is None: - return None - - interface_device = interface.get("device") - if interface_device is None: - return None - object_type_mode, object_type_model_class = self._get_object_type_model("ipam.ipaddress") - ip_address_object = object_type_model_class.objects.get( - address=ip_address.get("address"), - interface__name=interface.get("name"), - interface__device__name=interface_device.get("name"), - interface__device__site__name=interface_device.get("site").get("name"), - ) - return ip_address_object - - @staticmethod - def _get_error_response(change_set_id, error): - """Get the error response.""" - return Response( - { - "change_set_id": change_set_id, - "result": "failed", - "errors": error, - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - def _retrieve_assigned_object_interface_device_lookup_args( - self, device: dict - ) -> dict: - """ - This method retrieves the lookup arguments for the interface device of an assigned object. 
- - Args: - ---- - device (dict): A dictionary containing the details of the device. It should contain either 'id' or 'name' - of the device and 'site' which is another dictionary containing either 'id' or 'name' of the site. - - Returns: - ------- - dict: A dictionary containing the lookup arguments for the interface device. - - Raises: - ------ - ValidationError: If neither 'id' nor 'name' is provided for the device or the site. - - """ - args = {} - if device.get("id"): - args["device__id"] = device.get("id") - elif device.get("name"): - args["device__name"] = device.get("name") - else: - raise ValidationError( - "Interface device needs to have either id or name provided" - ) - - site = device.get("site", {}) - if site: - if site.get("id"): - args["device__site__id"] = site.get("id") - elif site.get("name"): - args["device__site__name"] = site.get("name") - else: - raise ValidationError( - "Interface device site needs to have either id or name provided" - ) - return args - - def _handle_ipaddress_assigned_object(self, object_data: dict) -> Optional[Dict[str, Any]]: - """Handle IPAM IP address assigned object.""" - ipaddress_assigned_object = object_data.get("assigned_object", None) - - if ipaddress_assigned_object is not None: - assigned_object_keys = list(ipaddress_assigned_object.keys()) - model_name = assigned_object_keys[0] - assigned_object_type = self._get_assigned_object_type(model_name) - assigned_object_model, object_type_model_class = self._get_object_type_model(assigned_object_type) - assigned_object_properties_dict = dict( - ipaddress_assigned_object[model_name].items() - ) - - if len(assigned_object_properties_dict) == 0: - return {"assigned_object": f"properties not provided for {model_name}"} - - try: - lookups = ( - ("device", "device__site") if model_name == "interface" else () - ) - args = {} - - if model_name == "interface": - if assigned_object_properties_dict.get("id"): - args["id"] = assigned_object_properties_dict.get("id") - elif 
assigned_object_properties_dict.get("name"): - try: - device = assigned_object_properties_dict.get("device", {}) - args = self._retrieve_assigned_object_interface_device_lookup_args( - device - ) - args["name"] = assigned_object_properties_dict.get("name") - except ValidationError as e: - return {"assigned_object": str(e)} - else: - error = f"provided properties '{assigned_object_properties_dict}' not sufficient to retrieve {model_name}" - return {"assigned_object": error} - - assigned_object_instance = ( - object_type_model_class.objects.prefetch_related(*lookups).get(**args) - ) - except object_type_model_class.DoesNotExist: - return { - "assigned_object": f"Assigned object with name {ipaddress_assigned_object[model_name]} does not exist" - } - - object_data.pop("assigned_object") - object_data["assigned_object_type"] = assigned_object_type - object_data["assigned_object_id"] = assigned_object_instance.id - return None - - def _handle_interface_mac_address_compat(self, instance, object_type: str, object_data: dict) -> Optional[Dict[str, Any]]: - """Handle interface mac address backward compatibility.""" - # TODO(ltucker): deprecate. 
- if object_type != "dcim.interface" and object_type != "virtualization.vminterface": - return None - - if object_data.get("mac_address"): - mac_address_value = object_data.pop("mac_address") - mac_address_instance, _ = instance.mac_addresses.get_or_create( - mac_address=mac_address_value, - ) - instance.primary_mac_address = mac_address_instance - instance.save() - return None - - def _handle_scope(self, object_data: dict, is_nested: bool = False) -> Optional[Dict[str, Any]]: - """Handle scope object.""" - if object_data.get("site"): - site = object_data.pop("site") - scope_type = "dcim.site" - object_type_model, object_type_model_class = self._get_object_type_model(scope_type) - # Scope type of the nested object happens to be resolved differently than in the top-level object - # and is expected to be a content type object instead of "app_label.model_name" string format - if is_nested: - object_data["scope_type"] = object_type_model - else: - object_data["scope_type"] = scope_type - site_id = site.get("id", None) - if site_id is None: - try: - site = object_type_model_class.objects.get( - name=site.get("name") - ) - site_id = site.id - except object_type_model_class.DoesNotExist: - return {"site": f"site with name {site.get('name')} does not exist"} - - object_data["scope_id"] = site_id - - return None - - def _transform_object_data(self, object_type: str, object_data: dict) -> Optional[Dict[str, Any]]: - """Transform object data.""" - errors = None - - match object_type: - case "ipam.ipaddress": - errors = self._handle_ipaddress_assigned_object(object_data) - case "ipam.prefix": - errors = self._handle_scope(object_data, False) - case "virtualization.cluster": - errors = self._handle_scope(object_data, False) - case "virtualization.virtualmachine": - if cluster_object_data := object_data.get("cluster"): - errors = self._handle_scope(cluster_object_data, True) - object_data["cluster"] = cluster_object_data - case "virtualization.vminterface": - cluster_object_data 
= object_data.get("virtual_machine", {}).get("cluster") - if cluster_object_data is not None: - errors = self._handle_scope(cluster_object_data, True) - object_data["virtual_machine"]["cluster"] = cluster_object_data - case _: - pass - - return errors - - def post(self, request, *args, **kwargs): - """ - Create a new change set and apply it to the current state. - - The request body should contain a list of changes to be applied. - """ - serializer_errors = [] - - request_serializer = ApplyChangeSetRequestSerializer(data=request.data) - - change_set_id = self.request.data.get("change_set_id", None) - - if not request_serializer.is_valid(): - for field_error_name in request_serializer.errors: - self._extract_serializer_errors( - field_error_name, request_serializer, serializer_errors - ) - - return self._get_error_response(change_set_id, serializer_errors) - - change_set = request_serializer.data.get("change_set", None) - try: with transaction.atomic(): - for change in change_set: - change_id = change.get("change_id", None) - change_type = change.get("change_type", None) - object_type = change.get("object_type", None) - object_data = change.get("data", None) - object_id = change.get("object_id", None) - - errors = self._transform_object_data(object_type, object_data) - - if errors is not None: - serializer_errors.append({"change_id": change_id, **errors}) - continue - - serializer = self._get_serializer(change_type, object_id, object_type, object_data) - - # Skip creating an object if it already exists - if change_type == "create" and serializer.context.get("pk"): - continue - - if serializer.is_valid(): - serializer.save() - else: - errors_dict = { - field_name: f"{field_name}: {str(field_errors[0])}" - for field_name, field_errors in serializer.errors.items() - } - - serializer_errors.append( - {"change_id": change_id, **errors_dict} - ) - continue - - errors = self._handle_interface_mac_address_compat(serializer.instance, object_type, object_data) - if errors is 
not None: - serializer_errors.append({"change_id": change_id, **errors}) - continue - if len(serializer_errors) > 0: - raise ApplyChangeSetException - except ApplyChangeSetException: - return self._get_error_response(change_set_id, serializer_errors) - - data = {"change_set_id": change_set_id, "result": "success"} - return Response(data, status=status.HTTP_200_OK) - - def _extract_serializer_errors( - self, field_error_name, request_serializer, serializer_errors - ): - """Extract serializer errors.""" - if isinstance(request_serializer.errors[field_error_name], dict): - for error_index, error_values in request_serializer.errors[ - field_error_name - ].items(): - errors_dict = { - "change_id": request_serializer.data.get("change_set")[ - error_index - ].get("change_id") - } - - for field_name, field_errors in error_values.items(): - errors_dict[field_name] = f"{str(field_errors[0])}" - - serializer_errors.append(errors_dict) - else: - errors = { - field_error_name: f"{str(field_errors)}" - for field_errors in request_serializer.errors[field_error_name] - } - - serializer_errors.append(errors) - - -class ApplyChangeSetException(Exception): - """ApplyChangeSetException used to cause atomic transaction rollback.""" + result = apply_changeset(change_set) + except ChangeSetException as e: + logger.error(f"Error applying change set: {e}") + result = ChangeSetResult( + id=change_set.id, + errors=e.errors, + ) - pass + return Response(result.to_dict(), status=result.get_status_code()) diff --git a/netbox_diode_plugin/tests/test_api_apply_change_set.py b/netbox_diode_plugin/tests/test_api_apply_change_set.py index 62950d4..b2d27c0 100644 --- a/netbox_diode_plugin/tests/test_api_apply_change_set.py +++ b/netbox_diode_plugin/tests/test_api_apply_change_set.py @@ -29,6 +29,8 @@ User = get_user_model() +def _get_error(response, object_name, field): + return response.json().get("errors", {}).get(object_name, {}).get(field, []) class BaseApplyChangeSet(APITestCase): """Base 
ApplyChangeSet test case.""" @@ -178,19 +180,20 @@ class ApplyChangeSetTestCase(BaseApplyChangeSet): @staticmethod def get_change_id(payload, index): """Get change_id from payload.""" - return payload.get("change_set")[index].get("change_id") + return payload.get("changes")[index].get("change_id") def test_change_type_create_return_200(self): """Test create change_type with successful.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "dcim.site", "object_id": None, + "ref_id": "1", "data": { "name": "Site A", "slug": "site-a", @@ -208,6 +211,7 @@ def test_change_type_create_return_200(self): "object_version": None, "object_type": "dcim.interface", "object_id": None, + "ref_id": "2", "data": { "name": "Interface 1", "device": self.devices[1].pk, @@ -220,25 +224,23 @@ def test_change_type_create_return_200(self): "object_version": None, "object_type": "ipam.ipaddress", "object_id": None, + "ref_id": "3", "data": { "address": "192.163.2.1/24", - "assigned_object": { - "interface": {"id": self.interfaces[2].pk}, - }, + "assigned_object_type": "dcim.interface", + "assigned_object_id": self.interfaces[2].pk }, }, ], } - response = self.send_request(payload) - - self.assertEqual(response.json().get("result"), "success") + _ = self.send_request(payload) def test_change_type_update_return_200(self): """Test update change_type with successful.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "update", @@ -259,26 +261,26 @@ def test_change_type_update_return_200(self): ], } - response = self.client.post( + _ = self.client.post( self.url, payload, format="json", **self.user_header ) site_updated = Site.objects.get(id=20) - self.assertEqual(response.json().get("result"), "success") 
self.assertEqual(site_updated.name, "Site A") def test_change_type_create_with_error_return_400(self): """Test create change_type with wrong payload.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "dcim.site", "object_id": None, + "ref_id": "1", "data": { "name": "Site A", "slug": "site-a", @@ -294,25 +296,19 @@ def test_change_type_create_with_error_return_400(self): } response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - site_created = Site.objects.filter(name="Site A") - self.assertEqual(response.json().get("result"), "failed") - self.assertEqual( - response.json().get("errors")[0].get("change_id"), - self.get_change_id(payload, 0), - ) self.assertIn( 'Expected a list of items but got type "int".', - response.json().get("errors")[0].get("asns"), + _get_error(response, "changes[0]", "asns"), ) self.assertFalse(site_created.exists()) def test_change_type_update_with_error_return_400(self): """Test update change_type with wrong payload.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "update", @@ -336,29 +332,24 @@ def test_change_type_update_with_error_return_400(self): response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) site_updated = Site.objects.get(id=20) - - self.assertEqual(response.json().get("result"), "failed") - self.assertEqual( - response.json().get("errors")[0].get("change_id"), - self.get_change_id(payload, 0), - ) self.assertIn( 'Expected a list of items but got type "int".', - response.json().get("errors")[0].get("asns"), + _get_error(response, "changes[0]", "asns") ) self.assertEqual(site_updated.name, "Site 2") def test_change_type_create_with_multiples_objects_return_200(self): """Test create change type with two 
objects.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "dcim.site", "object_id": None, + "ref_id": "1", "data": { "name": "Site Z", "slug": "site-z", @@ -376,6 +367,7 @@ def test_change_type_create_with_multiples_objects_return_200(self): "object_version": None, "object_type": "dcim.device", "object_id": None, + "ref_id": "2", "data": { "device_type": self.device_types[1].pk, "role": self.roles[1].pk, @@ -388,15 +380,13 @@ def test_change_type_create_with_multiples_objects_return_200(self): ], } - response = self.send_request(payload) - - self.assertEqual(response.json().get("result"), "success") + _ = self.send_request(payload) def test_change_type_update_with_multiples_objects_return_200(self): """Test update change type with two objects.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "update", @@ -432,26 +422,26 @@ def test_change_type_update_with_multiples_objects_return_200(self): ], } - response = self.send_request(payload) + _ = self.send_request(payload) site_updated = Site.objects.get(id=20) device_updated = Device.objects.get(id=10) - self.assertEqual(response.json().get("result"), "success") self.assertEqual(site_updated.name, "Site A") self.assertEqual(device_updated.name, "Test Device 3") def test_change_type_create_and_update_with_error_in_one_object_return_400(self): """Test create and update change type with one object with error.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "dcim.site", "object_id": None, + "ref_id": "1", "data": { "name": "Site Z", "slug": "site-z", @@ -486,14 +476,9 @@ def 
test_change_type_create_and_update_with_error_in_one_object_return_400(self) site_created = Site.objects.filter(name="Site Z") device_created = Device.objects.filter(name="Test Device 4") - self.assertEqual(response.json().get("result"), "failed") - self.assertEqual( - response.json().get("errors")[0].get("change_id"), - self.get_change_id(payload, 1), - ) self.assertIn( - "Related object not found using the provided numeric ID", - response.json().get("errors")[0].get("device_type"), + "Related object not found using the provided numeric ID: 3", + _get_error(response, "changes[1]", "device_type"), ) self.assertFalse(site_created.exists()) self.assertFalse(device_created.exists()) @@ -501,14 +486,15 @@ def test_change_type_create_and_update_with_error_in_one_object_return_400(self) def test_multiples_create_type_error_in_two_objects_return_400(self): """Test create with error in two objects.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "dcim.site", "object_id": None, + "ref_id": "1", "data": { "name": "Site Z", "slug": "site-z", @@ -526,6 +512,7 @@ def test_multiples_create_type_error_in_two_objects_return_400(self): "object_version": None, "object_type": "dcim.device", "object_id": None, + "ref_id": "2", "data": { "device_type": 3, "role": self.roles[1].pk, @@ -541,6 +528,7 @@ def test_multiples_create_type_error_in_two_objects_return_400(self): "object_version": None, "object_type": "dcim.device", "object_id": None, + "ref_id": "3", "data": { "device_type": 100, "role": 10, @@ -558,24 +546,9 @@ def test_multiples_create_type_error_in_two_objects_return_400(self): site_created = Site.objects.filter(name="Site Z") device_created = Device.objects.filter(name="Test Device 4") - self.assertEqual(response.json().get("result"), "failed") - - self.assertEqual( - 
response.json().get("errors")[0].get("change_id"), - self.get_change_id(payload, 1), - ) self.assertIn( - "Related object not found using the provided numeric ID", - response.json().get("errors")[0].get("device_type"), - ) - - self.assertEqual( - response.json().get("errors")[1].get("change_id"), - self.get_change_id(payload, 2), - ) - self.assertIn( - "Related object not found using the provided numeric ID", - response.json().get("errors")[1].get("device_type"), + "Related object not found using the provided numeric ID: 3", + _get_error(response, "changes[1]", "device_type"), ) self.assertFalse(site_created.exists()) @@ -584,8 +557,8 @@ def test_multiples_create_type_error_in_two_objects_return_400(self): def test_change_type_update_with_object_id_not_exist_return_400(self): """Test update object with nonexistent object_id.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "update", @@ -612,14 +585,17 @@ def test_change_type_update_with_object_id_not_exist_return_400(self): site_updated = Site.objects.get(id=20) - self.assertEqual(response.json()[0], "object with id 30 does not exist") + self.assertIn( + "dcim.site with id 30 does not exist", + _get_error(response, "changes[0]", "object_id"), + ) self.assertEqual(site_updated.name, "Site 2") def test_change_set_id_field_not_provided_return_400(self): """Test update object with change_set_id incorrect.""" payload = { - "change_set_id": None, - "change_set": [ + "id": None, + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "update", @@ -642,21 +618,21 @@ def test_change_set_id_field_not_provided_return_400(self): response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - self.assertIsNone(response.json().get("errors")[0].get("change_id")) - self.assertEqual( - response.json().get("errors")[0].get("change_set_id"), - "This field may not be null.", + 
self.assertIsNone(response.json().get("errors", {}).get("change_id", None)) + self.assertIn( + "Change set ID is required", + _get_error(response, "changeset", "id"), ) - def test_change_set_id_change_id_and_change_type_field_not_provided_return_400( + def test_change_type_field_not_provided_return_400( self, ): - """Test update object with change_set_id, change_id, and change_type incorrect.""" + """Test update object with change_type incorrect.""" payload = { - "change_set_id": "", - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { - "change_id": "", + "change_id": str(uuid.uuid4()), "change_type": "", "object_version": None, "object_type": "dcim.site", @@ -677,35 +653,23 @@ def test_change_set_id_change_id_and_change_type_field_not_provided_return_400( response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.json().get("errors")[0].get("change_set_id"), - "Must be a valid UUID.", - ) - self.assertEqual( - response.json().get("errors")[1].get("change_id"), - "Must be a valid UUID.", - ) - self.assertEqual( - response.json().get("errors")[1].get("change_type"), - "This field may not be blank.", + self.assertIn( + "Unsupported change type ''", + _get_error(response, "changes[0]", "change_type"), ) def test_change_set_id_field_and_change_set_not_provided_return_400(self): """Test update object with change_set_id and change_set incorrect.""" payload = { - "change_set_id": "", - "change_set": [], + "id": "", + "changes": [], } response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.json().get("errors")[0].get("change_set_id"), - "Must be a valid UUID.", - ) - self.assertEqual( - response.json().get("errors")[1].get("change_set"), - "This list may not be empty.", + self.assertIn( + "Change set ID is required", + _get_error(response, "changeset", "id"), ) def test_change_type_and_object_type_provided_return_400( @@ -713,14 +677,15 @@ def 
test_change_type_and_object_type_provided_return_400( ): """Test change_type and object_type incorrect.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": None, "object_version": None, "object_type": "", "object_id": None, + "ref_id": "1", "data": { "name": "Site A", "slug": "site-a", @@ -737,6 +702,7 @@ def test_change_type_and_object_type_provided_return_400( "object_version": None, "object_type": "dcim.site", "object_id": None, + "ref_id": "2", "data": { "name": "Site Z", "slug": "site-z", @@ -752,472 +718,340 @@ def test_change_type_and_object_type_provided_return_400( response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - # First item of change_set - self.assertEqual( - response.json().get("errors")[0].get("change_id"), - self.get_change_id(payload, 0), - ) - self.assertEqual( - response.json().get("errors")[0].get("change_type"), - "This field may not be null.", - ) - self.assertEqual( - response.json().get("errors")[0].get("object_type"), - "This field may not be blank.", - ) - - # Second item of change_set - self.assertEqual( - response.json().get("errors")[1].get("change_id"), - self.get_change_id(payload, 1), - ) - self.assertEqual( - response.json().get("errors")[1].get("change_type"), - "This field may not be blank.", + self.assertIn( + "Unsupported change type 'None'", + _get_error(response, "changes[0]", "change_type"), ) + # self.assertEqual( + # response.json().get("errors")[0].get("change_type"), + # "This field may not be null.", + # ) + # self.assertEqual( + # response.json().get("errors")[0].get("object_type"), + # "This field may not be blank.", + # ) + + # # Second item of change_set + # self.assertEqual( + # response.json().get("errors")[1].get("change_id"), + # self.get_change_id(payload, 1), + # ) + # self.assertEqual( + # response.json().get("errors")[1].get("change_type"), + # "This field may not be 
blank.", + # ) def test_create_ip_address_return_200(self): """Test create ip_address with successful.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "ipam.ipaddress", "object_id": None, + "ref_id": "1", "data": { "address": "192.161.3.1/24", - "assigned_object": { - "interface": { - "name": self.interfaces[3].name, - "device": { - "name": self.devices[0].name, - "site": {"name": self.sites[0].name}, - }, - }, - }, + "assigned_object_id": self.interfaces[3].pk, + "assigned_object_type": "dcim.interface", }, }, ], } - response = self.send_request(payload) - - self.assertEqual(response.json().get("result"), "success") - - def test_create_ip_address_return_400(self): - """Test create ip_address with missing interface name.""" - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "create", - "object_version": None, - "object_type": "ipam.ipaddress", - "object_id": None, - "data": { - "address": "192.161.3.1/24", - "assigned_object": { - "interface": { - # Forcing to miss the name of the interface - "device": { - "name": self.devices[0].name, - "site": {"name": self.sites[0].name}, - }, - }, - }, - }, - }, - ], - } - response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - - self.assertIn( - "not sufficient to retrieve interface", - response.json().get("errors")[0].get("assigned_object"), - ) - - def test_create_ip_address_not_exist_interface_return_400(self): - """Test create ip_address with not valid interface.""" - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "create", - "object_version": None, - "object_type": "ipam.ipaddress", - "object_id": None, - "data": { - "address": "192.161.3.1/24", - "assigned_object": { - "interface": { - 
"name": "not_exist", - "device": { - "name": self.devices[0].name, - "site": {"name": self.sites[0].name}, - }, - }, - }, - }, - }, - ], - } - response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - - self.assertIn( - "does not exist", - response.json().get("errors")[0].get("assigned_object"), - ) - - def test_create_ip_address_missing_device_interface_return_400(self): - """Test create ip_address with missing device interface name.""" - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "create", - "object_version": None, - "object_type": "ipam.ipaddress", - "object_id": None, - "data": { - "address": "192.161.3.1/24", - "assigned_object": { - "interface": { - "name": "not_exist", - "device": { - "site": {"name": self.sites[0].name}, - }, - }, - }, - }, - }, - ], - } - response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - - self.assertIn( - "Interface device needs to have either id or name provided", - response.json().get("errors")[0].get("assigned_object"), - ) - - def test_create_ip_address_missing_interface_device_site_return_400(self): - """Test create ip_address with missing interface device site name.""" - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "create", - "object_version": None, - "object_type": "ipam.ipaddress", - "object_id": None, - "data": { - "address": "192.161.3.1/24", - "assigned_object": { - "interface": { - "name": "not_exist", - "device": { - "name": self.devices[0].name, - "site": {"facility": "Betha"}, - }, - }, - }, - }, - }, - ], - } - response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - - self.assertIn( - "Interface device site needs to have either id or name provided", - response.json().get("errors")[0].get("assigned_object"), - ) - - def test_primary_ip_address_not_found_return_400(self): - """Test update 
primary ip address with site name.""" - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "update", - "object_version": None, - "object_type": "dcim.device", - "data": { - "name": self.devices[0].name, - "site": {"name": self.sites[0].name}, - "primary_ip6": { - "address": "2001:DB8:0000:0000:244:17FF:FEB6:D37D/64", - }, - }, - }, - ], - } - response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - - self.assertEqual(response.json()[0], "primary IP not found") + _ = self.send_request(payload) + + # def test_create_ip_address_return_400(self): + # """Test create ip_address with missing interface name.""" + # payload = { + # "id": str(uuid.uuid4()), + # "change_set": [ + # { + # "change_id": str(uuid.uuid4()), + # "change_type": "create", + # "object_version": None, + # "object_type": "ipam.ipaddress", + # "object_id": None, + # "data": { + # "address": "192.161.3.1/24", + # "assigned_object": { + # "interface": { + # # Forcing to miss the name of the interface + # "device": { + # "name": self.devices[0].name, + # "site": {"name": self.sites[0].name}, + # }, + # }, + # }, + # }, + # }, + # ], + # } + # response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) + + # self.assertIn( + # "not sufficient to retrieve interface", + # response.json().get("errors")[0].get("assigned_object"), + # ) + + # def test_create_ip_address_not_exist_interface_return_400(self): + # """Test create ip_address with not valid interface.""" + # payload = { + # "id": str(uuid.uuid4()), + # "changes": [ + # { + # "change_id": str(uuid.uuid4()), + # "change_type": "create", + # "object_version": None, + # "object_type": "ipam.ipaddress", + # "object_id": None, + # "data": { + # "address": "192.161.3.1/24", + # "assigned_object": { + # "interface": { + # "name": "not_exist", + # "device": { + # "name": self.devices[0].name, + # "site": {"name": self.sites[0].name}, + # }, + # }, + 
# }, + # }, + # }, + # ], + # } + # response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) + + # self.assertIn( + # "does not exist", + # response.json().get("errors")[0].get("assigned_object"), + # ) + + # def test_create_ip_address_missing_device_interface_return_400(self): + # """Test create ip_address with missing device interface name.""" + # payload = { + # "id": str(uuid.uuid4()), + # "changes": [ + # { + # "change_id": str(uuid.uuid4()), + # "change_type": "create", + # "object_version": None, + # "object_type": "ipam.ipaddress", + # "object_id": None, + # "ref_id": "1", + # "data": { + # "address": "192.161.3.1/24", + # "assigned_object": { + # "interface": { + # "name": "not_exist", + # "device": { + # "site": {"name": self.sites[0].name}, + # }, + # }, + # }, + # }, + # }, + # ], + # } + # response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) + + # self.assertIn( + # "Interface device needs to have either id or name provided", + # response.json().get("errors", {}) # .get("assigned_object"), + # ) + + # def test_create_ip_address_missing_interface_device_site_return_400(self): + # """Test create ip_address with missing interface device site name.""" + # payload = { + # "id": str(uuid.uuid4()), + # "changes": [ + # { + # "change_id": str(uuid.uuid4()), + # "change_type": "create", + # "object_version": None, + # "object_type": "ipam.ipaddress", + # "object_id": None, + # "ref_id": "1", + # "data": { + # "address": "192.161.3.1/24", + # "assigned_object": { + # "interface": { + # "name": "not_exist", + # "device": { + # "name": self.devices[0].name, + # "site": {"facility": "Betha"}, + # }, + # }, + # }, + # }, + # }, + # ], + # } + # response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) + + # self.assertIn( + # "Interface device site needs to have either id or name provided", + # response.json().get("errors")[0].get("assigned_object"), + # ) + + # def 
test_primary_ip_address_not_found_return_400(self): + # """Test update primary ip address with site name.""" + # payload = { + # "id": str(uuid.uuid4()), + # "changes": [ + # { + # "change_id": str(uuid.uuid4()), + # "change_type": "update", + # "object_version": None, + # "object_type": "dcim.device", + # "data": { + # "name": self.devices[0].name, + # "site": {"name": self.sites[0].name}, + # "primary_ip6": { + # "address": "2001:DB8:0000:0000:244:17FF:FEB6:D37D/64", + # }, + # }, + # }, + # ], + # } + # response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) + + # self.assertEqual(response.json()[0], "primary IP not found") def test_add_primary_ip_address_to_device(self): """Add primary ip address to device.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "update", "object_version": None, "object_type": "dcim.device", + "object_id": self.devices[0].pk, "data": { "name": self.devices[0].name, "site": {"name": self.sites[0].name}, - "primary_ip4": { - "address": str(self.ip_addresses[0].address), - "assigned_object": { - "interface": { - "name": self.interfaces[0].name, - "device": { - "name": self.devices[0].name, - "site": {"name": self.sites[0].name}, - }, - }, - }, - }, + "primary_ip4": self.ip_addresses[0].pk }, }, ], } - response = self.send_request(payload) - + _ = self.send_request(payload) device_updated = Device.objects.get(id=10) - self.assertEqual(response.json().get("result"), "success") self.assertEqual(device_updated.name, self.devices[0].name) self.assertEqual(device_updated.primary_ip4, self.ip_addresses[0]) - def test_create_and_update_interface_with_compat_mac_address_field(self): - """Test create interface using backward compatible mac_address field.""" - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "create", - "object_version": 
None, - "object_type": "dcim.interface", - "object_id": None, - "data": { - "name": "Interface 6", - "type": "virtual", - "mac_address": "00:00:00:00:00:01", - "device": { - "id": self.devices[1].pk, - }, - }, - }, - ], - } - - response = self.send_request(payload) - self.assertEqual(response.json().get("result"), "success") - self.assertEqual(Interface.objects.count(), 6) - interface_id = Interface.objects.order_by('-id').first().id - self.assertEqual(Interface.objects.get(id=interface_id).mac_address, "00:00:00:00:00:01") - - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "update", - "object_version": None, - "object_type": "dcim.interface", - "object_id": interface_id, - "data": { - "name": "Interface 6", - "mac_address": "00:00:00:00:00:02", - "type": "virtual", - "device": { - "id": self.devices[1].pk, - }, - }, - }, - ], - } - response = self.send_request(payload) - self.assertEqual(response.json().get("result"), "success") - self.assertEqual(response.json().get("result"), "success") - self.assertEqual(Interface.objects.count(), 6) - self.assertEqual(Interface.objects.get(id=interface_id).mac_address, "00:00:00:00:00:02") - - def test_create_and_update_vminterface_with_compat_mac_address_field(self): - """Test create vminterface using backward compatible mac_address field.""" - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "create", - "object_version": None, - "object_type": "virtualization.vminterface", - "object_id": None, - "data": { - "name": "VM Interface 1", - "mac_address": "00:00:00:00:00:01", - "virtual_machine": { - "id": self.virtual_machines[0].pk, - }, - }, - }, - ], - } - - response = self.send_request(payload) - self.assertEqual(response.json().get("result"), "success") - self.assertEqual(VMInterface.objects.count(), 1) - interface_id = VMInterface.objects.order_by('-id').first().id - 
self.assertEqual(VMInterface.objects.get(id=interface_id).mac_address, "00:00:00:00:00:01") - - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "update", - "object_version": None, - "object_type": "virtualization.vminterface", - "object_id": interface_id, - "data": { - "name": "VM Interface 1", - "mac_address": "00:00:00:00:00:02", - "virtual_machine": { - "id": self.virtual_machines[0].pk, - }, - }, - }, - ], - } - response = self.send_request(payload) - self.assertEqual(response.json().get("result"), "success") - self.assertEqual(VMInterface.objects.count(), 1) - self.assertEqual(VMInterface.objects.get(id=interface_id).mac_address, "00:00:00:00:00:02") - def test_create_prefix_with_site_stored_as_scope(self): """Test create prefix with site stored as scope.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "ipam.prefix", "object_id": None, + "ref_id": "1", "data": { "prefix": "192.168.0.0/24", - "site": { - "name": self.sites[0].name, - }, + "scope_id": self.sites[0].pk, + "scope_type": "dcim.site", }, }, ], } - response = self.send_request(payload) - - self.assertEqual(response.json().get("result"), "success") + _ = self.send_request(payload) self.assertEqual(Prefix.objects.get(prefix="192.168.0.0/24").scope, self.sites[0]) def test_create_prefix_with_unknown_site_fails(self): """Test create prefix with unknown site fails.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "ipam.prefix", "object_id": None, + "ref_id": "1", "data": { "prefix": "192.168.0.0/24", - "site": { - "name": "unknown site" - }, + "scope_id": 99, + "scope_type": "dcim.site", }, }, ], } response = 
self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - - self.assertEqual(response.json().get("result"), "failed") self.assertIn( - 'site with name unknown site does not exist', - response.json().get("errors")[0].get("site"), + 'Please select a site.', + _get_error(response, "changes[0]", "scope"), ) self.assertFalse(Prefix.objects.filter(prefix="192.168.0.0/24").exists()) def test_create_virtualization_cluster_with_site_stored_as_scope(self): """Test create cluster with site stored as scope.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "virtualization.cluster", "object_id": None, + "ref_id": "1", "data": { "name": "Cluster 3", "type": { "name": self.cluster_types[0].name, }, - "site": { - "name": self.sites[0].name, - }, + "scope_id": self.sites[0].pk, + "scope_type": "dcim.site", }, }, ], } - response = self.send_request(payload) - - self.assertEqual(response.json().get("result"), "success") + _ = self.send_request(payload) self.assertEqual(Cluster.objects.get(name="Cluster 3").scope, self.sites[0]) def test_create_virtualmachine_with_cluster_site_stored_as_scope(self): """Test create virtualmachine with cluster site stored as scope.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ + { + "change_id": str(uuid.uuid4()), + "change_type": "update", + "object_version": None, + "object_type": "virtualization.cluster", + "object_id": self.clusters[0].pk, + "data": { + "scope_id": self.sites[0].pk, + "scope_type": "dcim.site", + }, + }, { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "virtualization.virtualmachine", "object_id": None, + "ref_id": "1", "data": { "name": "VM foobar", - "site": { - "name": self.sites[0].name, - }, - "cluster": { - "name": self.clusters[0].name, - 
"type": { - "name": self.cluster_types[0].name, - }, - "site": { - "name": self.sites[0].name, - }, - }, + "site": self.sites[0].pk, + "cluster": self.clusters[0].pk }, }, ], } - response = self.send_request(payload) - - self.assertEqual(response.json().get("result"), "success") + _ = self.send_request(payload) self.assertEqual(VirtualMachine.objects.get(name="VM foobar", site_id=self.sites[0].id).cluster.scope, self.sites[0]) diff --git a/netbox_diode_plugin/tests/test_api_diff_and_apply.py b/netbox_diode_plugin/tests/test_api_diff_and_apply.py new file mode 100644 index 0000000..c4ca36e --- /dev/null +++ b/netbox_diode_plugin/tests/test_api_diff_and_apply.py @@ -0,0 +1,184 @@ +#!/usr/bin/env python +# Copyright 2024 NetBox Labs Inc +"""Diode NetBox Plugin - Tests.""" + +import logging +from uuid import uuid4 + +from dcim.models import Device, Interface, Site +from django.contrib.auth import get_user_model +from ipam.models import IPAddress +from rest_framework import status +from users.models import Token +from utilities.testing import APITestCase + +logger = logging.getLogger(__name__) + +User = get_user_model() + + +class GenerateDiffAndApplyTestCase(APITestCase): + """GenerateDiff -> ApplyChangeSet test cases.""" + + def setUp(self): + """Set up the test case.""" + self.diff_url = "/netbox/api/plugins/diode/generate-diff/" + self.apply_url = "/netbox/api/plugins/diode/apply-change-set/" + self.user = User.objects.create_user(username="testcommonuser") + self.user_token = Token.objects.create(user=self.user) + self.user_header = {"HTTP_AUTHORIZATION": f"Token {self.user_token.key}"} + + self.add_permissions("netbox_diode_plugin.add_diode") + + def test_generate_diff_and_apply_create_interface_with_tags(self): + """Test generate diff and apply create interface with tags.""" + interface_uuid = str(uuid4()) + payload = { + "timestamp": 1, + "object_type": "dcim.interface", + "entity": { + "interface": { + "name": f"Interface {interface_uuid}", + "mtu": "1500", + 
"mode": "access", + "tags": [ + {"name": "tag 1"} + ], + "type": "1000base-t", + "device": { + "name": f"Device {uuid4()}", + "deviceType": { + "model": f"Device Type {uuid4()}", + "manufacturer": { + "name": f"Manufacturer {uuid4()}" + } + }, + "role": { + "name": f"Role {uuid4()}" + }, + "site": { + "name": f"Site {uuid4()}" + } + }, + "enabled": True, + "description": "Physical interface" + } + } + } + _, response = self.diff_and_apply(payload) + new_interface = Interface.objects.get(name=f"Interface {interface_uuid}") + self.assertEqual(new_interface.tags.count(), 1) + self.assertEqual(new_interface.tags.first().name, "tag 1") + + + def test_generate_diff_and_apply_create_site(self): + """Test generate diff and apply create site.""" + """Test generate diff create site.""" + site_uuid = str(uuid4()) + payload = { + "timestamp": 1, + "object_type": "dcim.site", + "entity": { + "site": { + "name": f"Site {site_uuid}", + "slug": f"site-{site_uuid}", + }, + } + } + + _, response = self.diff_and_apply(payload) + new_site = Site.objects.get(name=f"Site {site_uuid}") + self.assertEqual(new_site.slug, f"site-{site_uuid}") + + def test_generate_diff_and_apply_create_interface_with_primay_mac_address(self): + """Test generate diff and apply create interface with primary mac address.""" + interface_uuid = str(uuid4()) + payload = { + "timestamp": 1, + "object_type": "dcim.interface", + "entity": { + "interface": { + "name": f"Interface {interface_uuid}", + "type": "1000base-t", + "device": { + "name": f"Device {uuid4()}", + "role": { + "Name": f"Role {uuid4()}", + }, + "site": { + "Name": f"Site {uuid4()}", + }, + "deviceType": { + "manufacturer": { + "Name": f"Manufacturer {uuid4()}", + }, + "model": f"Device Type {uuid4()}", + }, + }, + "primaryMacAddress": { + "mac_address": "00:00:00:00:00:01", + }, + }, + } + } + + _, response = self.diff_and_apply(payload) + new_interface = Interface.objects.get(name=f"Interface {interface_uuid}") + 
self.assertEqual(new_interface.primary_mac_address.mac_address, "00:00:00:00:00:01") + + def test_generate_diff_and_apply_create_device_with_primary_ip4(self): + """Test generate diff and apply create device with primary ip4.""" + device_uuid = str(uuid4()) + interface_uuid = str(uuid4()) + addr = "192.168.1.1" + payload = { + "timestamp": 1, + "object_type": "ipam.ipaddress", + "entity": { + "ipAddress": { + "address": addr, + "assignedObjectInterface": { + "name": f"Interface {interface_uuid}", + "type": "1000base-t", + "device": { + "name": f"Device {device_uuid}", + "role": { + "name": f"Role {uuid4()}", + }, + "site": { + "name": f"Site {uuid4()}", + }, + "deviceType": { + "manufacturer": { + "name": f"Manufacturer {uuid4()}", + }, + "model": f"Device Type {uuid4()}", + }, + "primaryIp4": { + "address": addr, + }, + }, + }, + }, + }, + } + + _, response = self.diff_and_apply(payload) + new_ipaddress = IPAddress.objects.get(address=addr) + self.assertEqual(new_ipaddress.assigned_object.name, f"Interface {interface_uuid}") + device = Device.objects.get(name=f"Device {device_uuid}") + self.assertEqual(device.primary_ip4.pk, new_ipaddress.pk) + + def diff_and_apply(self, payload): + """Diff and apply the payload.""" + response1 = self.client.post( + self.diff_url, data=payload, format="json", **self.user_header + ) + self.assertEqual(response1.status_code, status.HTTP_200_OK) + diff = response1.json() + + response2 = self.client.post( + self.apply_url, data=diff, format="json", **self.user_header + ) + self.assertEqual(response2.status_code, status.HTTP_200_OK) + return (response1, response2) diff --git a/netbox_diode_plugin/tests/test_api_generate_diff.py b/netbox_diode_plugin/tests/test_api_generate_diff.py new file mode 100644 index 0000000..014a9cf --- /dev/null +++ b/netbox_diode_plugin/tests/test_api_generate_diff.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python +# Copyright 2024 NetBox Labs Inc +"""Diode NetBox Plugin - Tests.""" + +from dcim.models import Site 
+from django.contrib.auth import get_user_model +from rest_framework import status +from users.models import Token +from utilities.testing import APITestCase + +User = get_user_model() + +class GenerateDiffTestCase(APITestCase): + """GenerateDiff test cases.""" + + def setUp(self): + """Set up the test case.""" + self.url = "/netbox/api/plugins/diode/generate-diff/" + + self.user = User.objects.create_user(username="testcommonuser") + self.add_permissions("netbox_diode_plugin.add_diode") + self.user_token = Token.objects.create(user=self.user) + + self.user_header = {"HTTP_AUTHORIZATION": f"Token {self.user_token.key}"} + + self.site = Site.objects.create( + name="Site Generate Diff 1", + slug="site-generate-diff-1", + facility="Alpha", + description="First test site", + physical_address="123 Fake St Lincoln NE 68588", + shipping_address="123 Fake St Lincoln NE 68588", + comments="Lorem ipsum etcetera", + ) + + + def test_generate_diff_create_site(self): + """Test generate diff create site.""" + payload = { + "timestamp": 1, + "object_type": "dcim.site", + "entity": { + "site": { + "name": "A New Site", + "slug": "a-new-site", + }, + } + } + + response = self.send_request(payload) + self.assertEqual(response.status_code, status.HTTP_200_OK) + cs = response.json() + self.assertIsNotNone(cs.get("id")) + changes = cs.get("changes", []) + self.assertEqual(len(changes), 1) + change = changes[0] + self.assertEqual(change.get("object_type"), "dcim.site") + self.assertEqual(change.get("change_type"), "create") + self.assertEqual(change.get("object_id"), None) + self.assertIsNotNone(change.get("ref_id")) + + data = change.get("data", {}) + self.assertEqual(data.get("name"), "A New Site") + self.assertEqual(data.get("slug"), "a-new-site") + + def test_generate_diff_update_site(self): + """Test generate diff update site.""" + """Test generate diff create site.""" + payload = { + "timestamp": 1, + "object_type": "dcim.site", + "entity": { + "site": { + "name": "Site Generate 
Diff 1", + "slug": "site-generate-diff-1", + "comments": "An updated comment", + }, + } + } + + response = self.send_request(payload) + self.assertEqual(response.status_code, status.HTTP_200_OK) + cs = response.json() + self.assertIsNotNone(cs.get("id")) + changes = cs.get("changes", []) + self.assertEqual(len(changes), 1) + change = changes[0] + self.assertEqual(change.get("object_type"), "dcim.site") + self.assertEqual(change.get("change_type"), "update") + self.assertEqual(change.get("object_id"), self.site.id) + self.assertEqual(change.get("ref_id"), None) + self.assertEqual(change.get("data").get("name"), "Site Generate Diff 1") + + data = change.get("data", {}) + self.assertEqual(data.get("name"), "Site Generate Diff 1") + self.assertEqual(data.get("slug"), "site-generate-diff-1") + self.assertEqual(data.get("comments"), "An updated comment") + + + + def send_request(self, payload, status_code=status.HTTP_200_OK): + """Post the payload to the url and return the response.""" + response = self.client.post( + self.url, data=payload, format="json", **self.user_header + ) + self.assertEqual(response.status_code, status_code) + return response diff --git a/netbox_diode_plugin/tests/test_api_object_state.py b/netbox_diode_plugin/tests/test_api_object_state.py deleted file mode 100644 index d13ef35..0000000 --- a/netbox_diode_plugin/tests/test_api_object_state.py +++ /dev/null @@ -1,391 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode NetBox Plugin - Tests.""" - -from dcim.models import ( - Device, - DeviceRole, - DeviceType, - Interface, - Manufacturer, - Rack, - Site, -) -from django.contrib.auth import get_user_model -from ipam.models import IPAddress -from netaddr import IPNetwork -from rest_framework import status -from users.models import Token -from utilities.testing import APITestCase -from virtualization.models import Cluster, ClusterType - -User = get_user_model() - - -class ObjectStateTestCase(APITestCase): - """ObjectState test 
cases.""" - - @classmethod - def setUpClass(cls): - """Set up class.""" - super().setUpClass() - - cls.sites = ( - Site( - name="Site 1", - slug="site-1", - facility="Alpha", - description="First test site", - physical_address="123 Fake St Lincoln NE 68588", - shipping_address="123 Fake St Lincoln NE 68588", - comments="Lorem ipsum etcetera", - ), - Site( - name="Site 2", - slug="site-2", - facility="Bravo", - description="Second test site", - physical_address="725 Cyrus Valleys Suite 761 Douglasfort NE 57761", - shipping_address="725 Cyrus Valleys Suite 761 Douglasfort NE 57761", - comments="Lorem ipsum etcetera", - ), - Site( - name="Site 3", - slug="site-3", - facility="Charlie", - description="Third test site", - physical_address="2321 Dovie Dale East Cristobal AK 71959", - shipping_address="2321 Dovie Dale East Cristobal AK 71959", - comments="Lorem ipsum etcetera", - ), - ) - Site.objects.bulk_create(cls.sites) - - cls.manufacturer = ( - Manufacturer(name="Cisco", slug="cisco"), - Manufacturer(name="Manufacturer 2", slug="manufacturer-2"), - ) - - Manufacturer.objects.bulk_create(cls.manufacturer) - - cls.device_types = ( - DeviceType( - manufacturer=cls.manufacturer[0], - model="ISR4321", - slug="isr4321", - ), - DeviceType( - manufacturer=cls.manufacturer[1], - model="ISR4321", - slug="isr4321", - ), - DeviceType( - manufacturer=cls.manufacturer[1], - model="Device Type 2", - slug="device-type-2", - u_height=2, - ), - ) - DeviceType.objects.bulk_create(cls.device_types) - - cls.roles = ( - DeviceRole(name="Device Role 1", slug="device-role-1", color="ff0000"), - DeviceRole(name="Device Role 2", slug="device-role-2", color="00ff00"), - ) - DeviceRole.objects.bulk_create(cls.roles) - - cls.racks = ( - Rack(name="Rack 1", site=cls.sites[0]), - Rack(name="Rack 2", site=cls.sites[1]), - ) - Rack.objects.bulk_create(cls.racks) - - cluster_type = ClusterType.objects.create( - name="Cluster Type 1", slug="cluster-type-1" - ) - - cls.clusters = ( - 
Cluster(name="Cluster 1", type=cluster_type), - Cluster(name="Cluster 2", type=cluster_type), - ) - Cluster.objects.bulk_create(cls.clusters) - - cls.devices = ( - Device( - id=10, - device_type=cls.device_types[0], - role=cls.roles[0], - name="Device 1", - site=cls.sites[0], - rack=cls.racks[0], - cluster=cls.clusters[0], - local_context_data={"A": 1}, - ), - Device( - id=20, - device_type=cls.device_types[0], - role=cls.roles[0], - name="Device 2", - site=cls.sites[0], - rack=cls.racks[0], - cluster=cls.clusters[0], - local_context_data={"B": 2}, - ), - ) - Device.objects.bulk_create(cls.devices) - - cls.interfaces = ( - Interface(name="Interface 1", device=cls.devices[0], type="1000baset"), - Interface(name="Interface 2", device=cls.devices[0], type="1000baset"), - Interface(name="Interface 3", device=cls.devices[0], type="1000baset"), - Interface(name="Interface 4", device=cls.devices[0], type="1000baset"), - Interface(name="Interface 5", device=cls.devices[0], type="1000baset"), - ) - Interface.objects.bulk_create(cls.interfaces) - - cls.ip_addresses = ( - IPAddress( - address=IPNetwork("10.0.0.1/24"), assigned_object=cls.interfaces[0] - ), - IPAddress( - address=IPNetwork("192.0.2.1/24"), assigned_object=cls.interfaces[1] - ), - ) - IPAddress.objects.bulk_create(cls.ip_addresses) - - def setUp(self): - """Set up test.""" - self.root_user = User.objects.create_user( - username="root_user", is_staff=True, is_superuser=True - ) - self.root_token = Token.objects.create(user=self.root_user) - - self.user = User.objects.create_user(username="testcommonuser") - self.add_permissions("netbox_diode_plugin.view_diode") - self.user_token = Token.objects.create(user=self.user) - - # another_user does not have permission. 
- self.another_user = User.objects.create_user(username="another_user") - self.another_user_token = Token.objects.create(user=self.another_user) - - self.root_header = {"HTTP_AUTHORIZATION": f"Token {self.root_token.key}"} - self.user_header = {"HTTP_AUTHORIZATION": f"Token {self.user_token.key}"} - self.another_user_header = { - "HTTP_AUTHORIZATION": f"Token {self.another_user_token.key}" - } - - self.url = "/netbox/api/plugins/diode/object-state/" - - def test_return_object_state_using_id(self): - """Test searching using id parameter - Root User.""" - site_id = Site.objects.get(name=self.sites[0].name).id - query_parameters = {"id": site_id, "object_type": "dcim.site"} - - response = self.client.get(self.url, query_parameters, **self.root_header) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json().get("object").get("name"), "Site 1") - - def test_return_object_state_using_q(self): - """Test searching using q parameter - Root User.""" - query_parameters = {"q": "Site 2", "object_type": "dcim.site"} - - response = self.client.get(self.url, query_parameters, **self.root_header) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json().get("object").get("name"), "Site 2") - - def test_object_not_found_return_empty(self): - """Test empty searching - Root User.""" - query_parameters = {"q": "Site 10", "object_type": "dcim.site"} - - response = self.client.get(self.url, query_parameters, **self.root_header) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json(), {}) - - def test_missing_object_type_return_400(self): - """Test API behavior with missing object type - Root User.""" - query_parameters = {"q": "Site 10", "object_type": ""} - - response = self.client.get(self.url, query_parameters, **self.root_header) - - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - - def test_missing_q_and_id_parameters_return_400(self): - 
"""Test API behavior with missing q and ID parameters - Root User.""" - query_parameters = {"object_type": "dcim.site"} - - response = self.client.get(self.url, query_parameters, **self.root_header) - - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - - def test_request_user_not_authenticated_return_403(self): - """Test API behavior with user unauthenticated.""" - query_parameters = {"id": 1, "object_type": "dcim.site"} - - response = self.client.get(self.url, query_parameters) - - self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - - def test_common_user_with_permissions_get_object_state_using_id(self): - """Test searching using id parameter for Common User with permission.""" - site_id = Site.objects.get(name=self.sites[0]).id - query_parameters = {"id": site_id, "object_type": "dcim.site"} - - response = self.client.get(self.url, query_parameters, **self.user_header) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json().get("object").get("name"), "Site 1") - - def test_common_user_without_permissions_get_object_state_using_id_return_403(self): - """ - Test searching using id parameter for Common User without permission. - - User has no permissions. 
- """ - query_parameters = {"id": 1, "object_type": "dcim.device"} - - response = self.client.get( - self.url, query_parameters, **self.another_user_header - ) - - self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - - def test_return_object_state_using_q_objects_with_different_manufacturer_return_cisco_manufacturer( - self, - ): - """Test searching using q parameter - DevicesTypes with different manufacturer.""" - query_parameters = { - "q": "ISR4321", - "object_type": "dcim.devicetype", - "manufacturer__name": "Cisco", - } - - response = self.client.get(self.url, query_parameters, **self.root_header) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json().get("object").get("model"), "ISR4321") - self.assertEqual( - response.json().get("object").get("manufacturer").get("name"), "Cisco" - ) - - def test_invalid_object_state_using_q_objects_and_wrong_additional_attributes_return_400( - self, - ): - """Test searching using q parameter - invalid additional attributes.""" - query_parameters = { - "q": "ISR4321", - "object_type": "dcim.devicetype", - "attr_name": "manufacturer.name", - "attr_value": "Cisco", - } - - response = self.client.get(self.url, query_parameters, **self.root_header) - - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - - def test_common_user_with_permissions_get_ip_state_using_id(self): - """Test searching for ip using id.""" - query_parameters = { - "id": self.ip_addresses[0].id, - "object_type": "ipam.ipaddress", - } - - response = self.client.get(self.url, query_parameters, **self.user_header) - self.assertEqual(response.status_code, status.HTTP_200_OK) - - self.assertEqual(response.json().get("object_type"), "ipam.ipaddress") - self.assertEqual( - response.json().get("object").get("address"), - self.ip_addresses[0].address.__str__(), - ) - self.assertEqual( - response.json() - .get("object") - .get("assigned_object") - .get("interface") - .get("name"), - 
self.interfaces[0].name, - ) - - def test_common_user_with_permissions_get_device_state_using_q_objects(self): - """Test searching for device using q parameter.""" - query_parameters = { - "q": self.devices[0].name, - "object_type": "dcim.device", - "site": self.sites[0].id, - } - - response = self.client.get(self.url, query_parameters, **self.user_header) - self.assertEqual(response.status_code, status.HTTP_200_OK) - - self.assertEqual(response.json().get("object_type"), "dcim.device") - self.assertEqual( - response.json().get("object").get("name"), self.devices[0].name - ) - self.assertEqual( - response.json().get("object").get("site").get("name"), self.sites[0].name - ) - - def test_common_user_with_permissions_get_interface_state_using_q_objects(self): - """Test searching for interface using q parameter.""" - query_parameters = { - "q": self.interfaces[0].name, - "object_type": "dcim.interface", - "device": self.devices[0].id, - "device__site": self.sites[0].id, - } - - response = self.client.get(self.url, query_parameters, **self.user_header) - self.assertEqual(response.status_code, status.HTTP_200_OK) - - self.assertEqual(response.json().get("object_type"), "dcim.interface") - self.assertEqual( - response.json().get("object").get("name"), self.interfaces[0].name - ) - self.assertEqual( - response.json().get("object").get("device").get("name"), - self.devices[0].name, - ) - - def test_common_user_with_permissions_get_ip_state_using_q_objects(self): - """Test searching for ip using q parameter.""" - query_parameters = { - "q": self.ip_addresses[0].address.__str__(), - "object_type": "ipam.ipaddress", - "interface": self.interfaces[0].id, - "interface__device": self.devices[0].id, - "interface__device__site": self.sites[0].id, - } - - response = self.client.get(self.url, query_parameters, **self.user_header) - self.assertEqual(response.status_code, status.HTTP_200_OK) - - self.assertEqual(response.json().get("object_type"), "ipam.ipaddress") - self.assertEqual( 
- response.json().get("object").get("address"), - self.ip_addresses[0].address.__str__(), - ) - self.assertEqual( - response.json() - .get("object") - .get("assigned_object") - .get("interface") - .get("name"), - self.interfaces[0].name, - ) - - def test_common_user_get_object_state_with_branch_parameter_specified(self): - """Test searching accepts _branch parameter with additional attributes specified.""" - query_parameters = { - "q": self.ip_addresses[0].address.__str__(), - "object_type": "ipam.ipaddress", - "interface": self.interfaces[0].id, - "_branch": "" - } - - response = self.client.get(self.url, query_parameters, **self.user_header) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json().get("object_type"), "ipam.ipaddress") diff --git a/netbox_diode_plugin/tests/test_api_serializers.py b/netbox_diode_plugin/tests/test_api_serializers.py deleted file mode 100644 index 00e9547..0000000 --- a/netbox_diode_plugin/tests/test_api_serializers.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode NetBox Plugin - Tests.""" -from unittest.mock import MagicMock - -from dcim.models import Site -from django.test import TestCase -from extras.api.serializers import TagSerializer -from extras.models import Tag - -from netbox_diode_plugin.api.serializers import DiodeIPAddressSerializer, DiodeSiteSerializer, get_diode_serializer - - -class SerializersTestCase(TestCase): - """Test case for the serializers.""" - - def test_get_diode_serializer(self): - """Check the diode serializer is found.""" - site = Site.objects.create(name="test") - assert get_diode_serializer(site) == DiodeSiteSerializer - - tag = Tag.objects.create(name="test") - assert get_diode_serializer(tag) == TagSerializer - - - def test_get_assigned_object_returns_none_if_no_assigned_object(self): - """Check the assigned object is None if not provided.""" - obj = MagicMock() - obj.assigned_object = None - serializer = 
DiodeIPAddressSerializer() - result = serializer.get_assigned_object(obj) - self.assertIsNone(result) diff --git a/pyproject.toml b/pyproject.toml index 35f4dea..2a99b4e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,6 +56,8 @@ build-backend = "setuptools.build_meta" line-length = 140 exclude = [ "*_pb2*", + "netbox_diode_plugin/api/plugin_utils.py", + "docker/*", ] [tool.ruff.format] From fd79324b2e98bc6ac11a1a4397c4f19513d95907 Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Tue, 8 Apr 2025 10:42:16 -0400 Subject: [PATCH 18/52] fix: adjust result structure (#74) --- netbox_diode_plugin/api/common.py | 10 ++++++---- netbox_diode_plugin/tests/test_api_diff_and_apply.py | 2 +- netbox_diode_plugin/tests/test_api_generate_diff.py | 4 ++-- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/netbox_diode_plugin/api/common.py b/netbox_diode_plugin/api/common.py index 9bcb6b2..bf33c75 100644 --- a/netbox_diode_plugin/api/common.py +++ b/netbox_diode_plugin/api/common.py @@ -162,14 +162,16 @@ class ChangeSetResult: def to_dict(self) -> dict: """Convert the result to a dictionary.""" - if self.change_set: - return self.change_set.to_dict() - - return { + result = { "id": self.id, "errors": self.errors, } + if self.change_set: + result["change_set"] = self.change_set.to_dict() + + return result + def get_status_code(self) -> int: """Get the status code for the result.""" return status.HTTP_200_OK if not self.errors else status.HTTP_400_BAD_REQUEST diff --git a/netbox_diode_plugin/tests/test_api_diff_and_apply.py b/netbox_diode_plugin/tests/test_api_diff_and_apply.py index c4ca36e..f2d501d 100644 --- a/netbox_diode_plugin/tests/test_api_diff_and_apply.py +++ b/netbox_diode_plugin/tests/test_api_diff_and_apply.py @@ -175,7 +175,7 @@ def diff_and_apply(self, payload): self.diff_url, data=payload, format="json", **self.user_header ) self.assertEqual(response1.status_code, status.HTTP_200_OK) - diff = response1.json() + 
diff = response1.json().get("change_set", {}) response2 = self.client.post( self.apply_url, data=diff, format="json", **self.user_header diff --git a/netbox_diode_plugin/tests/test_api_generate_diff.py b/netbox_diode_plugin/tests/test_api_generate_diff.py index 014a9cf..4f70149 100644 --- a/netbox_diode_plugin/tests/test_api_generate_diff.py +++ b/netbox_diode_plugin/tests/test_api_generate_diff.py @@ -49,7 +49,7 @@ def test_generate_diff_create_site(self): response = self.send_request(payload) self.assertEqual(response.status_code, status.HTTP_200_OK) - cs = response.json() + cs = response.json().get("change_set", {}) self.assertIsNotNone(cs.get("id")) changes = cs.get("changes", []) self.assertEqual(len(changes), 1) @@ -80,7 +80,7 @@ def test_generate_diff_update_site(self): response = self.send_request(payload) self.assertEqual(response.status_code, status.HTTP_200_OK) - cs = response.json() + cs = response.json().get("change_set", {}) self.assertIsNotNone(cs.get("id")) changes = cs.get("changes", []) self.assertEqual(len(changes), 1) From 905e2c8f00248561e3b7a75c9c86e0ef1e9475c8 Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Thu, 10 Apr 2025 11:09:17 -0400 Subject: [PATCH 19/52] feat: basic support for custom fields (#75) basic support for custom fields --- netbox_diode_plugin/api/applier.py | 36 ++-- netbox_diode_plugin/api/common.py | 46 ++++- netbox_diode_plugin/api/differ.py | 73 ++++++-- netbox_diode_plugin/api/matcher.py | 160 +++++++++++++---- netbox_diode_plugin/api/plugin_utils.py | 94 +++++++++- netbox_diode_plugin/api/supported_models.py | 1 + netbox_diode_plugin/api/transformer.py | 166 +++++++++++++++--- netbox_diode_plugin/api/views.py | 2 +- .../tests/test_api_diff_and_apply.py | 157 ++++++++++++++++- .../tests/test_api_generate_diff.py | 151 +++++++++++++++- 10 files changed, 805 insertions(+), 81 deletions(-) diff --git a/netbox_diode_plugin/api/applier.py b/netbox_diode_plugin/api/applier.py index 
101f30f..4c974d8 100644 --- a/netbox_diode_plugin/api/applier.py +++ b/netbox_diode_plugin/api/applier.py @@ -18,7 +18,7 @@ logger = logging.getLogger(__name__) -def apply_changeset(change_set: ChangeSet) -> ChangeSetResult: +def apply_changeset(change_set: ChangeSet, request) -> ChangeSetResult: """Apply a change set.""" _validate_change_set(change_set) @@ -33,7 +33,7 @@ def apply_changeset(change_set: ChangeSet) -> ChangeSetResult: try: model_class = get_object_type_model(object_type) data = _pre_apply(model_class, change, created) - _apply_change(data, model_class, change, created) + _apply_change(data, model_class, change, created, request) except ValidationError as e: raise _err_from_validation_error(e, f"changes[{i}]") except ObjectDoesNotExist: @@ -45,11 +45,11 @@ def apply_changeset(change_set: ChangeSet) -> ChangeSetResult: id=change_set.id, ) -def _apply_change(data: dict, model_class: models.Model, change: Change, created: dict): +def _apply_change(data: dict, model_class: models.Model, change: Change, created: dict, request): serializer_class = get_serializer_for_model(model_class) change_type = change.change_type if change_type == ChangeType.CREATE.value: - serializer = serializer_class(data=data) + serializer = serializer_class(data=data, context={"request": request}) serializer.is_valid(raise_exception=True) instance = serializer.save() created[change.ref_id] = instance @@ -57,30 +57,46 @@ def _apply_change(data: dict, model_class: models.Model, change: Change, created elif change_type == ChangeType.UPDATE.value: if object_id := change.object_id: instance = model_class.objects.get(id=object_id) - serializer = serializer_class(instance, data=data, partial=True) + serializer = serializer_class(instance, data=data, partial=True, context={"request": request}) serializer.is_valid(raise_exception=True) serializer.save() # create and update in a same change set elif change.ref_id and (instance := created[change.ref_id]): - serializer = 
serializer_class(instance, data=data, partial=True) + serializer = serializer_class(instance, data=data, partial=True, context={"request": request}) serializer.is_valid(raise_exception=True) serializer.save() +def _set_path(data, path, value): + path = path.split(".") + key = path.pop(0) + while len(path) > 0: + data = data[key] + key = path.pop(0) + data[key] = value + +def _get_path(data, path): + path = path.split(".") + v = data + for p in path: + v = v[p] + return v + def _pre_apply(model_class: models.Model, change: Change, created: dict): data = change.data.copy() # resolve foreign key references to new objects for ref_field in change.new_refs: - if isinstance(data[ref_field], (list, tuple)): + v = _get_path(data, ref_field) + if isinstance(v, (list, tuple)): ref_list = [] - for ref in data[ref_field]: + for ref in v: if isinstance(ref, str): ref_list.append(created[ref].pk) elif isinstance(ref, int): ref_list.append(ref) - data[ref_field] = ref_list + _set_path(data, ref_field, ref_list) else: - data[ref_field] = created[data[ref_field]].pk + _set_path(data, ref_field, created[v].pk) # ignore? fields that are not in the data model (error?) 
allowed_fields = legal_fields(model_class) diff --git a/netbox_diode_plugin/api/common.py b/netbox_diode_plugin/api/common.py index bf33c75..65a9a1f 100644 --- a/netbox_diode_plugin/api/common.py +++ b/netbox_diode_plugin/api/common.py @@ -13,6 +13,7 @@ from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError from django.db import models +from extras.models import CustomField from rest_framework import status logger = logging.getLogger("netbox.diode_data") @@ -114,13 +115,41 @@ def validate(self) -> dict[str, list[str]]: errors[change.object_type] = rel_errors try: + custom_fields = change_data.pop('custom_fields', None) + if custom_fields: + self._validate_custom_fields(custom_fields, model) + instance = model(**change_data) instance.clean_fields(exclude=excluded_relation_fields) except ValidationError as e: - errors[change.object_type].update(e.error_dict) + errors[change.object_type].update(_error_dict(e)) return errors or None + def _validate_custom_fields(self, data: dict, model: models.Model) -> None: + custom_fields = { + cf.name: cf for cf in CustomField.objects.get_for_model(model) + } + + unknown_errors = [] + for field_name, value in data.items(): + if field_name not in custom_fields: + unknown_errors.append(f"Unknown field name '{field_name}' in custom field data.") + continue + if unknown_errors: + raise ValidationError({ + "custom_fields": unknown_errors + }) + + req_errors = [] + for field_name, cf in custom_fields.items(): + if cf.required and field_name not in data: + req_errors.append(f"Custom field '{field_name}' is required.") + if req_errors: + raise ValidationError({ + "custom_fields": req_errors + }) + def _validate_relations(self, change_data: dict, model: models.Model) -> tuple[list[str], dict]: # check that there is some value for every required # reference field, but don't validate the actual reference. 
@@ -191,3 +220,18 @@ def __str__(self): if self.errors: return f"{self.message}: {self.errors}" return self.message + +def _error_dict(e: ValidationError) -> dict: + """Convert a ValidationError to a dictionary.""" + if hasattr(e, "error_dict"): + return e.error_dict + return { + "__all__": e.error_list + } + +@dataclass +class AutoSlug: + """A class that marks an auto-generated slug.""" + + field_name: str + value: str diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index a1721a0..55a990f 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -3,6 +3,7 @@ """Diode NetBox Plugin - API - Differ.""" import copy +import datetime import logging from django.contrib.contenttypes.models import ContentType @@ -12,7 +13,7 @@ from .common import Change, ChangeSet, ChangeSetException, ChangeSetResult, ChangeType from .plugin_utils import get_primary_value, legal_fields from .supported_models import extract_supported_models -from .transformer import cleanup_unresolved_references, transform_proto_json +from .transformer import cleanup_unresolved_references, set_custom_field_defaults, transform_proto_json logger = logging.getLogger(__name__) @@ -68,9 +69,31 @@ def prechange_data_from_instance(instance) -> dict: # noqa: C901 else: prechange_data[field_name] = value + if hasattr(instance, "get_custom_fields"): + custom_field_values = instance.get_custom_fields() + cfmap = {} + for cf, value in custom_field_values.items(): + if isinstance(value, (datetime.datetime, datetime.date)): + cfmap[cf.name] = value + else: + cfmap[cf.name] = cf.serialize(value) + prechange_data["custom_fields"] = cfmap + return prechange_data +def _harmonize_formats(prechange_data: dict, postchange_data: dict): + for k, v in prechange_data.items(): + if isinstance(v, datetime.datetime): + prechange_data[k] = v.strftime("%Y-%m-%dT%H:%M:%SZ") + elif isinstance(v, datetime.date): + prechange_data[k] = v.strftime("%Y-%m-%d") + elif 
isinstance(v, int) and k in postchange_data: + postchange_data[k] = int(postchange_data[k]) + elif isinstance(v, dict): + _harmonize_formats(v, postchange_data.get(k, {})) + + def clean_diff_data(data: dict, exclude_empty_values: bool = True) -> dict: """Clean diff data by removing null values.""" result = {} @@ -80,8 +103,10 @@ def clean_diff_data(data: dict, exclude_empty_values: bool = True) -> dict: continue if isinstance(v, list) and len(v) == 0: continue - if isinstance(v, dict) and len(v) == 0: - continue + if isinstance(v, dict): + if len(v) == 0: + continue + v = clean_diff_data(v, exclude_empty_values) if isinstance(v, str) and v == "": continue result[k] = v @@ -100,7 +125,7 @@ def diff_to_change( if change_type == ChangeType.UPDATE and not len(changed_attrs) > 0: change_type = ChangeType.NOOP - primary_value = get_primary_value(prechange_data | postchange_data, object_type) + primary_value = str(get_primary_value(prechange_data | postchange_data, object_type)) if primary_value is None: primary_value = "(unnamed)" @@ -111,6 +136,8 @@ def diff_to_change( change = Change( change_type=change_type, + before=_tidy(prechange_data), + data={}, object_type=object_type, object_id=prior_id if isinstance(prior_id, int) else None, ref_id=ref_id, @@ -119,17 +146,13 @@ def diff_to_change( ) if change_type != ChangeType.NOOP: - postchange_data_clean = clean_diff_data(postchange_data) - change.data = sort_dict_recursively(postchange_data_clean) - else: - change.data = {} - - if change_type == ChangeType.UPDATE or change_type == ChangeType.NOOP: - prechange_data_clean = clean_diff_data(prechange_data) - change.before = sort_dict_recursively(prechange_data_clean) + change.data = _tidy(postchange_data) return change +def _tidy(data: dict) -> dict: + return sort_dict_recursively(clean_diff_data(data)) + def sort_dict_recursively(d): """Recursively sorts a dictionary by keys.""" if isinstance(d, dict): @@ -161,7 +184,11 @@ def generate_changeset(entity: dict, object_type: 
str) -> ChangeSetResult: # prior state is a model instance else: prechange_data = prechange_data_from_instance(instance) - + # merge the prior state that we don't want to overwrite with the new state + # this is also important for custom fields because they do not appear to + # respect partial update serialization. + entity = _partially_merge(prechange_data, entity, instance) + _harmonize_formats(prechange_data, entity) changed_data = shallow_compare_dict( prechange_data, entity, ) @@ -187,7 +214,25 @@ def generate_changeset(entity: dict, object_type: str) -> ChangeSetResult: if errors := change_set.validate(): raise ChangeSetException("Invalid change set", errors) - return ChangeSetResult( + + cs = ChangeSetResult( id=change_set.id, change_set=change_set, ) + return cs + +def _partially_merge(prechange_data: dict, postchange_data: dict, instance) -> dict: + """Merge lists and custom_fields rather than replacing the full value...""" + result = {} + for key, value in postchange_data.items(): + # TODO: partially merge lists like tags? all lists? + result[key] = value + + # these are fully merged in from the prechange state because + # they don't respect partial update serialization. 
+ if "custom_fields" in postchange_data: + for key, value in prechange_data.get("custom_fields", {}).items(): + if value is not None and key not in postchange_data["custom_fields"]: + result["custom_fields"][key] = value + set_custom_field_defaults(result, instance) + return result diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py index 8f11735..8fe0a1b 100644 --- a/netbox_diode_plugin/api/matcher.py +++ b/netbox_diode_plugin/api/matcher.py @@ -14,10 +14,12 @@ from django.core.exceptions import FieldDoesNotExist from django.db import models from django.db.models import F, Value +from django.db.models.fields import SlugField from django.db.models.lookups import Exact from django.db.models.query_utils import Q +from extras.models.customfields import CustomField -from .common import UnresolvedReference +from .common import AutoSlug, UnresolvedReference from .plugin_utils import content_type_id, get_object_type, get_object_type_model logger = logging.getLogger(__name__) @@ -73,7 +75,6 @@ ], } - @dataclass class ObjectMatchCriteria: """ @@ -100,10 +101,10 @@ def __hash__(self): def has_required_fields(self, data) -> bool: """Returns True if the data given contains a value for all fields referenced by the constraint.""" - return all(field in data for field in self.get_refs()) + return all(field in data for field in self._get_refs()) @cache - def get_refs(self) -> set[str]: + def _get_refs(self) -> set[str]: """Returns a set of all field names referenced by the constraint.""" refs = set() if self.fields: @@ -114,7 +115,7 @@ def get_refs(self) -> set[str]: return frozenset(refs) @cache - def get_insensitive_refs(self) -> set[str]: + def _get_insensitive_refs(self) -> set[str]: """ Returns a set of all field names that should be compared in a case insensitive manner. 
@@ -149,8 +150,8 @@ def fingerprint(self, data: dict) -> str|None: return None # sort the fields by name - sorted_fields = sorted(self.get_refs()) - insensitive = self.get_insensitive_refs() + sorted_fields = sorted(self._get_refs()) + insensitive = self._get_insensitive_refs() values = [] for field in sorted_fields: value = data[field] @@ -160,9 +161,8 @@ def fingerprint(self, data: dict) -> str|None: if field in insensitive: value = value.lower() values.append(value) - # logger.debug(f"fingerprint {self}: {data} -> values: {tuple(values)}") - return hash(tuple(values)) + return hash((self.model_class.__name__, self.name, tuple(values))) def _check_condition(self, data) -> bool: if self.condition is None: @@ -170,15 +170,15 @@ def _check_condition(self, data) -> bool: # TODO: handle evaluating complex conditions, # there are only simple ones currently if self.condition.connector != Q.AND: - logger.error(f"Unhandled condition {self.condition}") + logger.warning(f"Unhandled condition {self.condition}") return False if len(self.condition.children) != 1: - logger.error(f"Unhandled condition {self.condition}") + logger.warning(f"Unhandled condition {self.condition}") return False if len(self.condition.children[0]) != 2: - logger.error(f"Unhandled condition {self.condition}") + logger.warning(f"Unhandled condition {self.condition}") return False k, v = self.condition.children[0] @@ -209,18 +209,17 @@ def _build_fields_queryset(self, data) -> models.QuerySet: for field_name in self.fields: field = self.model_class._meta.get_field(field_name) if field_name not in data: - logger.error(f" * cannot build fields queryset for {self.name} (missing field {field_name})") + logger.debug(f" * cannot build fields queryset for {self.name} (missing field {field_name})") return None # cannot match, missing field data lookup_value = data.get(field_name) if isinstance(lookup_value, UnresolvedReference): - logger.error(f" * cannot build fields queryset for {self.name} ({field_name} is 
unresolved reference)") + logger.debug(f" * cannot build fields queryset for {self.name} ({field_name} is unresolved reference)") return None # cannot match, missing field data if isinstance(lookup_value, dict): - logger.error(f" * cannot build fields queryset for {self.name} ({field_name} is dict)") + logger.debug(f" * cannot build fields queryset for {self.name} ({field_name} is dict)") return None # cannot match, missing field data lookup_kwargs[field.name] = lookup_value - # logger.error(f" * query kwargs: {lookup_kwargs}") qs = self.model_class.objects.filter(**lookup_kwargs) if self.condition: qs = qs.filter(self.condition) @@ -242,10 +241,10 @@ def _build_expressions_queryset(self, data) -> models.QuerySet: refs = [F(ref) for ref in _get_refs(expr)] for ref in refs: if ref not in replacements: - logger.error(f" * cannot build expr queryset for {self.name} (missing field {ref})") + logger.debug(f" * cannot build expr queryset for {self.name} (missing field {ref})") return None # cannot match, missing field data if isinstance(replacements[ref], UnresolvedReference): - logger.error(f" * cannot build expr queryset for {self.name} ({ref} is unresolved reference)") + logger.debug(f" * cannot build expr queryset for {self.name} ({ref} is unresolved reference)") return None # cannot match, missing field data rhs = expr.replace_expressions(replacements) @@ -270,12 +269,114 @@ def _prepare_data(self, data: dict) -> dict: except FieldDoesNotExist: continue - # logger.error(f"prepared data: {data} -> {prepared}") return prepared -@lru_cache(maxsize=256) -def get_model_matchers(model_class) -> list[ObjectMatchCriteria]: +@dataclass +class CustomFieldMatcher: + """A matcher for a unique custom field.""" + + name: str + custom_field: str + model_class: Type[models.Model] + + def fingerprint(self, data: dict) -> str|None: + """Fingerprint the custom field value.""" + if not self.has_required_fields(data): + return None + + value = data.get("custom_fields", 
{}).get(self.custom_field) + if value is None: + return None + + return hash((self.model_class.__name__, self.name, value)) + + def build_queryset(self, data: dict) -> models.QuerySet: + """Build a queryset for the custom field.""" + if not self.has_required_fields(data): + return None + + value = data.get("custom_fields", {}).get(self.custom_field) + if value is None: + return None + + return self.model_class.objects.filter(**{f'custom_field_data__{self.custom_field}': value}) + + def has_required_fields(self, data: dict) -> bool: + """Returns True if the data given contains a value for all fields referenced by the constraint.""" + return self.custom_field in data.get("custom_fields", {}) + +@dataclass +class AutoSlugMatcher: + """A special matcher that tries to match on auto generated slugs.""" + + name: str + slug_field: str + model_class: Type[models.Model] + + def fingerprint(self, data: dict) -> str|None: + """Fingerprint the auto-generated slug value.""" + if not self.has_required_fields(data): + return None + + slug = data.get('_auto_slug', None) + if slug is None: + return None + + return hash((self.model_class.__name__, self.name, slug.value)) + + def build_queryset(self, data: dict) -> models.QuerySet: + """Build a queryset for the auto-generated slug field.""" + if not self.has_required_fields(data): + return None + + slug = data.get('_auto_slug', None) + if slug is None: + return None + + return self.model_class.objects.filter(**{f'{self.slug_field}': str(slug.value)}) + + def has_required_fields(self, data: dict) -> bool: + """Returns True if the data given contains a value for all fields referenced by the constraint.""" + return '_auto_slug' in data + + +def get_model_matchers(model_class) -> list: + """Extract unique constraints from a Django model.""" + matchers = [] + matchers += _get_model_matchers(model_class) + + # TODO(ltucker): this should also be cacheable, but we need a signal to invalidate + if hasattr(model_class, "get_custom_fields"): + unique_custom_fields = 
CustomField.objects.get_for_model(model_class).filter(unique=True) + if unique_custom_fields: + for cf in unique_custom_fields: + matchers.append( + CustomFieldMatcher( + model_class=model_class, + custom_field=cf.name, + name=f"unique_custom_field_{cf.name}", + ) + ) + matchers += _get_autoslug_matchers(model_class) + return matchers + +@lru_cache(maxsize=256) +def _get_autoslug_matchers(model_class) -> list: + matchers = [] + for field in model_class._meta.fields: + if isinstance(field, SlugField): + matchers.append( + AutoSlugMatcher( + model_class=model_class, + slug_field=field.name, + name=f"unique_autoslug_{field.name}", + ) + ) + break + return matchers + +@lru_cache(maxsize=256) +def _get_model_matchers(model_class) -> list[ObjectMatchCriteria]: object_type = get_object_type(model_class) matchers = _LOGICAL_MATCHERS.get(object_type, lambda: [])() @@ -317,7 +418,7 @@ def get_model_matchers(model_class) -> list[ObjectMatchCriteria]: ) ) else: - logger.error( + logger.debug( f"Constraint {constraint.name} on {model_class.__name__} had no fields or expressions (skipped)" ) # (this shouldn't happen / enforced by django) @@ -383,7 +484,6 @@ def _fingerprint_all(data: dict) -> str: values.append(_fingerprint_all(v)) else: values.append(v) - # logger.error(f"_fingerprint_all: {data} -> values: {tuple(values)}") return hash(tuple(values)) @@ -420,21 +520,21 @@ def find_existing_object(data: dict, object_type: str): Returns the object if found, otherwise None. 
""" - logger.error(f"resolving {data}") + logger.debug(f"resolving {data}") model_class = get_object_type_model(object_type) for matcher in get_model_matchers(model_class): if not matcher.has_required_fields(data): - logger.error(f" * skipped matcher {matcher.name} (missing fields)") + logger.debug(f" * skipped matcher {matcher.name} (missing fields)") continue q = matcher.build_queryset(data) if q is None: - logger.error(f" * skipped matcher {matcher.name} (no queryset)") + logger.debug(f" * skipped matcher {matcher.name} (no queryset)") continue - logger.error(f" * trying query {q.query}") + logger.debug(f" * trying query {q.query}") existing = q.order_by('pk').first() if existing is not None: - logger.error(f" -> Found object {existing} via {matcher.name}") + logger.debug(f" -> Found object {existing} via {matcher.name}") return existing - logger.error(f" -> No object found for matcher {matcher.name}") - logger.error(" * No matchers found an existing object") + logger.debug(f" -> No object found for matcher {matcher.name}") + logger.debug(" * No matchers found an existing object") return None diff --git a/netbox_diode_plugin/api/plugin_utils.py b/netbox_diode_plugin/api/plugin_utils.py index b526a5c..9571f47 100644 --- a/netbox_diode_plugin/api/plugin_utils.py +++ b/netbox_diode_plugin/api/plugin_utils.py @@ -1,7 +1,7 @@ """Diode plugin helpers.""" # Generated code. DO NOT EDIT. 
-# Timestamp: 2025-04-01 21:05:16Z +# Timestamp: 2025-04-10 14:44:19Z from dataclasses import dataclass from functools import lru_cache @@ -39,7 +39,99 @@ class RefInfo: is_generic: bool = False is_many: bool = False +CUSTOM_FIELD_OBJECT_REFERENCE_TYPE = 'diode.custom_field_object_reference' + _JSON_REF_INFO = { + 'diode.custom_field_object_reference': { + 'asn': RefInfo(object_type='ipam.asn', field_name='object', is_generic=True), + 'asnRange': RefInfo(object_type='ipam.asnrange', field_name='object', is_generic=True), + 'aggregate': RefInfo(object_type='ipam.aggregate', field_name='object', is_generic=True), + 'cable': RefInfo(object_type='dcim.cable', field_name='object', is_generic=True), + 'cablePath': RefInfo(object_type='dcim.cablepath', field_name='object', is_generic=True), + 'cableTermination': RefInfo(object_type='dcim.cabletermination', field_name='object', is_generic=True), + 'circuit': RefInfo(object_type='circuits.circuit', field_name='object', is_generic=True), + 'circuitGroup': RefInfo(object_type='circuits.circuitgroup', field_name='object', is_generic=True), + 'circuitGroupAssignment': RefInfo(object_type='circuits.circuitgroupassignment', field_name='object', is_generic=True), + 'circuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='object', is_generic=True), + 'circuitType': RefInfo(object_type='circuits.circuittype', field_name='object', is_generic=True), + 'cluster': RefInfo(object_type='virtualization.cluster', field_name='object', is_generic=True), + 'clusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='object', is_generic=True), + 'clusterType': RefInfo(object_type='virtualization.clustertype', field_name='object', is_generic=True), + 'consolePort': RefInfo(object_type='dcim.consoleport', field_name='object', is_generic=True), + 'consoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='object', is_generic=True), + 'contact': RefInfo(object_type='tenancy.contact', 
field_name='object', is_generic=True), + 'contactAssignment': RefInfo(object_type='tenancy.contactassignment', field_name='object', is_generic=True), + 'contactGroup': RefInfo(object_type='tenancy.contactgroup', field_name='object', is_generic=True), + 'contactRole': RefInfo(object_type='tenancy.contactrole', field_name='object', is_generic=True), + 'device': RefInfo(object_type='dcim.device', field_name='object', is_generic=True), + 'deviceBay': RefInfo(object_type='dcim.devicebay', field_name='object', is_generic=True), + 'deviceRole': RefInfo(object_type='dcim.devicerole', field_name='object', is_generic=True), + 'deviceType': RefInfo(object_type='dcim.devicetype', field_name='object', is_generic=True), + 'fhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='object', is_generic=True), + 'fhrpGroupAssignment': RefInfo(object_type='ipam.fhrpgroupassignment', field_name='object', is_generic=True), + 'frontPort': RefInfo(object_type='dcim.frontport', field_name='object', is_generic=True), + 'ikePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='object', is_generic=True), + 'ikeProposal': RefInfo(object_type='vpn.ikeproposal', field_name='object', is_generic=True), + 'ipAddress': RefInfo(object_type='ipam.ipaddress', field_name='object', is_generic=True), + 'ipRange': RefInfo(object_type='ipam.iprange', field_name='object', is_generic=True), + 'ipSecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='object', is_generic=True), + 'ipSecProfile': RefInfo(object_type='vpn.ipsecprofile', field_name='object', is_generic=True), + 'ipSecProposal': RefInfo(object_type='vpn.ipsecproposal', field_name='object', is_generic=True), + 'interface': RefInfo(object_type='dcim.interface', field_name='object', is_generic=True), + 'inventoryItem': RefInfo(object_type='dcim.inventoryitem', field_name='object', is_generic=True), + 'inventoryItemRole': RefInfo(object_type='dcim.inventoryitemrole', field_name='object', is_generic=True), + 'l2Vpn': 
RefInfo(object_type='vpn.l2vpn', field_name='object', is_generic=True), + 'l2VpnTermination': RefInfo(object_type='vpn.l2vpntermination', field_name='object', is_generic=True), + 'location': RefInfo(object_type='dcim.location', field_name='object', is_generic=True), + 'macAddress': RefInfo(object_type='dcim.macaddress', field_name='object', is_generic=True), + 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='object', is_generic=True), + 'module': RefInfo(object_type='dcim.module', field_name='object', is_generic=True), + 'moduleBay': RefInfo(object_type='dcim.modulebay', field_name='object', is_generic=True), + 'moduleType': RefInfo(object_type='dcim.moduletype', field_name='object', is_generic=True), + 'platform': RefInfo(object_type='dcim.platform', field_name='object', is_generic=True), + 'powerFeed': RefInfo(object_type='dcim.powerfeed', field_name='object', is_generic=True), + 'powerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='object', is_generic=True), + 'powerPanel': RefInfo(object_type='dcim.powerpanel', field_name='object', is_generic=True), + 'powerPort': RefInfo(object_type='dcim.powerport', field_name='object', is_generic=True), + 'prefix': RefInfo(object_type='ipam.prefix', field_name='object', is_generic=True), + 'provider': RefInfo(object_type='circuits.provider', field_name='object', is_generic=True), + 'providerAccount': RefInfo(object_type='circuits.provideraccount', field_name='object', is_generic=True), + 'providerNetwork': RefInfo(object_type='circuits.providernetwork', field_name='object', is_generic=True), + 'rir': RefInfo(object_type='ipam.rir', field_name='object', is_generic=True), + 'rack': RefInfo(object_type='dcim.rack', field_name='object', is_generic=True), + 'rackReservation': RefInfo(object_type='dcim.rackreservation', field_name='object', is_generic=True), + 'rackRole': RefInfo(object_type='dcim.rackrole', field_name='object', is_generic=True), + 'rackType': RefInfo(object_type='dcim.racktype', 
field_name='object', is_generic=True), + 'rearPort': RefInfo(object_type='dcim.rearport', field_name='object', is_generic=True), + 'region': RefInfo(object_type='dcim.region', field_name='object', is_generic=True), + 'role': RefInfo(object_type='ipam.role', field_name='object', is_generic=True), + 'routeTarget': RefInfo(object_type='ipam.routetarget', field_name='object', is_generic=True), + 'service': RefInfo(object_type='ipam.service', field_name='object', is_generic=True), + 'site': RefInfo(object_type='dcim.site', field_name='object', is_generic=True), + 'siteGroup': RefInfo(object_type='dcim.sitegroup', field_name='object', is_generic=True), + 'tag': RefInfo(object_type='extras.tag', field_name='object', is_generic=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='object', is_generic=True), + 'tenantGroup': RefInfo(object_type='tenancy.tenantgroup', field_name='object', is_generic=True), + 'tunnel': RefInfo(object_type='vpn.tunnel', field_name='object', is_generic=True), + 'tunnelGroup': RefInfo(object_type='vpn.tunnelgroup', field_name='object', is_generic=True), + 'tunnelTermination': RefInfo(object_type='vpn.tunneltermination', field_name='object', is_generic=True), + 'vlan': RefInfo(object_type='ipam.vlan', field_name='object', is_generic=True), + 'vlanGroup': RefInfo(object_type='ipam.vlangroup', field_name='object', is_generic=True), + 'vlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='object', is_generic=True), + 'vlanTranslationRule': RefInfo(object_type='ipam.vlantranslationrule', field_name='object', is_generic=True), + 'vmInterface': RefInfo(object_type='virtualization.vminterface', field_name='object', is_generic=True), + 'vrf': RefInfo(object_type='ipam.vrf', field_name='object', is_generic=True), + 'virtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='object', is_generic=True), + 'virtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='object', 
is_generic=True), + 'virtualCircuitTermination': RefInfo(object_type='circuits.virtualcircuittermination', field_name='object', is_generic=True), + 'virtualCircuitType': RefInfo(object_type='circuits.virtualcircuittype', field_name='object', is_generic=True), + 'virtualDeviceContext': RefInfo(object_type='dcim.virtualdevicecontext', field_name='object', is_generic=True), + 'virtualDisk': RefInfo(object_type='virtualization.virtualdisk', field_name='object', is_generic=True), + 'virtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='object', is_generic=True), + 'wirelessLan': RefInfo(object_type='wireless.wirelesslan', field_name='object', is_generic=True), + 'wirelessLanGroup': RefInfo(object_type='wireless.wirelesslangroup', field_name='object', is_generic=True), + 'wirelessLink': RefInfo(object_type='wireless.wirelesslink', field_name='object', is_generic=True), + }, 'circuits.circuit': { 'assignments': RefInfo(object_type='circuits.circuitgroupassignment', field_name='assignments', is_many=True), 'provider': RefInfo(object_type='circuits.provider', field_name='provider'), diff --git a/netbox_diode_plugin/api/supported_models.py b/netbox_diode_plugin/api/supported_models.py index 3ec47ce..b2b7a09 100644 --- a/netbox_diode_plugin/api/supported_models.py +++ b/netbox_diode_plugin/api/supported_models.py @@ -61,6 +61,7 @@ def extract_supported_models() -> dict[str, dict]: extracted_models[object_type] = { "fields": fields, "prerequisites": prerequisites, + "model": model, } for related_model in related_models: related_object_type = f"{related_model._meta.app_label}.{related_model._meta.model_name}" diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index 12e3518..cfb246d 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -3,6 +3,7 @@ """Diode NetBox Plugin - API - Object resolution for diffing.""" import copy +import datetime import json import 
logging import re @@ -13,10 +14,11 @@ import graphlib from django.core.exceptions import ValidationError from django.utils.text import slugify +from extras.models.customfields import CustomField -from .common import ChangeSetException, UnresolvedReference +from .common import AutoSlug, ChangeSetException, UnresolvedReference from .matcher import find_existing_object, fingerprint -from .plugin_utils import get_json_ref_info, get_primary_value +from .plugin_utils import CUSTOM_FIELD_OBJECT_REFERENCE_TYPE, get_json_ref_info, get_primary_value logger = logging.getLogger("netbox.diode_data") @@ -70,23 +72,23 @@ def transform_proto_json(proto_json: dict, object_type: str, supported_models: d a certain form of deduplication and resolution of existing objects. """ entities = _transform_proto_json_1(proto_json, object_type) - logger.error(f"_transform_proto_json_1 entities: {json.dumps(entities, default=lambda o: str(o), indent=4)}") + logger.debug(f"_transform_proto_json_1 entities: {json.dumps(entities, default=lambda o: str(o), indent=4)}") entities = _topo_sort(entities) - logger.error(f"_topo_sort: {json.dumps(entities, default=lambda o: str(o), indent=4)}") + logger.debug(f"_topo_sort: {json.dumps(entities, default=lambda o: str(o), indent=4)}") deduplicated = _fingerprint_dedupe(entities) - logger.error(f"_fingerprint_dedupe: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") + logger.debug(f"_fingerprint_dedupe: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") deduplicated = _topo_sort(deduplicated) - logger.error(f"_topo_sort: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") - _set_slugs(deduplicated, supported_models) - logger.error(f"_set_slugs: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") + logger.debug(f"_topo_sort: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") + _set_auto_slugs(deduplicated, supported_models) + logger.debug(f"_set_auto_slugs: {json.dumps(deduplicated, 
default=lambda o: str(o), indent=4)}") resolved = _resolve_existing_references(deduplicated) - logger.error(f"_resolve_references: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") + logger.debug(f"_resolve_references: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") _set_defaults(resolved, supported_models) - logger.error(f"_set_defaults: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") + logger.debug(f"_set_defaults: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") # handle post-create steps output = _handle_post_creates(resolved) - logger.error(f"_handle_post_creates: {json.dumps(output, default=lambda o: str(o), indent=4)}") + logger.debug(f"_handle_post_creates: {json.dumps(output, default=lambda o: str(o), indent=4)}") _check_unresolved_refs(output) for entity in output: @@ -112,6 +114,14 @@ def _transform_proto_json_1(proto_json: dict, object_type: str, context=None) -> nodes = [node] post_create = None + # special handling for custom fields + custom_fields = dict.pop(proto_json, "customFields", {}) + if custom_fields: + custom_fields, custom_fields_refs, nested = _prepare_custom_fields(object_type, custom_fields) + node['custom_fields'] = custom_fields + node['_refs'].update(custom_fields_refs) + nodes += nested + for key, value in proto_json.items(): ref_info = get_json_ref_info(object_type, key) if ref_info is None: @@ -171,7 +181,6 @@ def _transform_proto_json_1(proto_json: dict, object_type: str, context=None) -> return nodes - def _topo_sort(entities: list[dict]) -> list[dict]: """Topologically sort entities by reference.""" by_uuid = {e['_uuid']: e for e in entities} @@ -198,11 +207,33 @@ def _set_defaults(entities: list[dict], supported_models: dict): if model_fields is None: raise ValidationError(f"Model for object type {entity['_object_type']} is not supported") + auto_slug = entity.pop("_auto_slug", None) + if entity.get("_instance"): + continue + + if auto_slug: + if auto_slug.field_name not in 
entity: + entity[auto_slug.field_name] = auto_slug.value + for field_name, field_info in model_fields.get('fields', {}).items(): if entity.get(field_name) is None and field_info.get("default") is not None: entity[field_name] = field_info["default"] - -def _set_slugs(entities: list[dict], supported_models: dict): + set_custom_field_defaults(entity, model_fields['model']) + + +def set_custom_field_defaults(entity: dict, model): + """Set default values for custom fields in an entity.""" + custom_fields = CustomField.objects.get_for_model(model) + if custom_fields: + custom_field_data = entity.get('custom_fields') + if custom_field_data is None: + custom_field_data = {} + entity['custom_fields'] = custom_field_data + for cf in custom_fields: + if cf.name not in custom_field_data or custom_field_data[cf.name] is None: + custom_field_data[cf.name] = cf.default + +def _set_auto_slugs(entities: list[dict], supported_models: dict): for entity in entities: model_fields = supported_models.get(entity['_object_type']) if model_fields is None: @@ -210,7 +241,11 @@ def _set_slugs(entities: list[dict], supported_models: dict): for field_name, field_info in model_fields.get('fields', {}).items(): if field_info["type"] == "SlugField" and entity.get(field_name) is None: - entity[field_name] = _generate_slug(entity['_object_type'], entity) + slug = _generate_slug(entity['_object_type'], entity) + if slug is not None: + # this is provisionally set but will not be used + # if the entity is identified by other means... 
+ entity['_auto_slug'] = AutoSlug(field_name=field_name, value=slug) def _generate_slug(object_type, data): """Generate a slug for a model instance.""" @@ -278,15 +313,20 @@ def _update_unresolved_refs(entity, new_refs): entity['_instance'] = new_refs.get(instance_uuid, instance_uuid) entity['_refs'] = {new_refs.get(r,r) for r in entity['_refs']} + _update_dict_refs(entity, new_refs) + - for k, v in entity.items(): +def _update_dict_refs(data, new_refs): + for k, v in data.items(): if isinstance(v, UnresolvedReference) and v.uuid in new_refs: - v.uuid = new_refs[v.uuid] + data[k] = new_refs[v.uuid] elif isinstance(v, (list, tuple)): for item in v: if isinstance(item, UnresolvedReference) and item.uuid in new_refs: item.uuid = new_refs[item.uuid] - # TODO maps ... + elif isinstance(v, dict): + _update_dict_refs(v, new_refs) + def _resolve_existing_references(entities: list[dict]) -> list[dict]: seen = {} @@ -300,7 +340,7 @@ def _resolve_existing_references(entities: list[dict]) -> list[dict]: existing = find_existing_object(data, object_type) if existing is not None: - logger.error(f"existing {data} -> {existing}") + logger.debug(f"existing {data} -> {existing}") fp = (object_type, existing.id) if fp in seen: logger.warning(f"objects resolved to the same existing id after deduplication: {seen[fp]} and {data}") @@ -328,7 +368,8 @@ def _update_resolved_refs(data, new_refs): else: new_items.append(item) data[k] = new_items - # TODO maps ... 
+ elif isinstance(v, dict): + _update_resolved_refs(v, new_refs) def cleanup_unresolved_references(data: dict) -> list[str]: """Find and stringify unresolved references in fields.""" @@ -347,7 +388,9 @@ def cleanup_unresolved_references(data: dict) -> list[str]: else: items.append(item) data[k] = items - # TODO maps + elif isinstance(v, dict): + for uu in cleanup_unresolved_references(v): + unresolved.add(f"{k}.{uu}") return sorted(unresolved) def _handle_post_creates(entities: list[dict]) -> list[str]: @@ -394,3 +437,84 @@ def _check_unresolved_refs(entities: list[dict]) -> list[str]: } } ) + + +def _prepare_custom_fields(object_type: str, custom_fields: dict) -> tuple[dict, set, list]: + """Prepare custom fields for transformation.""" + out = {} + refs = set() + nodes = [] + for key, value in custom_fields.items(): + keyname = key + try: + value_type, value = _pop_custom_field_type_and_value(value) + if value_type in ("text", "longText", "decimal", "boolean", "datetime", "selection", "url", "multipleSelection"): + out[key] = value + elif value_type == "date": + # truncate to YYYY-MM-DD + out[key] = datetime.datetime.fromisoformat(value).strftime("%Y-%m-%d") + elif value_type == "integer": + out[key] = int(value) + elif value_type == "json": + out[key] = _prepare_custom_json(value) + elif value_type == "object": + nested = _prepare_custom_ref(value) + ref = nested[0] + refs.add(ref['_uuid']) + nodes += nested + out[key] = UnresolvedReference( + object_type=ref['_object_type'], + uuid=ref['_uuid'], + ) + elif value_type == "multipleObjects": + vals = [] + for i, item in enumerate(value): + keyname = f"{key}[{i}]" + nested = _prepare_custom_ref(value) + ref = nested[0] + refs.add(ref['_uuid']) + nodes += nested + vals.append(UnresolvedReference( + object_type=ref['_object_type'], + uuid=ref['_uuid'], + )) + out[key] = vals + else: + raise ValueError(f"Custom field {keyname} has unknown type: {value_type}") + except ValueError as e: + raise ChangeSetException( + 
f"Custom field {keyname} is invalid: {value}", + errors={ + object_type: {keyname: [str(e)]}, + } + ) + return out, refs, nodes + + +def _prepare_custom_json(data: dict) -> dict: + try: + return json.loads(data) + except json.JSONDecodeError: + raise ValueError("failed to parse as JSON") + + +def _pop_custom_field_type_and_value(data: dict): + if not isinstance(data, dict) or len(data) != 1: + raise ValueError("custom field value must be a dictionary with a single key") + value_type, value = data.popitem() + return value_type, value + + +def _prepare_custom_ref(data: dict) -> list[dict]: + if not isinstance(data, dict) or len(data) != 1: + raise ValueError("must be a dictionary with a single key") + + field_name, value = data.popitem() + if not isinstance(value, dict): + raise ValueError(f"{field_name} must be a dictionary") + ref_info = get_json_ref_info(CUSTOM_FIELD_OBJECT_REFERENCE_TYPE, field_name) + if ref_info is None: + raise ValueError(f"{field_name} is not a supported custom field reference type") + + object_type = ref_info.object_type + return _transform_proto_json_1(value, object_type) diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index 5f6d004..ce5c879 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -133,7 +133,7 @@ def _post(self, request, *args, **kwargs): ) try: with transaction.atomic(): - result = apply_changeset(change_set) + result = apply_changeset(change_set, request) except ChangeSetException as e: logger.error(f"Error applying change set: {e}") result = ChangeSetResult( diff --git a/netbox_diode_plugin/tests/test_api_diff_and_apply.py b/netbox_diode_plugin/tests/test_api_diff_and_apply.py index f2d501d..97bc38b 100644 --- a/netbox_diode_plugin/tests/test_api_diff_and_apply.py +++ b/netbox_diode_plugin/tests/test_api_diff_and_apply.py @@ -2,11 +2,15 @@ # Copyright 2024 NetBox Labs Inc """Diode NetBox Plugin - Tests.""" +import datetime import logging from uuid import 
uuid4 +from core.models import ObjectType from dcim.models import Device, Interface, Site from django.contrib.auth import get_user_model +from extras.models import CustomField +from extras.models.customfields import CustomFieldTypeChoices from ipam.models import IPAddress from rest_framework import status from users.models import Token @@ -30,6 +34,44 @@ def setUp(self): self.add_permissions("netbox_diode_plugin.add_diode") + self.object_type = ObjectType.objects.get_for_model(Site) + + self.uuid_field = CustomField.objects.create( + name='myuuid', + type=CustomFieldTypeChoices.TYPE_TEXT, + required=False, + unique=True, + ) + self.uuid_field.object_types.set([self.object_type]) + self.uuid_field.save() + + self.json_field = CustomField.objects.create( + name='some_json', + type=CustomFieldTypeChoices.TYPE_JSON, + required=False, + unique=False, + ) + self.json_field.object_types.set([self.object_type]) + self.json_field.save() + + self.datetime_field = CustomField.objects.create( + name='mydatetime', + type=CustomFieldTypeChoices.TYPE_DATETIME, + required=False, + unique=False, + ) + self.datetime_field.object_types.set([self.object_type]) + self.datetime_field.save() + + self.date_field = CustomField.objects.create( + name='mydate', + type=CustomFieldTypeChoices.TYPE_DATE, + required=False, + unique=False, + ) + self.date_field.object_types.set([self.object_type]) + self.date_field.save() + def test_generate_diff_and_apply_create_interface_with_tags(self): """Test generate diff and apply create interface with tags.""" interface_uuid = str(uuid4()) @@ -71,7 +113,7 @@ def test_generate_diff_and_apply_create_interface_with_tags(self): self.assertEqual(new_interface.tags.first().name, "tag 1") - def test_generate_diff_and_apply_create_site(self): + def test_generate_diff_and_apply_create_site_autoslug(self): """Test generate diff and apply create site.""" """Test generate diff create site.""" site_uuid = str(uuid4()) @@ -81,7 +123,6 @@ def 
test_generate_diff_and_apply_create_site(self): "entity": { "site": { "name": f"Site {site_uuid}", - "slug": f"site-{site_uuid}", }, } } @@ -169,6 +210,118 @@ def test_generate_diff_and_apply_create_device_with_primary_ip4(self): device = Device.objects.get(name=f"Device {device_uuid}") self.assertEqual(device.primary_ip4.pk, new_ipaddress.pk) + def test_generate_diff_and_apply_create_and_update_site_with_custom_field(self): + """Test generate diff and apply create and update site with custom field.""" + site_uuid = str(uuid4()) + payload = { + "timestamp": 1, + "object_type": "dcim.site", + "entity": { + "site": { + "name": "A New Custom Site", + "slug": "a-new-custom-site", + "customFields": { + "myuuid": { + "text": site_uuid, + }, + "some_json": { + "json": '{"some_key": 9876543210}', + }, + }, + }, + } + } + + _, response = self.diff_and_apply(payload) + new_site = Site.objects.get(name="A New Custom Site") + self.assertEqual(new_site.custom_field_data[self.uuid_field.name], site_uuid) + self.assertEqual(new_site.custom_field_data[self.json_field.name], {"some_key": 9876543210}) + + payload = { + "timestamp": 1, + "object_type": "dcim.site", + "entity": { + "site": { + "comments": "An updated comment", + "customFields": { + "myuuid": { + "text": site_uuid, + }, + "some_json": { + "json": '{"some_key": 1234567890}', + }, + "mydatetime": { + "datetime": "2026-01-01T09:00:00Z", + }, + "mydate": { + "date": "2026-01-01T00:00:00Z", + }, + }, + }, + } + } + + _, response = self.diff_and_apply(payload) + new_site = Site.objects.get(name="A New Custom Site") + self.assertEqual(new_site.cf[self.uuid_field.name], site_uuid) + self.assertEqual(new_site.cf[self.json_field.name], {"some_key": 1234567890}) + self.assertEqual(new_site.cf[self.datetime_field.name], datetime.datetime(2026, 1, 1, 9, 0, 0, tzinfo=datetime.timezone.utc)) + self.assertEqual(new_site.cf[self.date_field.name], datetime.date(2026, 1, 1)) + + payload = { + "timestamp": 1, + "object_type": "dcim.site", 
+ "entity": { + "site": { + "customFields": { + "myuuid": { + "text": site_uuid, + }, + "mydatetime": { + "datetime": "2026-01-01T10:00:00Z", + }, + "mydate": { + "date": "2026-01-02T00:00:00Z", + }, + }, + }, + } + } + + _, response = self.diff_and_apply(payload) + new_site = Site.objects.get(name="A New Custom Site") + self.assertEqual(new_site.cf[self.uuid_field.name], site_uuid) + self.assertEqual(new_site.cf[self.json_field.name], {"some_key": 1234567890}) + self.assertEqual(new_site.cf[self.datetime_field.name], datetime.datetime(2026, 1, 1, 10, 0, 0, tzinfo=datetime.timezone.utc)) + self.assertEqual(new_site.cf[self.date_field.name], datetime.date(2026, 1, 2)) + + payload = { + "timestamp": 1, + "object_type": "dcim.site", + "entity": { + "site": { + "customFields": { + "myuuid": { + "text": site_uuid, + }, + "mydatetime": { + "datetime": "2026-01-01T10:00:00Z", + }, + "mydate": { + "date": "2026-01-02T00:00:00Z", + }, + }, + }, + } + } + response1 = self.client.post( + self.diff_url, data=payload, format="json", **self.user_header + ) + self.assertEqual(response1.status_code, status.HTTP_200_OK) + diff = response1.json().get("change_set", {}) + self.assertEqual(diff.get("changes", []), []) + + def diff_and_apply(self, payload): """Diff and apply the payload.""" response1 = self.client.post( diff --git a/netbox_diode_plugin/tests/test_api_generate_diff.py b/netbox_diode_plugin/tests/test_api_generate_diff.py index 4f70149..d7b1dd2 100644 --- a/netbox_diode_plugin/tests/test_api_generate_diff.py +++ b/netbox_diode_plugin/tests/test_api_generate_diff.py @@ -2,8 +2,13 @@ # Copyright 2024 NetBox Labs Inc """Diode NetBox Plugin - Tests.""" -from dcim.models import Site +from uuid import uuid4 + +from core.models import ObjectType +from dcim.models import Manufacturer, RackType, Site from django.contrib.auth import get_user_model +from extras.models import CustomField +from extras.models.customfields import CustomFieldTypeChoices from rest_framework import status 
from users.models import Token from utilities.testing import APITestCase @@ -22,7 +27,27 @@ def setUp(self): self.user_token = Token.objects.create(user=self.user) self.user_header = {"HTTP_AUTHORIZATION": f"Token {self.user_token.key}"} + self.object_type = ObjectType.objects.get_for_model(Site) + + self.uuid_field = CustomField.objects.create( + name='myuuid', + type=CustomFieldTypeChoices.TYPE_TEXT, + required=False, + unique=True, + ) + self.uuid_field.object_types.set([self.object_type]) + self.uuid_field.save() + + self.json_field = CustomField.objects.create( + name='some_json', + type=CustomFieldTypeChoices.TYPE_JSON, + required=False, + unique=False, + ) + self.json_field.object_types.set([self.object_type]) + self.json_field.save() + self.site_uuid = str(uuid4()) self.site = Site.objects.create( name="Site Generate Diff 1", slug="site-generate-diff-1", @@ -32,7 +57,22 @@ def setUp(self): shipping_address="123 Fake St Lincoln NE 68588", comments="Lorem ipsum etcetera", ) + self.site.custom_field_data[self.uuid_field.name] = self.site_uuid + self.site.custom_field_data[self.json_field.name] = { + "some_key": "some_value", + } + self.site.save() + self.manufacturer = Manufacturer.objects.create( + name="Manufacturer 1", + ) + self.manufacturer.save() + self.rack_type = RackType.objects.create( + model="Rack Type 1", + slug="rack-type-1", + manufacturer=self.manufacturer, + ) + self.rack_type.save() def test_generate_diff_create_site(self): """Test generate diff create site.""" @@ -63,6 +103,41 @@ def test_generate_diff_create_site(self): self.assertEqual(data.get("name"), "A New Site") self.assertEqual(data.get("slug"), "a-new-site") + def test_generate_diff_create_site_with_custom_field(self): + """Test generate diff create site with custom field.""" + payload = { + "timestamp": 1, + "object_type": "dcim.site", + "entity": { + "site": { + "name": "A New Site", + "slug": "a-new-site", + "customFields": { + "some_json": { + "json": '{"some_key": 1234567890}', 
+ }, + }, + }, + } + } + + response = self.send_request(payload) + self.assertEqual(response.status_code, status.HTTP_200_OK) + cs = response.json().get("change_set", {}) + self.assertIsNotNone(cs.get("id")) + changes = cs.get("changes", []) + self.assertEqual(len(changes), 1) + change = changes[0] + self.assertEqual(change.get("object_type"), "dcim.site") + self.assertEqual(change.get("change_type"), "create") + self.assertEqual(change.get("object_id"), None) + self.assertIsNotNone(change.get("ref_id")) + + data = change.get("data", {}) + self.assertEqual(data.get("name"), "A New Site") + self.assertEqual(data.get("slug"), "a-new-site") + self.assertEqual(data.get("custom_fields", {}).get("some_json", {}).get("some_key"), 1234567890) + def test_generate_diff_update_site(self): """Test generate diff update site.""" """Test generate diff create site.""" @@ -96,6 +171,80 @@ def test_generate_diff_update_site(self): self.assertEqual(data.get("slug"), "site-generate-diff-1") self.assertEqual(data.get("comments"), "An updated comment") + def test_match_site_by_custom_field(self): + """Test match site by custom field.""" + payload = { + "timestamp": 1, + "object_type": "dcim.site", + "entity": { + "site": { + # here name and slug are not present in the payload + # but we expect to match the existing site by the + # unique custom field myuuid + "comments": "A custom comment", + "customFields": { + "myuuid": { + "text": self.site_uuid, + }, + }, + }, + } + } + + response = self.send_request(payload) + self.assertEqual(response.status_code, status.HTTP_200_OK) + cs = response.json().get("change_set", {}) + self.assertIsNotNone(cs.get("id")) + changes = cs.get("changes", []) + self.assertEqual(len(changes), 1) + change = changes[0] + self.assertEqual(change.get("object_type"), "dcim.site") + self.assertEqual(change.get("change_type"), "update") + self.assertEqual(change.get("object_id"), self.site.id) + self.assertEqual(change.get("ref_id"), None) + + data = 
change.get("data", {}) + self.assertEqual(data.get("comments"), "A custom comment") + self.assertEqual(data.get("custom_fields", {}).get("myuuid"), self.site_uuid) + + before = change.get("before", {}) + self.assertEqual(before.get("name"), "Site Generate Diff 1") + self.assertEqual(before.get("slug"), "site-generate-diff-1") + + def test_generate_diff_update_rack_type_autoslug(self): + """Test generate diff update rack type autoslug.""" + payload = { + "timestamp": 1, + "object_type": "dcim.racktype", + "entity": { + "rackType": { + "model": "Rack Type 1", + "form_factor": "wall-frame", + }, + } + } + + response = self.send_request(payload) + self.assertEqual(response.status_code, status.HTTP_200_OK) + cs = response.json().get("change_set", {}) + self.assertIsNotNone(cs.get("id")) + changes = cs.get("changes", []) + self.assertEqual(len(changes), 1) + change = changes[0] + self.assertEqual(change.get("object_type"), "dcim.racktype") + self.assertEqual(change.get("change_type"), "update") + self.assertEqual(change.get("object_id"), self.rack_type.id) + self.assertEqual(change.get("ref_id"), None) + + data = change.get("data", {}) + self.assertEqual(data.get("model"), "Rack Type 1") + self.assertEqual(data.get("slug"), None) # slug is not set, use prior slug + self.assertEqual(data.get("form_factor"), "wall-frame") + + before = change.get("before", {}) + self.assertEqual(before.get("model"), "Rack Type 1") + # correct slug is present in before data + self.assertEqual(before.get("slug"), "rack-type-1") def send_request(self, payload, status_code=status.HTTP_200_OK): From a8019f9d857e0cbac71c32d9563531bb8eb89039 Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Thu, 10 Apr 2025 15:29:57 -0400 Subject: [PATCH 20/52] fix: fix errors attempting value harmonization (#76) cleanup error messages --- netbox_diode_plugin/api/applier.py | 19 ++-- netbox_diode_plugin/api/differ.py | 12 ++- netbox_diode_plugin/api/transformer.py | 5 +- 
.../tests/test_api_apply_change_set.py | 16 ++-- .../tests/test_api_diff_and_apply.py | 87 +++++++++++++++++++ 5 files changed, 122 insertions(+), 17 deletions(-) diff --git a/netbox_diode_plugin/api/applier.py b/netbox_diode_plugin/api/applier.py index 4c974d8..3f1b041 100644 --- a/netbox_diode_plugin/api/applier.py +++ b/netbox_diode_plugin/api/applier.py @@ -23,7 +23,7 @@ def apply_changeset(change_set: ChangeSet, request) -> ChangeSetResult: _validate_change_set(change_set) created = {} - for i, change in enumerate(change_set.changes): + for change in change_set.changes: change_type = change.change_type object_type = change.object_type @@ -35,9 +35,14 @@ def apply_changeset(change_set: ChangeSet, request) -> ChangeSetResult: data = _pre_apply(model_class, change, created) _apply_change(data, model_class, change, created, request) except ValidationError as e: - raise _err_from_validation_error(e, f"changes[{i}]") + raise _err_from_validation_error(e, object_type) except ObjectDoesNotExist: - raise _err(f"{object_type} with id {change.object_id} does not exist", f"changes[{i}]", "object_id") + raise _err(f"{object_type} with id {change.object_id} does not exist", object_type, "object_id") + except TypeError as e: + # this indicates a problem in model validation (should raise ValidationError) + # but raised non-validation error (TypeError) -- we don't know which field trigged it. + logger.error(f"invalid data type for unspecified field (validation raised non-validation error): {data}: {e}") + raise _err("invalid data type for field", object_type, "__all__") # ConstraintViolationError ? # ... 
@@ -113,13 +118,15 @@ def _validate_change_set(change_set: ChangeSet): if not change_set.changes: raise _err("Changes are required", "changeset", "changes") - for i, change in enumerate(change_set.changes): + for change in change_set.changes: if change.object_id is None and change.ref_id is None: - raise _err("Object ID or Ref ID must be provided", f"changes[{i}]", NON_FIELD_ERRORS) + raise _err("Object ID or Ref ID must be provided", change.object_type, NON_FIELD_ERRORS) if change.change_type not in ChangeType: - raise _err(f"Unsupported change type '{change.change_type}'", f"changes[{i}]", "change_type") + raise _err(f"Unsupported change type '{change.change_type}'", change.object_type, "change_type") def _err(message, object_name, field): + if not object_name: + object_name = "__all__" return ChangeSetException(message, errors={object_name: {field: [message]}}) def _err_from_validation_error(e, object_name): diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index 55a990f..1da1843 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -10,7 +10,7 @@ from django.core.exceptions import ValidationError from utilities.data import shallow_compare_dict -from .common import Change, ChangeSet, ChangeSetException, ChangeSetResult, ChangeType +from .common import Change, ChangeSet, ChangeSetException, ChangeSetResult, ChangeType, UnresolvedReference from .plugin_utils import get_primary_value, legal_fields from .supported_models import extract_supported_models from .transformer import cleanup_unresolved_references, set_custom_field_defaults, transform_proto_json @@ -84,12 +84,20 @@ def prechange_data_from_instance(instance) -> dict: # noqa: C901 def _harmonize_formats(prechange_data: dict, postchange_data: dict): for k, v in prechange_data.items(): + if k.startswith('_'): + continue if isinstance(v, datetime.datetime): prechange_data[k] = v.strftime("%Y-%m-%dT%H:%M:%SZ") elif isinstance(v, datetime.date): 
prechange_data[k] = v.strftime("%Y-%m-%d") elif isinstance(v, int) and k in postchange_data: - postchange_data[k] = int(postchange_data[k]) + val = postchange_data[k] + if isinstance(val, UnresolvedReference): + continue + try: + postchange_data[k] = int(val) + except Exception: + continue elif isinstance(v, dict): _harmonize_formats(v, postchange_data.get(k, {})) diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index cfb246d..2dd05cc 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -452,7 +452,10 @@ def _prepare_custom_fields(object_type: str, custom_fields: dict) -> tuple[dict, out[key] = value elif value_type == "date": # truncate to YYYY-MM-DD - out[key] = datetime.datetime.fromisoformat(value).strftime("%Y-%m-%d") + try: + out[key] = datetime.datetime.fromisoformat(value).strftime("%Y-%m-%d") + except Exception: + out[key] = value elif value_type == "integer": out[key] = int(value) elif value_type == "json": diff --git a/netbox_diode_plugin/tests/test_api_apply_change_set.py b/netbox_diode_plugin/tests/test_api_apply_change_set.py index b2d27c0..d35e1bb 100644 --- a/netbox_diode_plugin/tests/test_api_apply_change_set.py +++ b/netbox_diode_plugin/tests/test_api_apply_change_set.py @@ -300,7 +300,7 @@ def test_change_type_create_with_error_return_400(self): self.assertIn( 'Expected a list of items but got type "int".', - _get_error(response, "changes[0]", "asns"), + _get_error(response, "dcim.site", "asns"), ) self.assertFalse(site_created.exists()) @@ -334,7 +334,7 @@ def test_change_type_update_with_error_return_400(self): site_updated = Site.objects.get(id=20) self.assertIn( 'Expected a list of items but got type "int".', - _get_error(response, "changes[0]", "asns") + _get_error(response, "dcim.site", "asns") ) self.assertEqual(site_updated.name, "Site 2") @@ -478,7 +478,7 @@ def test_change_type_create_and_update_with_error_in_one_object_return_400(self) 
self.assertIn( "Related object not found using the provided numeric ID: 3", - _get_error(response, "changes[1]", "device_type"), + _get_error(response, "dcim.device", "device_type"), ) self.assertFalse(site_created.exists()) self.assertFalse(device_created.exists()) @@ -548,7 +548,7 @@ def test_multiples_create_type_error_in_two_objects_return_400(self): self.assertIn( "Related object not found using the provided numeric ID: 3", - _get_error(response, "changes[1]", "device_type"), + _get_error(response, "dcim.device", "device_type"), ) self.assertFalse(site_created.exists()) @@ -587,7 +587,7 @@ def test_change_type_update_with_object_id_not_exist_return_400(self): self.assertIn( "dcim.site with id 30 does not exist", - _get_error(response, "changes[0]", "object_id"), + _get_error(response, "dcim.site", "object_id"), ) self.assertEqual(site_updated.name, "Site 2") @@ -655,7 +655,7 @@ def test_change_type_field_not_provided_return_400( self.assertIn( "Unsupported change type ''", - _get_error(response, "changes[0]", "change_type"), + _get_error(response, "dcim.site", "change_type"), ) def test_change_set_id_field_and_change_set_not_provided_return_400(self): @@ -720,7 +720,7 @@ def test_change_type_and_object_type_provided_return_400( self.assertIn( "Unsupported change type 'None'", - _get_error(response, "changes[0]", "change_type"), + _get_error(response, "__all__", "change_type"), ) # self.assertEqual( # response.json().get("errors")[0].get("change_type"), @@ -992,7 +992,7 @@ def test_create_prefix_with_unknown_site_fails(self): response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) self.assertIn( 'Please select a site.', - _get_error(response, "changes[0]", "scope"), + _get_error(response, "ipam.prefix", "scope"), ) self.assertFalse(Prefix.objects.filter(prefix="192.168.0.0/24").exists()) diff --git a/netbox_diode_plugin/tests/test_api_diff_and_apply.py b/netbox_diode_plugin/tests/test_api_diff_and_apply.py index 97bc38b..c4b599d 100644 
--- a/netbox_diode_plugin/tests/test_api_diff_and_apply.py +++ b/netbox_diode_plugin/tests/test_api_diff_and_apply.py @@ -113,6 +113,64 @@ def test_generate_diff_and_apply_create_interface_with_tags(self): self.assertEqual(new_interface.tags.first().name, "tag 1") + def test_generate_diff_and_apply_create_and_update_device_role(self): + """Test generate diff and apply create and update device role.""" + device_uuid = str(uuid4()) + role_1_uuid = str(uuid4()) + role_2_uuid = str(uuid4()) + site_uuid = str(uuid4()) + payload = { + "timestamp": 1, + "object_type": "dcim.device", + "entity": { + "device": { + "name": f"Device {device_uuid}", + "deviceType": { + "model": f"Device Type {uuid4()}", + "manufacturer": { + "name": f"Manufacturer {uuid4()}" + } + }, + "role": { + "name": f"Role {role_1_uuid}" + }, + "site": { + "name": f"Site {site_uuid}" + } + }, + } + } + _, response = self.diff_and_apply(payload) + new_device = Device.objects.get(name=f"Device {device_uuid}") + self.assertEqual(new_device.site.name, f"Site {site_uuid}") + self.assertEqual(new_device.role.name, f"Role {role_1_uuid}") + payload = { + "timestamp": 1, + "object_type": "dcim.device", + "entity": { + "device": { + "name": f"Device {device_uuid}", + "deviceType": { + "model": f"Device Type {uuid4()}", + "manufacturer": { + "name": f"Manufacturer {uuid4()}" + } + }, + "role": { + "name": f"Role {role_2_uuid}" + }, + "site": { + "name": f"Site {site_uuid}" + } + }, + } + } + _, response = self.diff_and_apply(payload) + device = Device.objects.get(name=f"Device {device_uuid}") + self.assertEqual(device.site.name, f"Site {site_uuid}") + self.assertEqual(device.role.name, f"Role {role_2_uuid}") + + def test_generate_diff_and_apply_create_site_autoslug(self): """Test generate diff and apply create site.""" """Test generate diff create site.""" @@ -321,6 +379,35 @@ def test_generate_diff_and_apply_create_and_update_site_with_custom_field(self): diff = response1.json().get("change_set", {}) 
self.assertEqual(diff.get("changes", []), []) + def test_generate_diff_wrong_type_date(self): + """Test generate diff wrong type date.""" + payload = { + "timestamp": 1, + "object_type": "dcim.site", + "entity": { + "site": { + "name": "Site Generate Diff 1", + "slug": "site-generate-diff-1", + "customFields": { + "mydate": { + "date": 12, + }, + }, + }, + } + } + response1 = self.client.post( + self.diff_url, data=payload, format="json", **self.user_header + ) + self.assertEqual(response1.status_code, status.HTTP_200_OK) + + diff = response1.json().get("change_set", {}) + + response2 = self.client.post( + self.apply_url, data=diff, format="json", **self.user_header + ) + self.assertEqual(response2.status_code, status.HTTP_400_BAD_REQUEST) + def diff_and_apply(self, payload): """Diff and apply the payload.""" From 6275570c1aeb9ca601130f6d532635ca09546ab3 Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Fri, 11 Apr 2025 10:11:52 -0400 Subject: [PATCH 21/52] fix: merge tags rather than overwrite (#77) --- Makefile | 7 +- docker/netbox/configuration/logging.py | 17 ++++ netbox_diode_plugin/api/differ.py | 17 +++- netbox_diode_plugin/api/transformer.py | 4 +- .../tests/test_api_diff_and_apply.py | 91 +++++++++++++++++++ 5 files changed, 125 insertions(+), 11 deletions(-) diff --git a/Makefile b/Makefile index 79e7f75..00f51a4 100644 --- a/Makefile +++ b/Makefile @@ -14,12 +14,7 @@ docker-compose-netbox-plugin-down: .PHONY: docker-compose-netbox-plugin-test docker-compose-netbox-plugin-test: - -@$(DOCKER_COMPOSE) -f docker/docker-compose.yaml -f docker/docker-compose.test.yaml run -u root --rm netbox ./manage.py test --keepdb netbox_diode_plugin - @$(MAKE) docker-compose-netbox-plugin-down - -.PHONY: docker-compose-netbox-plugin-test-ff -docker-compose-netbox-plugin-test-ff: - -@$(DOCKER_COMPOSE) -f docker/docker-compose.yaml -f docker/docker-compose.test.yaml run -u root --rm netbox ./manage.py test --failfast --keepdb 
netbox_diode_plugin + -@$(DOCKER_COMPOSE) -f docker/docker-compose.yaml -f docker/docker-compose.test.yaml run -u root --rm netbox ./manage.py test $(TEST_FLAGS) --keepdb netbox_diode_plugin @$(MAKE) docker-compose-netbox-plugin-down .PHONY: docker-compose-netbox-plugin-test-cover diff --git a/docker/netbox/configuration/logging.py b/docker/netbox/configuration/logging.py index d786768..f145c5c 100644 --- a/docker/netbox/configuration/logging.py +++ b/docker/netbox/configuration/logging.py @@ -1,3 +1,20 @@ +from os import environ + +LOGGING = { + 'version': 1, + 'disable_existing_loggers': False, + 'handlers': { + 'console': { + 'class': 'logging.StreamHandler', + }, + }, + 'loggers': { + '': { # root logger + 'handlers': ['console'], + 'level': 'DEBUG' if environ.get('DEBUG', 'false').lower() == 'true' else 'INFO', + }, + }, +} # # Remove first comment(#) on each line to implement this working logging example. # # Add LOGLEVEL environment variable to netbox if you use this example & want a different log level. # from os import environ diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index 1da1843..026f3c9 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -55,7 +55,7 @@ def prechange_data_from_instance(instance) -> dict: # noqa: C901 if hasattr(value, "all"): # Handle many-to-many and many-to-one relationships # For any relationship that has an 'all' method, get all related objects' primary keys prechange_data[field_name] = ( - [item.pk for item in value.all()] if value is not None else [] + sorted([item.pk for item in value.all()] if value is not None else []) ) elif hasattr( value, "pk" @@ -233,8 +233,11 @@ def _partially_merge(prechange_data: dict, postchange_data: dict, instance) -> d """Merge lists and custom_fields rather than replacing the full value...""" result = {} for key, value in postchange_data.items(): - # TODO: partially merge lists like tags? all lists? 
- result[key] = value + # currently we only merge tags, but this could be extended to other reference lists? + if key == "tags": + result[key] = _merge_reference_list(prechange_data.get(key, []), value) + else: + result[key] = value # these are fully merged in from the prechange state because # they don't respect partial update serialization. @@ -244,3 +247,11 @@ def _partially_merge(prechange_data: dict, postchange_data: dict, instance) -> d result["custom_fields"][key] = value set_custom_field_defaults(result, instance) return result + +def _merge_reference_list(prechange_list: list, postchange_list: list) -> list: + """Merge reference lists rather than replacing the full value.""" + result = set(prechange_list) + result.update(postchange_list) + return sorted(result, key=str) + + diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index 2dd05cc..59949e2 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -319,7 +319,7 @@ def _update_unresolved_refs(entity, new_refs): def _update_dict_refs(data, new_refs): for k, v in data.items(): if isinstance(v, UnresolvedReference) and v.uuid in new_refs: - data[k] = new_refs[v.uuid] + v.uuid = new_refs[v.uuid] elif isinstance(v, (list, tuple)): for item in v: if isinstance(item, UnresolvedReference) and item.uuid in new_refs: @@ -439,7 +439,7 @@ def _check_unresolved_refs(entities: list[dict]) -> list[str]: ) -def _prepare_custom_fields(object_type: str, custom_fields: dict) -> tuple[dict, set, list]: +def _prepare_custom_fields(object_type: str, custom_fields: dict) -> tuple[dict, set, list]: # noqa: C901 """Prepare custom fields for transformation.""" out = {} refs = set() diff --git a/netbox_diode_plugin/tests/test_api_diff_and_apply.py b/netbox_diode_plugin/tests/test_api_diff_and_apply.py index c4b599d..a0814cb 100644 --- a/netbox_diode_plugin/tests/test_api_diff_and_apply.py +++ 
b/netbox_diode_plugin/tests/test_api_diff_and_apply.py @@ -189,6 +189,97 @@ def test_generate_diff_and_apply_create_site_autoslug(self): new_site = Site.objects.get(name=f"Site {site_uuid}") self.assertEqual(new_site.slug, f"site-{site_uuid}") + def test_generate_diff_and_apply_tags_merged(self): + """Test generate diff and apply merges tags.""" + site_uuid = str(uuid4()) + payload = { + "timestamp": 1, + "object_type": "dcim.site", + "entity": { + "site": { + "name": f"Site {site_uuid}", + "tags": [ + {"name": "tag 1"}, + {"name": "tag 2"}, + ], + }, + } + } + + _, response = self.diff_and_apply(payload) + new_site = Site.objects.get(name=f"Site {site_uuid}") + self.assertEqual(new_site.tags.count(), 2) + tag_names = [tag.name for tag in new_site.tags.all()] + self.assertIn("tag 1", tag_names) + self.assertIn("tag 2", tag_names) + + payload = { + "timestamp": 1, + "object_type": "dcim.site", + "entity": { + "site": { + "name": f"Site {site_uuid}", + "tags": [ + {"name": "tag 3"}, + ], + }, + } + } + + _, response = self.diff_and_apply(payload) + new_site = Site.objects.get(name=f"Site {site_uuid}") + self.assertEqual(new_site.tags.count(), 3) + tag_names = [tag.name for tag in new_site.tags.all()] + self.assertIn("tag 1", tag_names) + self.assertIn("tag 2", tag_names) + self.assertIn("tag 3", tag_names) + + def test_generate_diff_and_apply_refs_not_merged(self): + """Test generate diff and apply does not merge reference lists.""" + site_uuid = str(uuid4()) + payload = { + "timestamp": 1, + "object_type": "dcim.site", + "entity": { + "site": { + "name": f"Site {site_uuid}", + "asns": [ + {"asn": "1", "rir": {"name": "RIR 1"}}, + {"asn": "2", "rir": {"name": "RIR 1"}}, + ], + }, + } + } + + _, response = self.diff_and_apply(payload) + new_site = Site.objects.get(name=f"Site {site_uuid}") + self.assertEqual(new_site.asns.count(), 2) + asns = [asn.asn for asn in new_site.asns.all()] + self.assertIn(1, asns) + self.assertIn(2, asns) + + payload = { + "timestamp": 1, + 
"object_type": "dcim.site", + "entity": { + "site": { + "name": f"Site {site_uuid}", + "asns": [ + {"asn": "3", "rir": {"name": "RIR 1"}}, + ], + }, + } + } + + _, response = self.diff_and_apply(payload) + new_site = Site.objects.get(name=f"Site {site_uuid}") + self.assertEqual(new_site.asns.count(), 1) + asns = [asn.asn for asn in new_site.asns.all()] + self.assertNotIn(1, asns) + self.assertNotIn(2, asns) + self.assertIn(3, asns) + + def test_generate_diff_and_apply_create_interface_with_primay_mac_address(self): """Test generate diff and apply create interface with primary mac address.""" interface_uuid = str(uuid4()) From 4ee1b5eaa4e5edfa8eb04dd3b4e7236838299dbd Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Sat, 12 Apr 2025 13:31:22 -0400 Subject: [PATCH 22/52] fix: accept both valid forms of protoJSON (snake and camel) (#78) --- netbox_diode_plugin/api/plugin_utils.py | 856 +++++++++--------- netbox_diode_plugin/api/transformer.py | 26 +- netbox_diode_plugin/api/views.py | 23 +- .../tests/test_api_diff_and_apply.py | 65 +- .../tests/test_api_generate_diff.py | 38 +- 5 files changed, 554 insertions(+), 454 deletions(-) diff --git a/netbox_diode_plugin/api/plugin_utils.py b/netbox_diode_plugin/api/plugin_utils.py index 9571f47..9a08f33 100644 --- a/netbox_diode_plugin/api/plugin_utils.py +++ b/netbox_diode_plugin/api/plugin_utils.py @@ -1,7 +1,7 @@ """Diode plugin helpers.""" # Generated code. DO NOT EDIT. 
-# Timestamp: 2025-04-10 14:44:19Z +# Timestamp: 2025-04-12 15:25:46Z from dataclasses import dataclass from functools import lru_cache @@ -44,98 +44,98 @@ class RefInfo: _JSON_REF_INFO = { 'diode.custom_field_object_reference': { 'asn': RefInfo(object_type='ipam.asn', field_name='object', is_generic=True), - 'asnRange': RefInfo(object_type='ipam.asnrange', field_name='object', is_generic=True), + 'asn_range': RefInfo(object_type='ipam.asnrange', field_name='object', is_generic=True), 'aggregate': RefInfo(object_type='ipam.aggregate', field_name='object', is_generic=True), 'cable': RefInfo(object_type='dcim.cable', field_name='object', is_generic=True), - 'cablePath': RefInfo(object_type='dcim.cablepath', field_name='object', is_generic=True), - 'cableTermination': RefInfo(object_type='dcim.cabletermination', field_name='object', is_generic=True), + 'cable_path': RefInfo(object_type='dcim.cablepath', field_name='object', is_generic=True), + 'cable_termination': RefInfo(object_type='dcim.cabletermination', field_name='object', is_generic=True), 'circuit': RefInfo(object_type='circuits.circuit', field_name='object', is_generic=True), - 'circuitGroup': RefInfo(object_type='circuits.circuitgroup', field_name='object', is_generic=True), - 'circuitGroupAssignment': RefInfo(object_type='circuits.circuitgroupassignment', field_name='object', is_generic=True), - 'circuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='object', is_generic=True), - 'circuitType': RefInfo(object_type='circuits.circuittype', field_name='object', is_generic=True), + 'circuit_group': RefInfo(object_type='circuits.circuitgroup', field_name='object', is_generic=True), + 'circuit_group_assignment': RefInfo(object_type='circuits.circuitgroupassignment', field_name='object', is_generic=True), + 'circuit_termination': RefInfo(object_type='circuits.circuittermination', field_name='object', is_generic=True), + 'circuit_type': RefInfo(object_type='circuits.circuittype', 
field_name='object', is_generic=True), 'cluster': RefInfo(object_type='virtualization.cluster', field_name='object', is_generic=True), - 'clusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='object', is_generic=True), - 'clusterType': RefInfo(object_type='virtualization.clustertype', field_name='object', is_generic=True), - 'consolePort': RefInfo(object_type='dcim.consoleport', field_name='object', is_generic=True), - 'consoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='object', is_generic=True), + 'cluster_group': RefInfo(object_type='virtualization.clustergroup', field_name='object', is_generic=True), + 'cluster_type': RefInfo(object_type='virtualization.clustertype', field_name='object', is_generic=True), + 'console_port': RefInfo(object_type='dcim.consoleport', field_name='object', is_generic=True), + 'console_server_port': RefInfo(object_type='dcim.consoleserverport', field_name='object', is_generic=True), 'contact': RefInfo(object_type='tenancy.contact', field_name='object', is_generic=True), - 'contactAssignment': RefInfo(object_type='tenancy.contactassignment', field_name='object', is_generic=True), - 'contactGroup': RefInfo(object_type='tenancy.contactgroup', field_name='object', is_generic=True), - 'contactRole': RefInfo(object_type='tenancy.contactrole', field_name='object', is_generic=True), + 'contact_assignment': RefInfo(object_type='tenancy.contactassignment', field_name='object', is_generic=True), + 'contact_group': RefInfo(object_type='tenancy.contactgroup', field_name='object', is_generic=True), + 'contact_role': RefInfo(object_type='tenancy.contactrole', field_name='object', is_generic=True), 'device': RefInfo(object_type='dcim.device', field_name='object', is_generic=True), - 'deviceBay': RefInfo(object_type='dcim.devicebay', field_name='object', is_generic=True), - 'deviceRole': RefInfo(object_type='dcim.devicerole', field_name='object', is_generic=True), - 'deviceType': 
RefInfo(object_type='dcim.devicetype', field_name='object', is_generic=True), - 'fhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='object', is_generic=True), - 'fhrpGroupAssignment': RefInfo(object_type='ipam.fhrpgroupassignment', field_name='object', is_generic=True), - 'frontPort': RefInfo(object_type='dcim.frontport', field_name='object', is_generic=True), - 'ikePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='object', is_generic=True), - 'ikeProposal': RefInfo(object_type='vpn.ikeproposal', field_name='object', is_generic=True), - 'ipAddress': RefInfo(object_type='ipam.ipaddress', field_name='object', is_generic=True), - 'ipRange': RefInfo(object_type='ipam.iprange', field_name='object', is_generic=True), - 'ipSecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='object', is_generic=True), - 'ipSecProfile': RefInfo(object_type='vpn.ipsecprofile', field_name='object', is_generic=True), - 'ipSecProposal': RefInfo(object_type='vpn.ipsecproposal', field_name='object', is_generic=True), + 'device_bay': RefInfo(object_type='dcim.devicebay', field_name='object', is_generic=True), + 'device_role': RefInfo(object_type='dcim.devicerole', field_name='object', is_generic=True), + 'device_type': RefInfo(object_type='dcim.devicetype', field_name='object', is_generic=True), + 'fhrp_group': RefInfo(object_type='ipam.fhrpgroup', field_name='object', is_generic=True), + 'fhrp_group_assignment': RefInfo(object_type='ipam.fhrpgroupassignment', field_name='object', is_generic=True), + 'front_port': RefInfo(object_type='dcim.frontport', field_name='object', is_generic=True), + 'ike_policy': RefInfo(object_type='vpn.ikepolicy', field_name='object', is_generic=True), + 'ike_proposal': RefInfo(object_type='vpn.ikeproposal', field_name='object', is_generic=True), + 'ip_address': RefInfo(object_type='ipam.ipaddress', field_name='object', is_generic=True), + 'ip_range': RefInfo(object_type='ipam.iprange', field_name='object', is_generic=True), + 
'ip_sec_policy': RefInfo(object_type='vpn.ipsecpolicy', field_name='object', is_generic=True), + 'ip_sec_profile': RefInfo(object_type='vpn.ipsecprofile', field_name='object', is_generic=True), + 'ip_sec_proposal': RefInfo(object_type='vpn.ipsecproposal', field_name='object', is_generic=True), 'interface': RefInfo(object_type='dcim.interface', field_name='object', is_generic=True), - 'inventoryItem': RefInfo(object_type='dcim.inventoryitem', field_name='object', is_generic=True), - 'inventoryItemRole': RefInfo(object_type='dcim.inventoryitemrole', field_name='object', is_generic=True), - 'l2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='object', is_generic=True), - 'l2VpnTermination': RefInfo(object_type='vpn.l2vpntermination', field_name='object', is_generic=True), + 'inventory_item': RefInfo(object_type='dcim.inventoryitem', field_name='object', is_generic=True), + 'inventory_item_role': RefInfo(object_type='dcim.inventoryitemrole', field_name='object', is_generic=True), + 'l2vpn': RefInfo(object_type='vpn.l2vpn', field_name='object', is_generic=True), + 'l2vpn_termination': RefInfo(object_type='vpn.l2vpntermination', field_name='object', is_generic=True), 'location': RefInfo(object_type='dcim.location', field_name='object', is_generic=True), - 'macAddress': RefInfo(object_type='dcim.macaddress', field_name='object', is_generic=True), + 'mac_address': RefInfo(object_type='dcim.macaddress', field_name='object', is_generic=True), 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='object', is_generic=True), 'module': RefInfo(object_type='dcim.module', field_name='object', is_generic=True), - 'moduleBay': RefInfo(object_type='dcim.modulebay', field_name='object', is_generic=True), - 'moduleType': RefInfo(object_type='dcim.moduletype', field_name='object', is_generic=True), + 'module_bay': RefInfo(object_type='dcim.modulebay', field_name='object', is_generic=True), + 'module_type': RefInfo(object_type='dcim.moduletype', field_name='object', 
is_generic=True), 'platform': RefInfo(object_type='dcim.platform', field_name='object', is_generic=True), - 'powerFeed': RefInfo(object_type='dcim.powerfeed', field_name='object', is_generic=True), - 'powerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='object', is_generic=True), - 'powerPanel': RefInfo(object_type='dcim.powerpanel', field_name='object', is_generic=True), - 'powerPort': RefInfo(object_type='dcim.powerport', field_name='object', is_generic=True), + 'power_feed': RefInfo(object_type='dcim.powerfeed', field_name='object', is_generic=True), + 'power_outlet': RefInfo(object_type='dcim.poweroutlet', field_name='object', is_generic=True), + 'power_panel': RefInfo(object_type='dcim.powerpanel', field_name='object', is_generic=True), + 'power_port': RefInfo(object_type='dcim.powerport', field_name='object', is_generic=True), 'prefix': RefInfo(object_type='ipam.prefix', field_name='object', is_generic=True), 'provider': RefInfo(object_type='circuits.provider', field_name='object', is_generic=True), - 'providerAccount': RefInfo(object_type='circuits.provideraccount', field_name='object', is_generic=True), - 'providerNetwork': RefInfo(object_type='circuits.providernetwork', field_name='object', is_generic=True), + 'provider_account': RefInfo(object_type='circuits.provideraccount', field_name='object', is_generic=True), + 'provider_network': RefInfo(object_type='circuits.providernetwork', field_name='object', is_generic=True), 'rir': RefInfo(object_type='ipam.rir', field_name='object', is_generic=True), 'rack': RefInfo(object_type='dcim.rack', field_name='object', is_generic=True), - 'rackReservation': RefInfo(object_type='dcim.rackreservation', field_name='object', is_generic=True), - 'rackRole': RefInfo(object_type='dcim.rackrole', field_name='object', is_generic=True), - 'rackType': RefInfo(object_type='dcim.racktype', field_name='object', is_generic=True), - 'rearPort': RefInfo(object_type='dcim.rearport', field_name='object', is_generic=True), 
+ 'rack_reservation': RefInfo(object_type='dcim.rackreservation', field_name='object', is_generic=True), + 'rack_role': RefInfo(object_type='dcim.rackrole', field_name='object', is_generic=True), + 'rack_type': RefInfo(object_type='dcim.racktype', field_name='object', is_generic=True), + 'rear_port': RefInfo(object_type='dcim.rearport', field_name='object', is_generic=True), 'region': RefInfo(object_type='dcim.region', field_name='object', is_generic=True), 'role': RefInfo(object_type='ipam.role', field_name='object', is_generic=True), - 'routeTarget': RefInfo(object_type='ipam.routetarget', field_name='object', is_generic=True), + 'route_target': RefInfo(object_type='ipam.routetarget', field_name='object', is_generic=True), 'service': RefInfo(object_type='ipam.service', field_name='object', is_generic=True), 'site': RefInfo(object_type='dcim.site', field_name='object', is_generic=True), - 'siteGroup': RefInfo(object_type='dcim.sitegroup', field_name='object', is_generic=True), + 'site_group': RefInfo(object_type='dcim.sitegroup', field_name='object', is_generic=True), 'tag': RefInfo(object_type='extras.tag', field_name='object', is_generic=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='object', is_generic=True), - 'tenantGroup': RefInfo(object_type='tenancy.tenantgroup', field_name='object', is_generic=True), + 'tenant_group': RefInfo(object_type='tenancy.tenantgroup', field_name='object', is_generic=True), 'tunnel': RefInfo(object_type='vpn.tunnel', field_name='object', is_generic=True), - 'tunnelGroup': RefInfo(object_type='vpn.tunnelgroup', field_name='object', is_generic=True), - 'tunnelTermination': RefInfo(object_type='vpn.tunneltermination', field_name='object', is_generic=True), + 'tunnel_group': RefInfo(object_type='vpn.tunnelgroup', field_name='object', is_generic=True), + 'tunnel_termination': RefInfo(object_type='vpn.tunneltermination', field_name='object', is_generic=True), 'vlan': RefInfo(object_type='ipam.vlan', 
field_name='object', is_generic=True), - 'vlanGroup': RefInfo(object_type='ipam.vlangroup', field_name='object', is_generic=True), - 'vlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='object', is_generic=True), - 'vlanTranslationRule': RefInfo(object_type='ipam.vlantranslationrule', field_name='object', is_generic=True), - 'vmInterface': RefInfo(object_type='virtualization.vminterface', field_name='object', is_generic=True), + 'vlan_group': RefInfo(object_type='ipam.vlangroup', field_name='object', is_generic=True), + 'vlan_translation_policy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='object', is_generic=True), + 'vlan_translation_rule': RefInfo(object_type='ipam.vlantranslationrule', field_name='object', is_generic=True), + 'vm_interface': RefInfo(object_type='virtualization.vminterface', field_name='object', is_generic=True), 'vrf': RefInfo(object_type='ipam.vrf', field_name='object', is_generic=True), - 'virtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='object', is_generic=True), - 'virtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='object', is_generic=True), - 'virtualCircuitTermination': RefInfo(object_type='circuits.virtualcircuittermination', field_name='object', is_generic=True), - 'virtualCircuitType': RefInfo(object_type='circuits.virtualcircuittype', field_name='object', is_generic=True), - 'virtualDeviceContext': RefInfo(object_type='dcim.virtualdevicecontext', field_name='object', is_generic=True), - 'virtualDisk': RefInfo(object_type='virtualization.virtualdisk', field_name='object', is_generic=True), - 'virtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='object', is_generic=True), - 'wirelessLan': RefInfo(object_type='wireless.wirelesslan', field_name='object', is_generic=True), - 'wirelessLanGroup': RefInfo(object_type='wireless.wirelesslangroup', field_name='object', is_generic=True), - 'wirelessLink': 
RefInfo(object_type='wireless.wirelesslink', field_name='object', is_generic=True), + 'virtual_chassis': RefInfo(object_type='dcim.virtualchassis', field_name='object', is_generic=True), + 'virtual_circuit': RefInfo(object_type='circuits.virtualcircuit', field_name='object', is_generic=True), + 'virtual_circuit_termination': RefInfo(object_type='circuits.virtualcircuittermination', field_name='object', is_generic=True), + 'virtual_circuit_type': RefInfo(object_type='circuits.virtualcircuittype', field_name='object', is_generic=True), + 'virtual_device_context': RefInfo(object_type='dcim.virtualdevicecontext', field_name='object', is_generic=True), + 'virtual_disk': RefInfo(object_type='virtualization.virtualdisk', field_name='object', is_generic=True), + 'virtual_machine': RefInfo(object_type='virtualization.virtualmachine', field_name='object', is_generic=True), + 'wireless_lan': RefInfo(object_type='wireless.wirelesslan', field_name='object', is_generic=True), + 'wireless_lan_group': RefInfo(object_type='wireless.wirelesslangroup', field_name='object', is_generic=True), + 'wireless_link': RefInfo(object_type='wireless.wirelesslink', field_name='object', is_generic=True), }, 'circuits.circuit': { 'assignments': RefInfo(object_type='circuits.circuitgroupassignment', field_name='assignments', is_many=True), 'provider': RefInfo(object_type='circuits.provider', field_name='provider'), - 'providerAccount': RefInfo(object_type='circuits.provideraccount', field_name='provider_account'), + 'provider_account': RefInfo(object_type='circuits.provideraccount', field_name='provider_account'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), 'type': RefInfo(object_type='circuits.circuittype', field_name='type'), @@ -146,18 +146,18 @@ class RefInfo: }, 'circuits.circuitgroupassignment': { 'group': RefInfo(object_type='circuits.circuitgroup', field_name='group'), - 'memberCircuit': 
RefInfo(object_type='circuits.circuit', field_name='member', is_generic=True), - 'memberVirtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='member', is_generic=True), + 'member_circuit': RefInfo(object_type='circuits.circuit', field_name='member', is_generic=True), + 'member_virtual_circuit': RefInfo(object_type='circuits.virtualcircuit', field_name='member', is_generic=True), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'circuits.circuittermination': { 'circuit': RefInfo(object_type='circuits.circuit', field_name='circuit'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), - 'terminationLocation': RefInfo(object_type='dcim.location', field_name='termination', is_generic=True), - 'terminationProviderNetwork': RefInfo(object_type='circuits.providernetwork', field_name='termination', is_generic=True), - 'terminationRegion': RefInfo(object_type='dcim.region', field_name='termination', is_generic=True), - 'terminationSite': RefInfo(object_type='dcim.site', field_name='termination', is_generic=True), - 'terminationSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='termination', is_generic=True), + 'termination_location': RefInfo(object_type='dcim.location', field_name='termination', is_generic=True), + 'termination_provider_network': RefInfo(object_type='circuits.providernetwork', field_name='termination', is_generic=True), + 'termination_region': RefInfo(object_type='dcim.region', field_name='termination', is_generic=True), + 'termination_site': RefInfo(object_type='dcim.site', field_name='termination', is_generic=True), + 'termination_site_group': RefInfo(object_type='dcim.sitegroup', field_name='termination', is_generic=True), }, 'circuits.circuittype': { 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), @@ -176,8 +176,8 @@ class RefInfo: 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'circuits.virtualcircuit': { - 
'providerAccount': RefInfo(object_type='circuits.provideraccount', field_name='provider_account'), - 'providerNetwork': RefInfo(object_type='circuits.providernetwork', field_name='provider_network'), + 'provider_account': RefInfo(object_type='circuits.provideraccount', field_name='provider_account'), + 'provider_network': RefInfo(object_type='circuits.providernetwork', field_name='provider_network'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), 'type': RefInfo(object_type='circuits.virtualcircuittype', field_name='type'), @@ -185,7 +185,7 @@ class RefInfo: 'circuits.virtualcircuittermination': { 'interface': RefInfo(object_type='dcim.interface', field_name='interface'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), - 'virtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='virtual_circuit'), + 'virtual_circuit': RefInfo(object_type='circuits.virtualcircuit', field_name='virtual_circuit'), }, 'circuits.virtualcircuittype': { 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), @@ -196,15 +196,15 @@ class RefInfo: }, 'dcim.cabletermination': { 'cable': RefInfo(object_type='dcim.cable', field_name='cable'), - 'terminationCircuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='termination', is_generic=True), - 'terminationConsolePort': RefInfo(object_type='dcim.consoleport', field_name='termination', is_generic=True), - 'terminationConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='termination', is_generic=True), - 'terminationFrontPort': RefInfo(object_type='dcim.frontport', field_name='termination', is_generic=True), - 'terminationInterface': RefInfo(object_type='dcim.interface', field_name='termination', is_generic=True), - 'terminationPowerFeed': RefInfo(object_type='dcim.powerfeed', field_name='termination', is_generic=True), - 
'terminationPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='termination', is_generic=True), - 'terminationPowerPort': RefInfo(object_type='dcim.powerport', field_name='termination', is_generic=True), - 'terminationRearPort': RefInfo(object_type='dcim.rearport', field_name='termination', is_generic=True), + 'termination_circuit_termination': RefInfo(object_type='circuits.circuittermination', field_name='termination', is_generic=True), + 'termination_console_port': RefInfo(object_type='dcim.consoleport', field_name='termination', is_generic=True), + 'termination_console_server_port': RefInfo(object_type='dcim.consoleserverport', field_name='termination', is_generic=True), + 'termination_front_port': RefInfo(object_type='dcim.frontport', field_name='termination', is_generic=True), + 'termination_interface': RefInfo(object_type='dcim.interface', field_name='termination', is_generic=True), + 'termination_power_feed': RefInfo(object_type='dcim.powerfeed', field_name='termination', is_generic=True), + 'termination_power_outlet': RefInfo(object_type='dcim.poweroutlet', field_name='termination', is_generic=True), + 'termination_power_port': RefInfo(object_type='dcim.powerport', field_name='termination', is_generic=True), + 'termination_rear_port': RefInfo(object_type='dcim.rearport', field_name='termination', is_generic=True), }, 'dcim.consoleport': { 'device': RefInfo(object_type='dcim.device', field_name='device'), @@ -218,36 +218,36 @@ class RefInfo: }, 'dcim.device': { 'cluster': RefInfo(object_type='virtualization.cluster', field_name='cluster'), - 'deviceType': RefInfo(object_type='dcim.devicetype', field_name='device_type'), + 'device_type': RefInfo(object_type='dcim.devicetype', field_name='device_type'), 'location': RefInfo(object_type='dcim.location', field_name='location'), - 'oobIp': RefInfo(object_type='ipam.ipaddress', field_name='oob_ip'), + 'oob_ip': RefInfo(object_type='ipam.ipaddress', field_name='oob_ip'), 'platform': 
RefInfo(object_type='dcim.platform', field_name='platform'), - 'primaryIp4': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip4'), - 'primaryIp6': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip6'), + 'primary_ip4': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip4'), + 'primary_ip6': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip6'), 'rack': RefInfo(object_type='dcim.rack', field_name='rack'), 'role': RefInfo(object_type='dcim.devicerole', field_name='role'), 'site': RefInfo(object_type='dcim.site', field_name='site'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'virtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='virtual_chassis'), + 'virtual_chassis': RefInfo(object_type='dcim.virtualchassis', field_name='virtual_chassis'), }, 'dcim.devicebay': { 'device': RefInfo(object_type='dcim.device', field_name='device'), - 'installedDevice': RefInfo(object_type='dcim.device', field_name='installed_device'), + 'installed_device': RefInfo(object_type='dcim.device', field_name='installed_device'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'dcim.devicerole': { 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'dcim.devicetype': { - 'defaultPlatform': RefInfo(object_type='dcim.platform', field_name='default_platform'), + 'default_platform': RefInfo(object_type='dcim.platform', field_name='default_platform'), 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'dcim.frontport': { 'device': RefInfo(object_type='dcim.device', field_name='device'), 'module': RefInfo(object_type='dcim.module', field_name='module'), - 'rearPort': RefInfo(object_type='dcim.rearport', field_name='rear_port'), + 'rear_port': 
RefInfo(object_type='dcim.rearport', field_name='rear_port'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'dcim.interface': { @@ -256,24 +256,24 @@ class RefInfo: 'lag': RefInfo(object_type='dcim.interface', field_name='lag'), 'module': RefInfo(object_type='dcim.module', field_name='module'), 'parent': RefInfo(object_type='dcim.interface', field_name='parent'), - 'primaryMacAddress': RefInfo(object_type='dcim.macaddress', field_name='primary_mac_address'), - 'qinqSvlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), - 'taggedVlans': RefInfo(object_type='ipam.vlan', field_name='tagged_vlans', is_many=True), + 'primary_mac_address': RefInfo(object_type='dcim.macaddress', field_name='primary_mac_address'), + 'qinq_svlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), + 'tagged_vlans': RefInfo(object_type='ipam.vlan', field_name='tagged_vlans', is_many=True), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), - 'untaggedVlan': RefInfo(object_type='ipam.vlan', field_name='untagged_vlan'), + 'untagged_vlan': RefInfo(object_type='ipam.vlan', field_name='untagged_vlan'), 'vdcs': RefInfo(object_type='dcim.virtualdevicecontext', field_name='vdcs', is_many=True), - 'vlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='vlan_translation_policy'), + 'vlan_translation_policy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='vlan_translation_policy'), 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), - 'wirelessLans': RefInfo(object_type='wireless.wirelesslan', field_name='wireless_lans', is_many=True), + 'wireless_lans': RefInfo(object_type='wireless.wirelesslan', field_name='wireless_lans', is_many=True), }, 'dcim.inventoryitem': { - 'componentConsolePort': RefInfo(object_type='dcim.consoleport', field_name='component', is_generic=True), - 'componentConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='component', 
is_generic=True), - 'componentFrontPort': RefInfo(object_type='dcim.frontport', field_name='component', is_generic=True), - 'componentInterface': RefInfo(object_type='dcim.interface', field_name='component', is_generic=True), - 'componentPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='component', is_generic=True), - 'componentPowerPort': RefInfo(object_type='dcim.powerport', field_name='component', is_generic=True), - 'componentRearPort': RefInfo(object_type='dcim.rearport', field_name='component', is_generic=True), + 'component_console_port': RefInfo(object_type='dcim.consoleport', field_name='component', is_generic=True), + 'component_console_server_port': RefInfo(object_type='dcim.consoleserverport', field_name='component', is_generic=True), + 'component_front_port': RefInfo(object_type='dcim.frontport', field_name='component', is_generic=True), + 'component_interface': RefInfo(object_type='dcim.interface', field_name='component', is_generic=True), + 'component_power_outlet': RefInfo(object_type='dcim.poweroutlet', field_name='component', is_generic=True), + 'component_power_port': RefInfo(object_type='dcim.powerport', field_name='component', is_generic=True), + 'component_rear_port': RefInfo(object_type='dcim.rearport', field_name='component', is_generic=True), 'device': RefInfo(object_type='dcim.device', field_name='device'), 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), 'parent': RefInfo(object_type='dcim.inventoryitem', field_name='parent'), @@ -290,8 +290,8 @@ class RefInfo: 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), }, 'dcim.macaddress': { - 'assignedObjectInterface': RefInfo(object_type='dcim.interface', field_name='assigned_object', is_generic=True), - 'assignedObjectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='assigned_object', is_generic=True), + 'assigned_object_interface': RefInfo(object_type='dcim.interface', 
field_name='assigned_object', is_generic=True), + 'assigned_object_vm_interface': RefInfo(object_type='virtualization.vminterface', field_name='assigned_object', is_generic=True), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'dcim.manufacturer': { @@ -299,13 +299,13 @@ class RefInfo: }, 'dcim.module': { 'device': RefInfo(object_type='dcim.device', field_name='device'), - 'moduleBay': RefInfo(object_type='dcim.modulebay', field_name='module_bay'), - 'moduleType': RefInfo(object_type='dcim.moduletype', field_name='module_type'), + 'module_bay': RefInfo(object_type='dcim.modulebay', field_name='module_bay'), + 'module_type': RefInfo(object_type='dcim.moduletype', field_name='module_type'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'dcim.modulebay': { 'device': RefInfo(object_type='dcim.device', field_name='device'), - 'installedModule': RefInfo(object_type='dcim.module', field_name='installed_module'), + 'installed_module': RefInfo(object_type='dcim.module', field_name='installed_module'), 'module': RefInfo(object_type='dcim.module', field_name='module'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, @@ -318,7 +318,7 @@ class RefInfo: 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'dcim.powerfeed': { - 'powerPanel': RefInfo(object_type='dcim.powerpanel', field_name='power_panel'), + 'power_panel': RefInfo(object_type='dcim.powerpanel', field_name='power_panel'), 'rack': RefInfo(object_type='dcim.rack', field_name='rack'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), @@ -326,7 +326,7 @@ class RefInfo: 'dcim.poweroutlet': { 'device': RefInfo(object_type='dcim.device', field_name='device'), 'module': RefInfo(object_type='dcim.module', field_name='module'), - 'powerPort': RefInfo(object_type='dcim.powerport', field_name='power_port'), + 
'power_port': RefInfo(object_type='dcim.powerport', field_name='power_port'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'dcim.powerpanel': { @@ -341,7 +341,7 @@ class RefInfo: }, 'dcim.rack': { 'location': RefInfo(object_type='dcim.location', field_name='location'), - 'rackType': RefInfo(object_type='dcim.racktype', field_name='rack_type'), + 'rack_type': RefInfo(object_type='dcim.racktype', field_name='rack_type'), 'role': RefInfo(object_type='dcim.rackrole', field_name='role'), 'site': RefInfo(object_type='dcim.site', field_name='site'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), @@ -385,8 +385,8 @@ class RefInfo: }, 'dcim.virtualdevicecontext': { 'device': RefInfo(object_type='dcim.device', field_name='device'), - 'primaryIp4': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip4'), - 'primaryIp6': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip6'), + 'primary_ip4': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip4'), + 'primary_ip6': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip6'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), }, @@ -410,100 +410,100 @@ class RefInfo: }, 'ipam.fhrpgroupassignment': { 'group': RefInfo(object_type='ipam.fhrpgroup', field_name='group'), - 'interfaceAsn': RefInfo(object_type='ipam.asn', field_name='interface', is_generic=True), - 'interfaceAsnRange': RefInfo(object_type='ipam.asnrange', field_name='interface', is_generic=True), - 'interfaceAggregate': RefInfo(object_type='ipam.aggregate', field_name='interface', is_generic=True), - 'interfaceCable': RefInfo(object_type='dcim.cable', field_name='interface', is_generic=True), - 'interfaceCablePath': RefInfo(object_type='dcim.cablepath', field_name='interface', is_generic=True), - 'interfaceCableTermination': RefInfo(object_type='dcim.cabletermination', 
field_name='interface', is_generic=True), - 'interfaceCircuit': RefInfo(object_type='circuits.circuit', field_name='interface', is_generic=True), - 'interfaceCircuitGroup': RefInfo(object_type='circuits.circuitgroup', field_name='interface', is_generic=True), - 'interfaceCircuitGroupAssignment': RefInfo(object_type='circuits.circuitgroupassignment', field_name='interface', is_generic=True), - 'interfaceCircuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='interface', is_generic=True), - 'interfaceCircuitType': RefInfo(object_type='circuits.circuittype', field_name='interface', is_generic=True), - 'interfaceCluster': RefInfo(object_type='virtualization.cluster', field_name='interface', is_generic=True), - 'interfaceClusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='interface', is_generic=True), - 'interfaceClusterType': RefInfo(object_type='virtualization.clustertype', field_name='interface', is_generic=True), - 'interfaceConsolePort': RefInfo(object_type='dcim.consoleport', field_name='interface', is_generic=True), - 'interfaceConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='interface', is_generic=True), - 'interfaceContact': RefInfo(object_type='tenancy.contact', field_name='interface', is_generic=True), - 'interfaceContactAssignment': RefInfo(object_type='tenancy.contactassignment', field_name='interface', is_generic=True), - 'interfaceContactGroup': RefInfo(object_type='tenancy.contactgroup', field_name='interface', is_generic=True), - 'interfaceContactRole': RefInfo(object_type='tenancy.contactrole', field_name='interface', is_generic=True), - 'interfaceDevice': RefInfo(object_type='dcim.device', field_name='interface', is_generic=True), - 'interfaceDeviceBay': RefInfo(object_type='dcim.devicebay', field_name='interface', is_generic=True), - 'interfaceDeviceRole': RefInfo(object_type='dcim.devicerole', field_name='interface', is_generic=True), - 'interfaceDeviceType': 
RefInfo(object_type='dcim.devicetype', field_name='interface', is_generic=True), - 'interfaceFhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='interface', is_generic=True), - 'interfaceFhrpGroupAssignment': RefInfo(object_type='ipam.fhrpgroupassignment', field_name='interface', is_generic=True), - 'interfaceFrontPort': RefInfo(object_type='dcim.frontport', field_name='interface', is_generic=True), - 'interfaceIkePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='interface', is_generic=True), - 'interfaceIkeProposal': RefInfo(object_type='vpn.ikeproposal', field_name='interface', is_generic=True), - 'interfaceIpAddress': RefInfo(object_type='ipam.ipaddress', field_name='interface', is_generic=True), - 'interfaceIpRange': RefInfo(object_type='ipam.iprange', field_name='interface', is_generic=True), - 'interfaceIpSecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='interface', is_generic=True), - 'interfaceIpSecProfile': RefInfo(object_type='vpn.ipsecprofile', field_name='interface', is_generic=True), - 'interfaceIpSecProposal': RefInfo(object_type='vpn.ipsecproposal', field_name='interface', is_generic=True), - 'interfaceInterface': RefInfo(object_type='dcim.interface', field_name='interface', is_generic=True), - 'interfaceInventoryItem': RefInfo(object_type='dcim.inventoryitem', field_name='interface', is_generic=True), - 'interfaceInventoryItemRole': RefInfo(object_type='dcim.inventoryitemrole', field_name='interface', is_generic=True), - 'interfaceL2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='interface', is_generic=True), - 'interfaceL2VpnTermination': RefInfo(object_type='vpn.l2vpntermination', field_name='interface', is_generic=True), - 'interfaceLocation': RefInfo(object_type='dcim.location', field_name='interface', is_generic=True), - 'interfaceMacAddress': RefInfo(object_type='dcim.macaddress', field_name='interface', is_generic=True), - 'interfaceManufacturer': RefInfo(object_type='dcim.manufacturer', 
field_name='interface', is_generic=True), - 'interfaceModule': RefInfo(object_type='dcim.module', field_name='interface', is_generic=True), - 'interfaceModuleBay': RefInfo(object_type='dcim.modulebay', field_name='interface', is_generic=True), - 'interfaceModuleType': RefInfo(object_type='dcim.moduletype', field_name='interface', is_generic=True), - 'interfacePlatform': RefInfo(object_type='dcim.platform', field_name='interface', is_generic=True), - 'interfacePowerFeed': RefInfo(object_type='dcim.powerfeed', field_name='interface', is_generic=True), - 'interfacePowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='interface', is_generic=True), - 'interfacePowerPanel': RefInfo(object_type='dcim.powerpanel', field_name='interface', is_generic=True), - 'interfacePowerPort': RefInfo(object_type='dcim.powerport', field_name='interface', is_generic=True), - 'interfacePrefix': RefInfo(object_type='ipam.prefix', field_name='interface', is_generic=True), - 'interfaceProvider': RefInfo(object_type='circuits.provider', field_name='interface', is_generic=True), - 'interfaceProviderAccount': RefInfo(object_type='circuits.provideraccount', field_name='interface', is_generic=True), - 'interfaceProviderNetwork': RefInfo(object_type='circuits.providernetwork', field_name='interface', is_generic=True), - 'interfaceRir': RefInfo(object_type='ipam.rir', field_name='interface', is_generic=True), - 'interfaceRack': RefInfo(object_type='dcim.rack', field_name='interface', is_generic=True), - 'interfaceRackReservation': RefInfo(object_type='dcim.rackreservation', field_name='interface', is_generic=True), - 'interfaceRackRole': RefInfo(object_type='dcim.rackrole', field_name='interface', is_generic=True), - 'interfaceRackType': RefInfo(object_type='dcim.racktype', field_name='interface', is_generic=True), - 'interfaceRearPort': RefInfo(object_type='dcim.rearport', field_name='interface', is_generic=True), - 'interfaceRegion': RefInfo(object_type='dcim.region', 
field_name='interface', is_generic=True), - 'interfaceRole': RefInfo(object_type='ipam.role', field_name='interface', is_generic=True), - 'interfaceRouteTarget': RefInfo(object_type='ipam.routetarget', field_name='interface', is_generic=True), - 'interfaceService': RefInfo(object_type='ipam.service', field_name='interface', is_generic=True), - 'interfaceSite': RefInfo(object_type='dcim.site', field_name='interface', is_generic=True), - 'interfaceSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='interface', is_generic=True), - 'interfaceTag': RefInfo(object_type='extras.tag', field_name='interface', is_generic=True), - 'interfaceTenant': RefInfo(object_type='tenancy.tenant', field_name='interface', is_generic=True), - 'interfaceTenantGroup': RefInfo(object_type='tenancy.tenantgroup', field_name='interface', is_generic=True), - 'interfaceTunnel': RefInfo(object_type='vpn.tunnel', field_name='interface', is_generic=True), - 'interfaceTunnelGroup': RefInfo(object_type='vpn.tunnelgroup', field_name='interface', is_generic=True), - 'interfaceTunnelTermination': RefInfo(object_type='vpn.tunneltermination', field_name='interface', is_generic=True), - 'interfaceVlan': RefInfo(object_type='ipam.vlan', field_name='interface', is_generic=True), - 'interfaceVlanGroup': RefInfo(object_type='ipam.vlangroup', field_name='interface', is_generic=True), - 'interfaceVlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='interface', is_generic=True), - 'interfaceVlanTranslationRule': RefInfo(object_type='ipam.vlantranslationrule', field_name='interface', is_generic=True), - 'interfaceVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='interface', is_generic=True), - 'interfaceVrf': RefInfo(object_type='ipam.vrf', field_name='interface', is_generic=True), - 'interfaceVirtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='interface', is_generic=True), - 'interfaceVirtualCircuit': 
RefInfo(object_type='circuits.virtualcircuit', field_name='interface', is_generic=True), - 'interfaceVirtualCircuitTermination': RefInfo(object_type='circuits.virtualcircuittermination', field_name='interface', is_generic=True), - 'interfaceVirtualCircuitType': RefInfo(object_type='circuits.virtualcircuittype', field_name='interface', is_generic=True), - 'interfaceVirtualDeviceContext': RefInfo(object_type='dcim.virtualdevicecontext', field_name='interface', is_generic=True), - 'interfaceVirtualDisk': RefInfo(object_type='virtualization.virtualdisk', field_name='interface', is_generic=True), - 'interfaceVirtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='interface', is_generic=True), - 'interfaceWirelessLan': RefInfo(object_type='wireless.wirelesslan', field_name='interface', is_generic=True), - 'interfaceWirelessLanGroup': RefInfo(object_type='wireless.wirelesslangroup', field_name='interface', is_generic=True), - 'interfaceWirelessLink': RefInfo(object_type='wireless.wirelesslink', field_name='interface', is_generic=True), + 'interface_asn': RefInfo(object_type='ipam.asn', field_name='interface', is_generic=True), + 'interface_asn_range': RefInfo(object_type='ipam.asnrange', field_name='interface', is_generic=True), + 'interface_aggregate': RefInfo(object_type='ipam.aggregate', field_name='interface', is_generic=True), + 'interface_cable': RefInfo(object_type='dcim.cable', field_name='interface', is_generic=True), + 'interface_cable_path': RefInfo(object_type='dcim.cablepath', field_name='interface', is_generic=True), + 'interface_cable_termination': RefInfo(object_type='dcim.cabletermination', field_name='interface', is_generic=True), + 'interface_circuit': RefInfo(object_type='circuits.circuit', field_name='interface', is_generic=True), + 'interface_circuit_group': RefInfo(object_type='circuits.circuitgroup', field_name='interface', is_generic=True), + 'interface_circuit_group_assignment': 
RefInfo(object_type='circuits.circuitgroupassignment', field_name='interface', is_generic=True), + 'interface_circuit_termination': RefInfo(object_type='circuits.circuittermination', field_name='interface', is_generic=True), + 'interface_circuit_type': RefInfo(object_type='circuits.circuittype', field_name='interface', is_generic=True), + 'interface_cluster': RefInfo(object_type='virtualization.cluster', field_name='interface', is_generic=True), + 'interface_cluster_group': RefInfo(object_type='virtualization.clustergroup', field_name='interface', is_generic=True), + 'interface_cluster_type': RefInfo(object_type='virtualization.clustertype', field_name='interface', is_generic=True), + 'interface_console_port': RefInfo(object_type='dcim.consoleport', field_name='interface', is_generic=True), + 'interface_console_server_port': RefInfo(object_type='dcim.consoleserverport', field_name='interface', is_generic=True), + 'interface_contact': RefInfo(object_type='tenancy.contact', field_name='interface', is_generic=True), + 'interface_contact_assignment': RefInfo(object_type='tenancy.contactassignment', field_name='interface', is_generic=True), + 'interface_contact_group': RefInfo(object_type='tenancy.contactgroup', field_name='interface', is_generic=True), + 'interface_contact_role': RefInfo(object_type='tenancy.contactrole', field_name='interface', is_generic=True), + 'interface_device': RefInfo(object_type='dcim.device', field_name='interface', is_generic=True), + 'interface_device_bay': RefInfo(object_type='dcim.devicebay', field_name='interface', is_generic=True), + 'interface_device_role': RefInfo(object_type='dcim.devicerole', field_name='interface', is_generic=True), + 'interface_device_type': RefInfo(object_type='dcim.devicetype', field_name='interface', is_generic=True), + 'interface_fhrp_group': RefInfo(object_type='ipam.fhrpgroup', field_name='interface', is_generic=True), + 'interface_fhrp_group_assignment': RefInfo(object_type='ipam.fhrpgroupassignment', 
field_name='interface', is_generic=True), + 'interface_front_port': RefInfo(object_type='dcim.frontport', field_name='interface', is_generic=True), + 'interface_ike_policy': RefInfo(object_type='vpn.ikepolicy', field_name='interface', is_generic=True), + 'interface_ike_proposal': RefInfo(object_type='vpn.ikeproposal', field_name='interface', is_generic=True), + 'interface_ip_address': RefInfo(object_type='ipam.ipaddress', field_name='interface', is_generic=True), + 'interface_ip_range': RefInfo(object_type='ipam.iprange', field_name='interface', is_generic=True), + 'interface_ip_sec_policy': RefInfo(object_type='vpn.ipsecpolicy', field_name='interface', is_generic=True), + 'interface_ip_sec_profile': RefInfo(object_type='vpn.ipsecprofile', field_name='interface', is_generic=True), + 'interface_ip_sec_proposal': RefInfo(object_type='vpn.ipsecproposal', field_name='interface', is_generic=True), + 'interface_interface': RefInfo(object_type='dcim.interface', field_name='interface', is_generic=True), + 'interface_inventory_item': RefInfo(object_type='dcim.inventoryitem', field_name='interface', is_generic=True), + 'interface_inventory_item_role': RefInfo(object_type='dcim.inventoryitemrole', field_name='interface', is_generic=True), + 'interface_l2vpn': RefInfo(object_type='vpn.l2vpn', field_name='interface', is_generic=True), + 'interface_l2vpn_termination': RefInfo(object_type='vpn.l2vpntermination', field_name='interface', is_generic=True), + 'interface_location': RefInfo(object_type='dcim.location', field_name='interface', is_generic=True), + 'interface_mac_address': RefInfo(object_type='dcim.macaddress', field_name='interface', is_generic=True), + 'interface_manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='interface', is_generic=True), + 'interface_module': RefInfo(object_type='dcim.module', field_name='interface', is_generic=True), + 'interface_module_bay': RefInfo(object_type='dcim.modulebay', field_name='interface', is_generic=True), + 
'interface_module_type': RefInfo(object_type='dcim.moduletype', field_name='interface', is_generic=True), + 'interface_platform': RefInfo(object_type='dcim.platform', field_name='interface', is_generic=True), + 'interface_power_feed': RefInfo(object_type='dcim.powerfeed', field_name='interface', is_generic=True), + 'interface_power_outlet': RefInfo(object_type='dcim.poweroutlet', field_name='interface', is_generic=True), + 'interface_power_panel': RefInfo(object_type='dcim.powerpanel', field_name='interface', is_generic=True), + 'interface_power_port': RefInfo(object_type='dcim.powerport', field_name='interface', is_generic=True), + 'interface_prefix': RefInfo(object_type='ipam.prefix', field_name='interface', is_generic=True), + 'interface_provider': RefInfo(object_type='circuits.provider', field_name='interface', is_generic=True), + 'interface_provider_account': RefInfo(object_type='circuits.provideraccount', field_name='interface', is_generic=True), + 'interface_provider_network': RefInfo(object_type='circuits.providernetwork', field_name='interface', is_generic=True), + 'interface_rir': RefInfo(object_type='ipam.rir', field_name='interface', is_generic=True), + 'interface_rack': RefInfo(object_type='dcim.rack', field_name='interface', is_generic=True), + 'interface_rack_reservation': RefInfo(object_type='dcim.rackreservation', field_name='interface', is_generic=True), + 'interface_rack_role': RefInfo(object_type='dcim.rackrole', field_name='interface', is_generic=True), + 'interface_rack_type': RefInfo(object_type='dcim.racktype', field_name='interface', is_generic=True), + 'interface_rear_port': RefInfo(object_type='dcim.rearport', field_name='interface', is_generic=True), + 'interface_region': RefInfo(object_type='dcim.region', field_name='interface', is_generic=True), + 'interface_role': RefInfo(object_type='ipam.role', field_name='interface', is_generic=True), + 'interface_route_target': RefInfo(object_type='ipam.routetarget', field_name='interface', 
is_generic=True), + 'interface_service': RefInfo(object_type='ipam.service', field_name='interface', is_generic=True), + 'interface_site': RefInfo(object_type='dcim.site', field_name='interface', is_generic=True), + 'interface_site_group': RefInfo(object_type='dcim.sitegroup', field_name='interface', is_generic=True), + 'interface_tag': RefInfo(object_type='extras.tag', field_name='interface', is_generic=True), + 'interface_tenant': RefInfo(object_type='tenancy.tenant', field_name='interface', is_generic=True), + 'interface_tenant_group': RefInfo(object_type='tenancy.tenantgroup', field_name='interface', is_generic=True), + 'interface_tunnel': RefInfo(object_type='vpn.tunnel', field_name='interface', is_generic=True), + 'interface_tunnel_group': RefInfo(object_type='vpn.tunnelgroup', field_name='interface', is_generic=True), + 'interface_tunnel_termination': RefInfo(object_type='vpn.tunneltermination', field_name='interface', is_generic=True), + 'interface_vlan': RefInfo(object_type='ipam.vlan', field_name='interface', is_generic=True), + 'interface_vlan_group': RefInfo(object_type='ipam.vlangroup', field_name='interface', is_generic=True), + 'interface_vlan_translation_policy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='interface', is_generic=True), + 'interface_vlan_translation_rule': RefInfo(object_type='ipam.vlantranslationrule', field_name='interface', is_generic=True), + 'interface_vm_interface': RefInfo(object_type='virtualization.vminterface', field_name='interface', is_generic=True), + 'interface_vrf': RefInfo(object_type='ipam.vrf', field_name='interface', is_generic=True), + 'interface_virtual_chassis': RefInfo(object_type='dcim.virtualchassis', field_name='interface', is_generic=True), + 'interface_virtual_circuit': RefInfo(object_type='circuits.virtualcircuit', field_name='interface', is_generic=True), + 'interface_virtual_circuit_termination': RefInfo(object_type='circuits.virtualcircuittermination', field_name='interface', 
is_generic=True), + 'interface_virtual_circuit_type': RefInfo(object_type='circuits.virtualcircuittype', field_name='interface', is_generic=True), + 'interface_virtual_device_context': RefInfo(object_type='dcim.virtualdevicecontext', field_name='interface', is_generic=True), + 'interface_virtual_disk': RefInfo(object_type='virtualization.virtualdisk', field_name='interface', is_generic=True), + 'interface_virtual_machine': RefInfo(object_type='virtualization.virtualmachine', field_name='interface', is_generic=True), + 'interface_wireless_lan': RefInfo(object_type='wireless.wirelesslan', field_name='interface', is_generic=True), + 'interface_wireless_lan_group': RefInfo(object_type='wireless.wirelesslangroup', field_name='interface', is_generic=True), + 'interface_wireless_link': RefInfo(object_type='wireless.wirelesslink', field_name='interface', is_generic=True), }, 'ipam.ipaddress': { - 'assignedObjectFhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='assigned_object', is_generic=True), - 'assignedObjectInterface': RefInfo(object_type='dcim.interface', field_name='assigned_object', is_generic=True), - 'assignedObjectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='assigned_object', is_generic=True), - 'natInside': RefInfo(object_type='ipam.ipaddress', field_name='nat_inside'), + 'assigned_object_fhrp_group': RefInfo(object_type='ipam.fhrpgroup', field_name='assigned_object', is_generic=True), + 'assigned_object_interface': RefInfo(object_type='dcim.interface', field_name='assigned_object', is_generic=True), + 'assigned_object_vm_interface': RefInfo(object_type='virtualization.vminterface', field_name='assigned_object', is_generic=True), + 'nat_inside': RefInfo(object_type='ipam.ipaddress', field_name='nat_inside'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), @@ -516,10 
+516,10 @@ class RefInfo: }, 'ipam.prefix': { 'role': RefInfo(object_type='ipam.role', field_name='role'), - 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), - 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), - 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), - 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), + 'scope_location': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), + 'scope_region': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), + 'scope_site': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), + 'scope_site_group': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), 'vlan': RefInfo(object_type='ipam.vlan', field_name='vlan'), @@ -539,32 +539,32 @@ class RefInfo: 'device': RefInfo(object_type='dcim.device', field_name='device'), 'ipaddresses': RefInfo(object_type='ipam.ipaddress', field_name='ipaddresses', is_many=True), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), - 'virtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), + 'virtual_machine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), }, 'ipam.vlan': { 'group': RefInfo(object_type='ipam.vlangroup', field_name='group'), - 'qinqSvlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), + 'qinq_svlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), 'role': RefInfo(object_type='ipam.role', field_name='role'), 'site': RefInfo(object_type='dcim.site', field_name='site'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': 
RefInfo(object_type='tenancy.tenant', field_name='tenant'), }, 'ipam.vlangroup': { - 'scopeCluster': RefInfo(object_type='virtualization.cluster', field_name='scope', is_generic=True), - 'scopeClusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='scope', is_generic=True), - 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), - 'scopeRack': RefInfo(object_type='dcim.rack', field_name='scope', is_generic=True), - 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), - 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), - 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), + 'scope_cluster': RefInfo(object_type='virtualization.cluster', field_name='scope', is_generic=True), + 'scope_cluster_group': RefInfo(object_type='virtualization.clustergroup', field_name='scope', is_generic=True), + 'scope_location': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), + 'scope_rack': RefInfo(object_type='dcim.rack', field_name='scope', is_generic=True), + 'scope_region': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), + 'scope_site': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), + 'scope_site_group': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'ipam.vlantranslationrule': { 'policy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='policy'), }, 'ipam.vrf': { - 'exportTargets': RefInfo(object_type='ipam.routetarget', field_name='export_targets', is_many=True), - 'importTargets': RefInfo(object_type='ipam.routetarget', field_name='import_targets', is_many=True), + 'export_targets': RefInfo(object_type='ipam.routetarget', field_name='export_targets', is_many=True), + 'import_targets': 
RefInfo(object_type='ipam.routetarget', field_name='import_targets', is_many=True), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), }, @@ -574,94 +574,94 @@ class RefInfo: }, 'tenancy.contactassignment': { 'contact': RefInfo(object_type='tenancy.contact', field_name='contact'), - 'objectAsn': RefInfo(object_type='ipam.asn', field_name='object', is_generic=True), - 'objectAsnRange': RefInfo(object_type='ipam.asnrange', field_name='object', is_generic=True), - 'objectAggregate': RefInfo(object_type='ipam.aggregate', field_name='object', is_generic=True), - 'objectCable': RefInfo(object_type='dcim.cable', field_name='object', is_generic=True), - 'objectCablePath': RefInfo(object_type='dcim.cablepath', field_name='object', is_generic=True), - 'objectCableTermination': RefInfo(object_type='dcim.cabletermination', field_name='object', is_generic=True), - 'objectCircuit': RefInfo(object_type='circuits.circuit', field_name='object', is_generic=True), - 'objectCircuitGroup': RefInfo(object_type='circuits.circuitgroup', field_name='object', is_generic=True), - 'objectCircuitGroupAssignment': RefInfo(object_type='circuits.circuitgroupassignment', field_name='object', is_generic=True), - 'objectCircuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='object', is_generic=True), - 'objectCircuitType': RefInfo(object_type='circuits.circuittype', field_name='object', is_generic=True), - 'objectCluster': RefInfo(object_type='virtualization.cluster', field_name='object', is_generic=True), - 'objectClusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='object', is_generic=True), - 'objectClusterType': RefInfo(object_type='virtualization.clustertype', field_name='object', is_generic=True), - 'objectConsolePort': RefInfo(object_type='dcim.consoleport', field_name='object', is_generic=True), - 'objectConsoleServerPort': 
RefInfo(object_type='dcim.consoleserverport', field_name='object', is_generic=True), - 'objectContact': RefInfo(object_type='tenancy.contact', field_name='object', is_generic=True), - 'objectContactAssignment': RefInfo(object_type='tenancy.contactassignment', field_name='object', is_generic=True), - 'objectContactGroup': RefInfo(object_type='tenancy.contactgroup', field_name='object', is_generic=True), - 'objectContactRole': RefInfo(object_type='tenancy.contactrole', field_name='object', is_generic=True), - 'objectDevice': RefInfo(object_type='dcim.device', field_name='object', is_generic=True), - 'objectDeviceBay': RefInfo(object_type='dcim.devicebay', field_name='object', is_generic=True), - 'objectDeviceRole': RefInfo(object_type='dcim.devicerole', field_name='object', is_generic=True), - 'objectDeviceType': RefInfo(object_type='dcim.devicetype', field_name='object', is_generic=True), - 'objectFhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='object', is_generic=True), - 'objectFhrpGroupAssignment': RefInfo(object_type='ipam.fhrpgroupassignment', field_name='object', is_generic=True), - 'objectFrontPort': RefInfo(object_type='dcim.frontport', field_name='object', is_generic=True), - 'objectIkePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='object', is_generic=True), - 'objectIkeProposal': RefInfo(object_type='vpn.ikeproposal', field_name='object', is_generic=True), - 'objectIpAddress': RefInfo(object_type='ipam.ipaddress', field_name='object', is_generic=True), - 'objectIpRange': RefInfo(object_type='ipam.iprange', field_name='object', is_generic=True), - 'objectIpSecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='object', is_generic=True), - 'objectIpSecProfile': RefInfo(object_type='vpn.ipsecprofile', field_name='object', is_generic=True), - 'objectIpSecProposal': RefInfo(object_type='vpn.ipsecproposal', field_name='object', is_generic=True), - 'objectInterface': RefInfo(object_type='dcim.interface', field_name='object', 
is_generic=True), - 'objectInventoryItem': RefInfo(object_type='dcim.inventoryitem', field_name='object', is_generic=True), - 'objectInventoryItemRole': RefInfo(object_type='dcim.inventoryitemrole', field_name='object', is_generic=True), - 'objectL2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='object', is_generic=True), - 'objectL2VpnTermination': RefInfo(object_type='vpn.l2vpntermination', field_name='object', is_generic=True), - 'objectLocation': RefInfo(object_type='dcim.location', field_name='object', is_generic=True), - 'objectMacAddress': RefInfo(object_type='dcim.macaddress', field_name='object', is_generic=True), - 'objectManufacturer': RefInfo(object_type='dcim.manufacturer', field_name='object', is_generic=True), - 'objectModule': RefInfo(object_type='dcim.module', field_name='object', is_generic=True), - 'objectModuleBay': RefInfo(object_type='dcim.modulebay', field_name='object', is_generic=True), - 'objectModuleType': RefInfo(object_type='dcim.moduletype', field_name='object', is_generic=True), - 'objectPlatform': RefInfo(object_type='dcim.platform', field_name='object', is_generic=True), - 'objectPowerFeed': RefInfo(object_type='dcim.powerfeed', field_name='object', is_generic=True), - 'objectPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='object', is_generic=True), - 'objectPowerPanel': RefInfo(object_type='dcim.powerpanel', field_name='object', is_generic=True), - 'objectPowerPort': RefInfo(object_type='dcim.powerport', field_name='object', is_generic=True), - 'objectPrefix': RefInfo(object_type='ipam.prefix', field_name='object', is_generic=True), - 'objectProvider': RefInfo(object_type='circuits.provider', field_name='object', is_generic=True), - 'objectProviderAccount': RefInfo(object_type='circuits.provideraccount', field_name='object', is_generic=True), - 'objectProviderNetwork': RefInfo(object_type='circuits.providernetwork', field_name='object', is_generic=True), - 'objectRir': RefInfo(object_type='ipam.rir', 
field_name='object', is_generic=True), - 'objectRack': RefInfo(object_type='dcim.rack', field_name='object', is_generic=True), - 'objectRackReservation': RefInfo(object_type='dcim.rackreservation', field_name='object', is_generic=True), - 'objectRackRole': RefInfo(object_type='dcim.rackrole', field_name='object', is_generic=True), - 'objectRackType': RefInfo(object_type='dcim.racktype', field_name='object', is_generic=True), - 'objectRearPort': RefInfo(object_type='dcim.rearport', field_name='object', is_generic=True), - 'objectRegion': RefInfo(object_type='dcim.region', field_name='object', is_generic=True), - 'objectRole': RefInfo(object_type='ipam.role', field_name='object', is_generic=True), - 'objectRouteTarget': RefInfo(object_type='ipam.routetarget', field_name='object', is_generic=True), - 'objectService': RefInfo(object_type='ipam.service', field_name='object', is_generic=True), - 'objectSite': RefInfo(object_type='dcim.site', field_name='object', is_generic=True), - 'objectSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='object', is_generic=True), - 'objectTag': RefInfo(object_type='extras.tag', field_name='object', is_generic=True), - 'objectTenant': RefInfo(object_type='tenancy.tenant', field_name='object', is_generic=True), - 'objectTenantGroup': RefInfo(object_type='tenancy.tenantgroup', field_name='object', is_generic=True), - 'objectTunnel': RefInfo(object_type='vpn.tunnel', field_name='object', is_generic=True), - 'objectTunnelGroup': RefInfo(object_type='vpn.tunnelgroup', field_name='object', is_generic=True), - 'objectTunnelTermination': RefInfo(object_type='vpn.tunneltermination', field_name='object', is_generic=True), - 'objectVlan': RefInfo(object_type='ipam.vlan', field_name='object', is_generic=True), - 'objectVlanGroup': RefInfo(object_type='ipam.vlangroup', field_name='object', is_generic=True), - 'objectVlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='object', is_generic=True), - 
'objectVlanTranslationRule': RefInfo(object_type='ipam.vlantranslationrule', field_name='object', is_generic=True), - 'objectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='object', is_generic=True), - 'objectVrf': RefInfo(object_type='ipam.vrf', field_name='object', is_generic=True), - 'objectVirtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='object', is_generic=True), - 'objectVirtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='object', is_generic=True), - 'objectVirtualCircuitTermination': RefInfo(object_type='circuits.virtualcircuittermination', field_name='object', is_generic=True), - 'objectVirtualCircuitType': RefInfo(object_type='circuits.virtualcircuittype', field_name='object', is_generic=True), - 'objectVirtualDeviceContext': RefInfo(object_type='dcim.virtualdevicecontext', field_name='object', is_generic=True), - 'objectVirtualDisk': RefInfo(object_type='virtualization.virtualdisk', field_name='object', is_generic=True), - 'objectVirtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='object', is_generic=True), - 'objectWirelessLan': RefInfo(object_type='wireless.wirelesslan', field_name='object', is_generic=True), - 'objectWirelessLanGroup': RefInfo(object_type='wireless.wirelesslangroup', field_name='object', is_generic=True), - 'objectWirelessLink': RefInfo(object_type='wireless.wirelesslink', field_name='object', is_generic=True), + 'object_asn': RefInfo(object_type='ipam.asn', field_name='object', is_generic=True), + 'object_asn_range': RefInfo(object_type='ipam.asnrange', field_name='object', is_generic=True), + 'object_aggregate': RefInfo(object_type='ipam.aggregate', field_name='object', is_generic=True), + 'object_cable': RefInfo(object_type='dcim.cable', field_name='object', is_generic=True), + 'object_cable_path': RefInfo(object_type='dcim.cablepath', field_name='object', is_generic=True), + 'object_cable_termination': 
RefInfo(object_type='dcim.cabletermination', field_name='object', is_generic=True), + 'object_circuit': RefInfo(object_type='circuits.circuit', field_name='object', is_generic=True), + 'object_circuit_group': RefInfo(object_type='circuits.circuitgroup', field_name='object', is_generic=True), + 'object_circuit_group_assignment': RefInfo(object_type='circuits.circuitgroupassignment', field_name='object', is_generic=True), + 'object_circuit_termination': RefInfo(object_type='circuits.circuittermination', field_name='object', is_generic=True), + 'object_circuit_type': RefInfo(object_type='circuits.circuittype', field_name='object', is_generic=True), + 'object_cluster': RefInfo(object_type='virtualization.cluster', field_name='object', is_generic=True), + 'object_cluster_group': RefInfo(object_type='virtualization.clustergroup', field_name='object', is_generic=True), + 'object_cluster_type': RefInfo(object_type='virtualization.clustertype', field_name='object', is_generic=True), + 'object_console_port': RefInfo(object_type='dcim.consoleport', field_name='object', is_generic=True), + 'object_console_server_port': RefInfo(object_type='dcim.consoleserverport', field_name='object', is_generic=True), + 'object_contact': RefInfo(object_type='tenancy.contact', field_name='object', is_generic=True), + 'object_contact_assignment': RefInfo(object_type='tenancy.contactassignment', field_name='object', is_generic=True), + 'object_contact_group': RefInfo(object_type='tenancy.contactgroup', field_name='object', is_generic=True), + 'object_contact_role': RefInfo(object_type='tenancy.contactrole', field_name='object', is_generic=True), + 'object_device': RefInfo(object_type='dcim.device', field_name='object', is_generic=True), + 'object_device_bay': RefInfo(object_type='dcim.devicebay', field_name='object', is_generic=True), + 'object_device_role': RefInfo(object_type='dcim.devicerole', field_name='object', is_generic=True), + 'object_device_type': 
RefInfo(object_type='dcim.devicetype', field_name='object', is_generic=True), + 'object_fhrp_group': RefInfo(object_type='ipam.fhrpgroup', field_name='object', is_generic=True), + 'object_fhrp_group_assignment': RefInfo(object_type='ipam.fhrpgroupassignment', field_name='object', is_generic=True), + 'object_front_port': RefInfo(object_type='dcim.frontport', field_name='object', is_generic=True), + 'object_ike_policy': RefInfo(object_type='vpn.ikepolicy', field_name='object', is_generic=True), + 'object_ike_proposal': RefInfo(object_type='vpn.ikeproposal', field_name='object', is_generic=True), + 'object_ip_address': RefInfo(object_type='ipam.ipaddress', field_name='object', is_generic=True), + 'object_ip_range': RefInfo(object_type='ipam.iprange', field_name='object', is_generic=True), + 'object_ip_sec_policy': RefInfo(object_type='vpn.ipsecpolicy', field_name='object', is_generic=True), + 'object_ip_sec_profile': RefInfo(object_type='vpn.ipsecprofile', field_name='object', is_generic=True), + 'object_ip_sec_proposal': RefInfo(object_type='vpn.ipsecproposal', field_name='object', is_generic=True), + 'object_interface': RefInfo(object_type='dcim.interface', field_name='object', is_generic=True), + 'object_inventory_item': RefInfo(object_type='dcim.inventoryitem', field_name='object', is_generic=True), + 'object_inventory_item_role': RefInfo(object_type='dcim.inventoryitemrole', field_name='object', is_generic=True), + 'object_l2vpn': RefInfo(object_type='vpn.l2vpn', field_name='object', is_generic=True), + 'object_l2vpn_termination': RefInfo(object_type='vpn.l2vpntermination', field_name='object', is_generic=True), + 'object_location': RefInfo(object_type='dcim.location', field_name='object', is_generic=True), + 'object_mac_address': RefInfo(object_type='dcim.macaddress', field_name='object', is_generic=True), + 'object_manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='object', is_generic=True), + 'object_module': RefInfo(object_type='dcim.module', 
field_name='object', is_generic=True), + 'object_module_bay': RefInfo(object_type='dcim.modulebay', field_name='object', is_generic=True), + 'object_module_type': RefInfo(object_type='dcim.moduletype', field_name='object', is_generic=True), + 'object_platform': RefInfo(object_type='dcim.platform', field_name='object', is_generic=True), + 'object_power_feed': RefInfo(object_type='dcim.powerfeed', field_name='object', is_generic=True), + 'object_power_outlet': RefInfo(object_type='dcim.poweroutlet', field_name='object', is_generic=True), + 'object_power_panel': RefInfo(object_type='dcim.powerpanel', field_name='object', is_generic=True), + 'object_power_port': RefInfo(object_type='dcim.powerport', field_name='object', is_generic=True), + 'object_prefix': RefInfo(object_type='ipam.prefix', field_name='object', is_generic=True), + 'object_provider': RefInfo(object_type='circuits.provider', field_name='object', is_generic=True), + 'object_provider_account': RefInfo(object_type='circuits.provideraccount', field_name='object', is_generic=True), + 'object_provider_network': RefInfo(object_type='circuits.providernetwork', field_name='object', is_generic=True), + 'object_rir': RefInfo(object_type='ipam.rir', field_name='object', is_generic=True), + 'object_rack': RefInfo(object_type='dcim.rack', field_name='object', is_generic=True), + 'object_rack_reservation': RefInfo(object_type='dcim.rackreservation', field_name='object', is_generic=True), + 'object_rack_role': RefInfo(object_type='dcim.rackrole', field_name='object', is_generic=True), + 'object_rack_type': RefInfo(object_type='dcim.racktype', field_name='object', is_generic=True), + 'object_rear_port': RefInfo(object_type='dcim.rearport', field_name='object', is_generic=True), + 'object_region': RefInfo(object_type='dcim.region', field_name='object', is_generic=True), + 'object_role': RefInfo(object_type='ipam.role', field_name='object', is_generic=True), + 'object_route_target': RefInfo(object_type='ipam.routetarget', 
field_name='object', is_generic=True), + 'object_service': RefInfo(object_type='ipam.service', field_name='object', is_generic=True), + 'object_site': RefInfo(object_type='dcim.site', field_name='object', is_generic=True), + 'object_site_group': RefInfo(object_type='dcim.sitegroup', field_name='object', is_generic=True), + 'object_tag': RefInfo(object_type='extras.tag', field_name='object', is_generic=True), + 'object_tenant': RefInfo(object_type='tenancy.tenant', field_name='object', is_generic=True), + 'object_tenant_group': RefInfo(object_type='tenancy.tenantgroup', field_name='object', is_generic=True), + 'object_tunnel': RefInfo(object_type='vpn.tunnel', field_name='object', is_generic=True), + 'object_tunnel_group': RefInfo(object_type='vpn.tunnelgroup', field_name='object', is_generic=True), + 'object_tunnel_termination': RefInfo(object_type='vpn.tunneltermination', field_name='object', is_generic=True), + 'object_vlan': RefInfo(object_type='ipam.vlan', field_name='object', is_generic=True), + 'object_vlan_group': RefInfo(object_type='ipam.vlangroup', field_name='object', is_generic=True), + 'object_vlan_translation_policy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='object', is_generic=True), + 'object_vlan_translation_rule': RefInfo(object_type='ipam.vlantranslationrule', field_name='object', is_generic=True), + 'object_vm_interface': RefInfo(object_type='virtualization.vminterface', field_name='object', is_generic=True), + 'object_vrf': RefInfo(object_type='ipam.vrf', field_name='object', is_generic=True), + 'object_virtual_chassis': RefInfo(object_type='dcim.virtualchassis', field_name='object', is_generic=True), + 'object_virtual_circuit': RefInfo(object_type='circuits.virtualcircuit', field_name='object', is_generic=True), + 'object_virtual_circuit_termination': RefInfo(object_type='circuits.virtualcircuittermination', field_name='object', is_generic=True), + 'object_virtual_circuit_type': 
RefInfo(object_type='circuits.virtualcircuittype', field_name='object', is_generic=True), + 'object_virtual_device_context': RefInfo(object_type='dcim.virtualdevicecontext', field_name='object', is_generic=True), + 'object_virtual_disk': RefInfo(object_type='virtualization.virtualdisk', field_name='object', is_generic=True), + 'object_virtual_machine': RefInfo(object_type='virtualization.virtualmachine', field_name='object', is_generic=True), + 'object_wireless_lan': RefInfo(object_type='wireless.wirelesslan', field_name='object', is_generic=True), + 'object_wireless_lan_group': RefInfo(object_type='wireless.wirelesslangroup', field_name='object', is_generic=True), + 'object_wireless_link': RefInfo(object_type='wireless.wirelesslink', field_name='object', is_generic=True), 'role': RefInfo(object_type='tenancy.contactrole', field_name='role'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, @@ -682,10 +682,10 @@ class RefInfo: }, 'virtualization.cluster': { 'group': RefInfo(object_type='virtualization.clustergroup', field_name='group'), - 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), - 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), - 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), - 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), + 'scope_location': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), + 'scope_region': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), + 'scope_site': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), + 'scope_site_group': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), 'type': 
RefInfo(object_type='virtualization.clustertype', field_name='type'), @@ -698,14 +698,14 @@ class RefInfo: }, 'virtualization.virtualdisk': { 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), - 'virtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), + 'virtual_machine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), }, 'virtualization.virtualmachine': { 'cluster': RefInfo(object_type='virtualization.cluster', field_name='cluster'), 'device': RefInfo(object_type='dcim.device', field_name='device'), 'platform': RefInfo(object_type='dcim.platform', field_name='platform'), - 'primaryIp4': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip4'), - 'primaryIp6': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip6'), + 'primary_ip4': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip4'), + 'primary_ip6': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip6'), 'role': RefInfo(object_type='dcim.devicerole', field_name='role'), 'site': RefInfo(object_type='dcim.site', field_name='site'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), @@ -714,13 +714,13 @@ class RefInfo: 'virtualization.vminterface': { 'bridge': RefInfo(object_type='virtualization.vminterface', field_name='bridge'), 'parent': RefInfo(object_type='virtualization.vminterface', field_name='parent'), - 'primaryMacAddress': RefInfo(object_type='dcim.macaddress', field_name='primary_mac_address'), - 'qinqSvlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), - 'taggedVlans': RefInfo(object_type='ipam.vlan', field_name='tagged_vlans', is_many=True), + 'primary_mac_address': RefInfo(object_type='dcim.macaddress', field_name='primary_mac_address'), + 'qinq_svlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), + 'tagged_vlans': RefInfo(object_type='ipam.vlan', field_name='tagged_vlans', is_many=True), 'tags': 
RefInfo(object_type='extras.tag', field_name='tags', is_many=True), - 'untaggedVlan': RefInfo(object_type='ipam.vlan', field_name='untagged_vlan'), - 'virtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), - 'vlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='vlan_translation_policy'), + 'untagged_vlan': RefInfo(object_type='ipam.vlan', field_name='untagged_vlan'), + 'virtual_machine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), + 'vlan_translation_policy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='vlan_translation_policy'), 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), }, 'vpn.ikepolicy': { @@ -735,29 +735,29 @@ class RefInfo: 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'vpn.ipsecprofile': { - 'ikePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='ike_policy'), - 'ipsecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='ipsec_policy'), + 'ike_policy': RefInfo(object_type='vpn.ikepolicy', field_name='ike_policy'), + 'ipsec_policy': RefInfo(object_type='vpn.ipsecpolicy', field_name='ipsec_policy'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'vpn.ipsecproposal': { 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'vpn.l2vpn': { - 'exportTargets': RefInfo(object_type='ipam.routetarget', field_name='export_targets', is_many=True), - 'importTargets': RefInfo(object_type='ipam.routetarget', field_name='import_targets', is_many=True), + 'export_targets': RefInfo(object_type='ipam.routetarget', field_name='export_targets', is_many=True), + 'import_targets': RefInfo(object_type='ipam.routetarget', field_name='import_targets', is_many=True), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), }, 'vpn.l2vpntermination': 
{ - 'assignedObjectInterface': RefInfo(object_type='dcim.interface', field_name='assigned_object', is_generic=True), - 'assignedObjectVlan': RefInfo(object_type='ipam.vlan', field_name='assigned_object', is_generic=True), - 'assignedObjectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='assigned_object', is_generic=True), - 'l2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='l2vpn'), + 'assigned_object_interface': RefInfo(object_type='dcim.interface', field_name='assigned_object', is_generic=True), + 'assigned_object_vlan': RefInfo(object_type='ipam.vlan', field_name='assigned_object', is_generic=True), + 'assigned_object_vm_interface': RefInfo(object_type='virtualization.vminterface', field_name='assigned_object', is_generic=True), + 'l2vpn': RefInfo(object_type='vpn.l2vpn', field_name='l2vpn'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'vpn.tunnel': { 'group': RefInfo(object_type='vpn.tunnelgroup', field_name='group'), - 'ipsecProfile': RefInfo(object_type='vpn.ipsecprofile', field_name='ipsec_profile'), + 'ipsec_profile': RefInfo(object_type='vpn.ipsecprofile', field_name='ipsec_profile'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), }, @@ -765,104 +765,104 @@ class RefInfo: 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'vpn.tunneltermination': { - 'outsideIp': RefInfo(object_type='ipam.ipaddress', field_name='outside_ip'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), - 'terminationAsn': RefInfo(object_type='ipam.asn', field_name='termination', is_generic=True), - 'terminationAsnRange': RefInfo(object_type='ipam.asnrange', field_name='termination', is_generic=True), - 'terminationAggregate': RefInfo(object_type='ipam.aggregate', field_name='termination', is_generic=True), - 'terminationCable': RefInfo(object_type='dcim.cable', 
field_name='termination', is_generic=True), - 'terminationCablePath': RefInfo(object_type='dcim.cablepath', field_name='termination', is_generic=True), - 'terminationCableTermination': RefInfo(object_type='dcim.cabletermination', field_name='termination', is_generic=True), - 'terminationCircuit': RefInfo(object_type='circuits.circuit', field_name='termination', is_generic=True), - 'terminationCircuitGroup': RefInfo(object_type='circuits.circuitgroup', field_name='termination', is_generic=True), - 'terminationCircuitGroupAssignment': RefInfo(object_type='circuits.circuitgroupassignment', field_name='termination', is_generic=True), - 'terminationCircuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='termination', is_generic=True), - 'terminationCircuitType': RefInfo(object_type='circuits.circuittype', field_name='termination', is_generic=True), - 'terminationCluster': RefInfo(object_type='virtualization.cluster', field_name='termination', is_generic=True), - 'terminationClusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='termination', is_generic=True), - 'terminationClusterType': RefInfo(object_type='virtualization.clustertype', field_name='termination', is_generic=True), - 'terminationConsolePort': RefInfo(object_type='dcim.consoleport', field_name='termination', is_generic=True), - 'terminationConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='termination', is_generic=True), - 'terminationContact': RefInfo(object_type='tenancy.contact', field_name='termination', is_generic=True), - 'terminationContactAssignment': RefInfo(object_type='tenancy.contactassignment', field_name='termination', is_generic=True), - 'terminationContactGroup': RefInfo(object_type='tenancy.contactgroup', field_name='termination', is_generic=True), - 'terminationContactRole': RefInfo(object_type='tenancy.contactrole', field_name='termination', is_generic=True), - 'terminationDevice': 
RefInfo(object_type='dcim.device', field_name='termination', is_generic=True), - 'terminationDeviceBay': RefInfo(object_type='dcim.devicebay', field_name='termination', is_generic=True), - 'terminationDeviceRole': RefInfo(object_type='dcim.devicerole', field_name='termination', is_generic=True), - 'terminationDeviceType': RefInfo(object_type='dcim.devicetype', field_name='termination', is_generic=True), - 'terminationFhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='termination', is_generic=True), - 'terminationFhrpGroupAssignment': RefInfo(object_type='ipam.fhrpgroupassignment', field_name='termination', is_generic=True), - 'terminationFrontPort': RefInfo(object_type='dcim.frontport', field_name='termination', is_generic=True), - 'terminationIkePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='termination', is_generic=True), - 'terminationIkeProposal': RefInfo(object_type='vpn.ikeproposal', field_name='termination', is_generic=True), - 'terminationIpAddress': RefInfo(object_type='ipam.ipaddress', field_name='termination', is_generic=True), - 'terminationIpRange': RefInfo(object_type='ipam.iprange', field_name='termination', is_generic=True), - 'terminationIpSecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='termination', is_generic=True), - 'terminationIpSecProfile': RefInfo(object_type='vpn.ipsecprofile', field_name='termination', is_generic=True), - 'terminationIpSecProposal': RefInfo(object_type='vpn.ipsecproposal', field_name='termination', is_generic=True), - 'terminationInterface': RefInfo(object_type='dcim.interface', field_name='termination', is_generic=True), - 'terminationInventoryItem': RefInfo(object_type='dcim.inventoryitem', field_name='termination', is_generic=True), - 'terminationInventoryItemRole': RefInfo(object_type='dcim.inventoryitemrole', field_name='termination', is_generic=True), - 'terminationL2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='termination', is_generic=True), - 'terminationL2VpnTermination': 
RefInfo(object_type='vpn.l2vpntermination', field_name='termination', is_generic=True), - 'terminationLocation': RefInfo(object_type='dcim.location', field_name='termination', is_generic=True), - 'terminationMacAddress': RefInfo(object_type='dcim.macaddress', field_name='termination', is_generic=True), - 'terminationManufacturer': RefInfo(object_type='dcim.manufacturer', field_name='termination', is_generic=True), - 'terminationModule': RefInfo(object_type='dcim.module', field_name='termination', is_generic=True), - 'terminationModuleBay': RefInfo(object_type='dcim.modulebay', field_name='termination', is_generic=True), - 'terminationModuleType': RefInfo(object_type='dcim.moduletype', field_name='termination', is_generic=True), - 'terminationPlatform': RefInfo(object_type='dcim.platform', field_name='termination', is_generic=True), - 'terminationPowerFeed': RefInfo(object_type='dcim.powerfeed', field_name='termination', is_generic=True), - 'terminationPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='termination', is_generic=True), - 'terminationPowerPanel': RefInfo(object_type='dcim.powerpanel', field_name='termination', is_generic=True), - 'terminationPowerPort': RefInfo(object_type='dcim.powerport', field_name='termination', is_generic=True), - 'terminationPrefix': RefInfo(object_type='ipam.prefix', field_name='termination', is_generic=True), - 'terminationProvider': RefInfo(object_type='circuits.provider', field_name='termination', is_generic=True), - 'terminationProviderAccount': RefInfo(object_type='circuits.provideraccount', field_name='termination', is_generic=True), - 'terminationProviderNetwork': RefInfo(object_type='circuits.providernetwork', field_name='termination', is_generic=True), - 'terminationRir': RefInfo(object_type='ipam.rir', field_name='termination', is_generic=True), - 'terminationRack': RefInfo(object_type='dcim.rack', field_name='termination', is_generic=True), - 'terminationRackReservation': 
RefInfo(object_type='dcim.rackreservation', field_name='termination', is_generic=True), - 'terminationRackRole': RefInfo(object_type='dcim.rackrole', field_name='termination', is_generic=True), - 'terminationRackType': RefInfo(object_type='dcim.racktype', field_name='termination', is_generic=True), - 'terminationRearPort': RefInfo(object_type='dcim.rearport', field_name='termination', is_generic=True), - 'terminationRegion': RefInfo(object_type='dcim.region', field_name='termination', is_generic=True), - 'terminationRole': RefInfo(object_type='ipam.role', field_name='termination', is_generic=True), - 'terminationRouteTarget': RefInfo(object_type='ipam.routetarget', field_name='termination', is_generic=True), - 'terminationService': RefInfo(object_type='ipam.service', field_name='termination', is_generic=True), - 'terminationSite': RefInfo(object_type='dcim.site', field_name='termination', is_generic=True), - 'terminationSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='termination', is_generic=True), - 'terminationTag': RefInfo(object_type='extras.tag', field_name='termination', is_generic=True), - 'terminationTenant': RefInfo(object_type='tenancy.tenant', field_name='termination', is_generic=True), - 'terminationTenantGroup': RefInfo(object_type='tenancy.tenantgroup', field_name='termination', is_generic=True), - 'terminationTunnel': RefInfo(object_type='vpn.tunnel', field_name='termination', is_generic=True), - 'terminationTunnelGroup': RefInfo(object_type='vpn.tunnelgroup', field_name='termination', is_generic=True), - 'terminationTunnelTermination': RefInfo(object_type='vpn.tunneltermination', field_name='termination', is_generic=True), - 'terminationVlan': RefInfo(object_type='ipam.vlan', field_name='termination', is_generic=True), - 'terminationVlanGroup': RefInfo(object_type='ipam.vlangroup', field_name='termination', is_generic=True), - 'terminationVlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', 
field_name='termination', is_generic=True), - 'terminationVlanTranslationRule': RefInfo(object_type='ipam.vlantranslationrule', field_name='termination', is_generic=True), - 'terminationVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='termination', is_generic=True), - 'terminationVrf': RefInfo(object_type='ipam.vrf', field_name='termination', is_generic=True), - 'terminationVirtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='termination', is_generic=True), - 'terminationVirtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='termination', is_generic=True), - 'terminationVirtualCircuitTermination': RefInfo(object_type='circuits.virtualcircuittermination', field_name='termination', is_generic=True), - 'terminationVirtualCircuitType': RefInfo(object_type='circuits.virtualcircuittype', field_name='termination', is_generic=True), - 'terminationVirtualDeviceContext': RefInfo(object_type='dcim.virtualdevicecontext', field_name='termination', is_generic=True), - 'terminationVirtualDisk': RefInfo(object_type='virtualization.virtualdisk', field_name='termination', is_generic=True), - 'terminationVirtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='termination', is_generic=True), - 'terminationWirelessLan': RefInfo(object_type='wireless.wirelesslan', field_name='termination', is_generic=True), - 'terminationWirelessLanGroup': RefInfo(object_type='wireless.wirelesslangroup', field_name='termination', is_generic=True), - 'terminationWirelessLink': RefInfo(object_type='wireless.wirelesslink', field_name='termination', is_generic=True), + 'outside_ip': RefInfo(object_type='ipam.ipaddress', field_name='outside_ip'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'termination_asn': RefInfo(object_type='ipam.asn', field_name='termination', is_generic=True), + 'termination_asn_range': RefInfo(object_type='ipam.asnrange', field_name='termination', 
is_generic=True), + 'termination_aggregate': RefInfo(object_type='ipam.aggregate', field_name='termination', is_generic=True), + 'termination_cable': RefInfo(object_type='dcim.cable', field_name='termination', is_generic=True), + 'termination_cable_path': RefInfo(object_type='dcim.cablepath', field_name='termination', is_generic=True), + 'termination_cable_termination': RefInfo(object_type='dcim.cabletermination', field_name='termination', is_generic=True), + 'termination_circuit': RefInfo(object_type='circuits.circuit', field_name='termination', is_generic=True), + 'termination_circuit_group': RefInfo(object_type='circuits.circuitgroup', field_name='termination', is_generic=True), + 'termination_circuit_group_assignment': RefInfo(object_type='circuits.circuitgroupassignment', field_name='termination', is_generic=True), + 'termination_circuit_termination': RefInfo(object_type='circuits.circuittermination', field_name='termination', is_generic=True), + 'termination_circuit_type': RefInfo(object_type='circuits.circuittype', field_name='termination', is_generic=True), + 'termination_cluster': RefInfo(object_type='virtualization.cluster', field_name='termination', is_generic=True), + 'termination_cluster_group': RefInfo(object_type='virtualization.clustergroup', field_name='termination', is_generic=True), + 'termination_cluster_type': RefInfo(object_type='virtualization.clustertype', field_name='termination', is_generic=True), + 'termination_console_port': RefInfo(object_type='dcim.consoleport', field_name='termination', is_generic=True), + 'termination_console_server_port': RefInfo(object_type='dcim.consoleserverport', field_name='termination', is_generic=True), + 'termination_contact': RefInfo(object_type='tenancy.contact', field_name='termination', is_generic=True), + 'termination_contact_assignment': RefInfo(object_type='tenancy.contactassignment', field_name='termination', is_generic=True), + 'termination_contact_group': RefInfo(object_type='tenancy.contactgroup', 
field_name='termination', is_generic=True), + 'termination_contact_role': RefInfo(object_type='tenancy.contactrole', field_name='termination', is_generic=True), + 'termination_device': RefInfo(object_type='dcim.device', field_name='termination', is_generic=True), + 'termination_device_bay': RefInfo(object_type='dcim.devicebay', field_name='termination', is_generic=True), + 'termination_device_role': RefInfo(object_type='dcim.devicerole', field_name='termination', is_generic=True), + 'termination_device_type': RefInfo(object_type='dcim.devicetype', field_name='termination', is_generic=True), + 'termination_fhrp_group': RefInfo(object_type='ipam.fhrpgroup', field_name='termination', is_generic=True), + 'termination_fhrp_group_assignment': RefInfo(object_type='ipam.fhrpgroupassignment', field_name='termination', is_generic=True), + 'termination_front_port': RefInfo(object_type='dcim.frontport', field_name='termination', is_generic=True), + 'termination_ike_policy': RefInfo(object_type='vpn.ikepolicy', field_name='termination', is_generic=True), + 'termination_ike_proposal': RefInfo(object_type='vpn.ikeproposal', field_name='termination', is_generic=True), + 'termination_ip_address': RefInfo(object_type='ipam.ipaddress', field_name='termination', is_generic=True), + 'termination_ip_range': RefInfo(object_type='ipam.iprange', field_name='termination', is_generic=True), + 'termination_ip_sec_policy': RefInfo(object_type='vpn.ipsecpolicy', field_name='termination', is_generic=True), + 'termination_ip_sec_profile': RefInfo(object_type='vpn.ipsecprofile', field_name='termination', is_generic=True), + 'termination_ip_sec_proposal': RefInfo(object_type='vpn.ipsecproposal', field_name='termination', is_generic=True), + 'termination_interface': RefInfo(object_type='dcim.interface', field_name='termination', is_generic=True), + 'termination_inventory_item': RefInfo(object_type='dcim.inventoryitem', field_name='termination', is_generic=True), + 'termination_inventory_item_role': 
RefInfo(object_type='dcim.inventoryitemrole', field_name='termination', is_generic=True), + 'termination_l2vpn': RefInfo(object_type='vpn.l2vpn', field_name='termination', is_generic=True), + 'termination_l2vpn_termination': RefInfo(object_type='vpn.l2vpntermination', field_name='termination', is_generic=True), + 'termination_location': RefInfo(object_type='dcim.location', field_name='termination', is_generic=True), + 'termination_mac_address': RefInfo(object_type='dcim.macaddress', field_name='termination', is_generic=True), + 'termination_manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='termination', is_generic=True), + 'termination_module': RefInfo(object_type='dcim.module', field_name='termination', is_generic=True), + 'termination_module_bay': RefInfo(object_type='dcim.modulebay', field_name='termination', is_generic=True), + 'termination_module_type': RefInfo(object_type='dcim.moduletype', field_name='termination', is_generic=True), + 'termination_platform': RefInfo(object_type='dcim.platform', field_name='termination', is_generic=True), + 'termination_power_feed': RefInfo(object_type='dcim.powerfeed', field_name='termination', is_generic=True), + 'termination_power_outlet': RefInfo(object_type='dcim.poweroutlet', field_name='termination', is_generic=True), + 'termination_power_panel': RefInfo(object_type='dcim.powerpanel', field_name='termination', is_generic=True), + 'termination_power_port': RefInfo(object_type='dcim.powerport', field_name='termination', is_generic=True), + 'termination_prefix': RefInfo(object_type='ipam.prefix', field_name='termination', is_generic=True), + 'termination_provider': RefInfo(object_type='circuits.provider', field_name='termination', is_generic=True), + 'termination_provider_account': RefInfo(object_type='circuits.provideraccount', field_name='termination', is_generic=True), + 'termination_provider_network': RefInfo(object_type='circuits.providernetwork', field_name='termination', is_generic=True), + 
'termination_rir': RefInfo(object_type='ipam.rir', field_name='termination', is_generic=True), + 'termination_rack': RefInfo(object_type='dcim.rack', field_name='termination', is_generic=True), + 'termination_rack_reservation': RefInfo(object_type='dcim.rackreservation', field_name='termination', is_generic=True), + 'termination_rack_role': RefInfo(object_type='dcim.rackrole', field_name='termination', is_generic=True), + 'termination_rack_type': RefInfo(object_type='dcim.racktype', field_name='termination', is_generic=True), + 'termination_rear_port': RefInfo(object_type='dcim.rearport', field_name='termination', is_generic=True), + 'termination_region': RefInfo(object_type='dcim.region', field_name='termination', is_generic=True), + 'termination_role': RefInfo(object_type='ipam.role', field_name='termination', is_generic=True), + 'termination_route_target': RefInfo(object_type='ipam.routetarget', field_name='termination', is_generic=True), + 'termination_service': RefInfo(object_type='ipam.service', field_name='termination', is_generic=True), + 'termination_site': RefInfo(object_type='dcim.site', field_name='termination', is_generic=True), + 'termination_site_group': RefInfo(object_type='dcim.sitegroup', field_name='termination', is_generic=True), + 'termination_tag': RefInfo(object_type='extras.tag', field_name='termination', is_generic=True), + 'termination_tenant': RefInfo(object_type='tenancy.tenant', field_name='termination', is_generic=True), + 'termination_tenant_group': RefInfo(object_type='tenancy.tenantgroup', field_name='termination', is_generic=True), + 'termination_tunnel': RefInfo(object_type='vpn.tunnel', field_name='termination', is_generic=True), + 'termination_tunnel_group': RefInfo(object_type='vpn.tunnelgroup', field_name='termination', is_generic=True), + 'termination_tunnel_termination': RefInfo(object_type='vpn.tunneltermination', field_name='termination', is_generic=True), + 'termination_vlan': RefInfo(object_type='ipam.vlan', 
field_name='termination', is_generic=True), + 'termination_vlan_group': RefInfo(object_type='ipam.vlangroup', field_name='termination', is_generic=True), + 'termination_vlan_translation_policy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='termination', is_generic=True), + 'termination_vlan_translation_rule': RefInfo(object_type='ipam.vlantranslationrule', field_name='termination', is_generic=True), + 'termination_vm_interface': RefInfo(object_type='virtualization.vminterface', field_name='termination', is_generic=True), + 'termination_vrf': RefInfo(object_type='ipam.vrf', field_name='termination', is_generic=True), + 'termination_virtual_chassis': RefInfo(object_type='dcim.virtualchassis', field_name='termination', is_generic=True), + 'termination_virtual_circuit': RefInfo(object_type='circuits.virtualcircuit', field_name='termination', is_generic=True), + 'termination_virtual_circuit_termination': RefInfo(object_type='circuits.virtualcircuittermination', field_name='termination', is_generic=True), + 'termination_virtual_circuit_type': RefInfo(object_type='circuits.virtualcircuittype', field_name='termination', is_generic=True), + 'termination_virtual_device_context': RefInfo(object_type='dcim.virtualdevicecontext', field_name='termination', is_generic=True), + 'termination_virtual_disk': RefInfo(object_type='virtualization.virtualdisk', field_name='termination', is_generic=True), + 'termination_virtual_machine': RefInfo(object_type='virtualization.virtualmachine', field_name='termination', is_generic=True), + 'termination_wireless_lan': RefInfo(object_type='wireless.wirelesslan', field_name='termination', is_generic=True), + 'termination_wireless_lan_group': RefInfo(object_type='wireless.wirelesslangroup', field_name='termination', is_generic=True), + 'termination_wireless_link': RefInfo(object_type='wireless.wirelesslink', field_name='termination', is_generic=True), 'tunnel': RefInfo(object_type='vpn.tunnel', field_name='tunnel'), }, 
'wireless.wirelesslan': { 'group': RefInfo(object_type='wireless.wirelesslangroup', field_name='group'), - 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), - 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), - 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), - 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), + 'scope_location': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), + 'scope_region': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), + 'scope_site': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), + 'scope_site_group': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), 'vlan': RefInfo(object_type='ipam.vlan', field_name='vlan'), @@ -872,8 +872,8 @@ class RefInfo: 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'wireless.wirelesslink': { - 'interfaceA': RefInfo(object_type='dcim.interface', field_name='interface_a'), - 'interfaceB': RefInfo(object_type='dcim.interface', field_name='interface_b'), + 'interface_a': RefInfo(object_type='dcim.interface', field_name='interface_a'), + 'interface_b': RefInfo(object_type='dcim.interface', field_name='interface_b'), 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), }, diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index 59949e2..10234f0 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -18,7 +18,7 @@ from .common import AutoSlug, ChangeSetException, UnresolvedReference from .matcher import 
find_existing_object, fingerprint -from .plugin_utils import CUSTOM_FIELD_OBJECT_REFERENCE_TYPE, get_json_ref_info, get_primary_value +from .plugin_utils import CUSTOM_FIELD_OBJECT_REFERENCE_TYPE, get_json_ref_info, get_primary_value, legal_fields logger = logging.getLogger("netbox.diode_data") @@ -28,7 +28,6 @@ def _camel_to_snake_case(name): name = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name) return re.sub("([a-z0-9])([A-Z])", r"\1_\2", name).lower() - # these are implied values pushed down to referenced objects. _NESTED_CONTEXT = { "dcim.interface": { @@ -104,6 +103,9 @@ def _transform_proto_json_1(proto_json: dict, object_type: str, context=None) -> "_refs": set(), } + # handle camelCase protoJSON if provided... + proto_json = _ensure_snake_case(proto_json, object_type) + # context pushed down from parent nodes if context is not None: for k, v in context.items(): @@ -115,7 +117,7 @@ def _transform_proto_json_1(proto_json: dict, object_type: str, context=None) -> post_create = None # special handling for custom fields - custom_fields = dict.pop(proto_json, "customFields", {}) + custom_fields = dict.pop(proto_json, "custom_fields", {}) if custom_fields: custom_fields, custom_fields_refs, nested = _prepare_custom_fields(object_type, custom_fields) node['custom_fields'] = custom_fields @@ -125,7 +127,7 @@ def _transform_proto_json_1(proto_json: dict, object_type: str, context=None) -> for key, value in proto_json.items(): ref_info = get_json_ref_info(object_type, key) if ref_info is None: - node[_camel_to_snake_case(key)] = copy.deepcopy(value) + node[key] = copy.deepcopy(value) continue nested_context = _nested_context(object_type, uuid, ref_info.field_name) @@ -181,6 +183,22 @@ def _transform_proto_json_1(proto_json: dict, object_type: str, context=None) -> return nodes +def _ensure_snake_case(proto_json: dict, object_type: str) -> dict: + fields = legal_fields(object_type) + out = {} + for k, v in proto_json.items(): + if k in fields or 
get_json_ref_info(object_type, k): + out[k] = v + continue + snake_key = _camel_to_snake_case(k) + if snake_key in fields or get_json_ref_info(object_type, snake_key): + out[snake_key] = v + else: + # error? + logger.warning(f"Unknown field {k}/{snake_key} is not legal for {object_type}, skipping...") + return out + + def _topo_sort(entities: list[dict]) -> list[dict]: """Topologically sort entities by reference.""" by_uuid = {e['_uuid']: e for e in entities} diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index ce5c879..d286175 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -30,13 +30,19 @@ ) -def get_entity_key(model_name): - """Get the entity key for a model name.""" +def get_valid_entity_keys(model_name): + """ + Get the valid entity keys for a model name. + + This can be snake or lowerCamel case (both are valid for protoJSON) + """ s = re.sub(r'([A-Z0-9]{2,})([A-Z])([a-z])', r'\1_\2\3', model_name) s = re.sub(r'([a-z])([A-Z])', r'\1_\2', s) - s = re.sub(r'_+', '_', s.lower()) # snake - s = ''.join([word.capitalize() for word in s.split("_")]) # upperCamelCase - return s[0].lower() + s[1:] # lowerCamelCase + snake = re.sub(r'_+', '_', s.lower()) # snake + upperCamel = ''.join([word.capitalize() for word in snake.split("_")]) # upperCamelCase + lowerCamel = upperCamel[0].lower() + upperCamel[1:] # lowerCamelCase + + return (snake, lowerCamel) class GenerateDiffView(views.APIView): @@ -65,9 +71,10 @@ def _post(self, request, *args, **kwargs): app_label, model_name = object_type.split(".") model_class = apps.get_model(app_label, model_name) - # Convert model name to lowerCamelCase for entity lookup - entity_key = get_entity_key(model_class.__name__) - original_entity_data = entity.get(entity_key) + for entity_key in get_valid_entity_keys(model_class.__name__): + original_entity_data = entity.get(entity_key) + if original_entity_data: + break if original_entity_data is None: raise 
ValidationError( diff --git a/netbox_diode_plugin/tests/test_api_diff_and_apply.py b/netbox_diode_plugin/tests/test_api_diff_and_apply.py index a0814cb..6303735 100644 --- a/netbox_diode_plugin/tests/test_api_diff_and_apply.py +++ b/netbox_diode_plugin/tests/test_api_diff_and_apply.py @@ -89,7 +89,7 @@ def test_generate_diff_and_apply_create_interface_with_tags(self): "type": "1000base-t", "device": { "name": f"Device {uuid4()}", - "deviceType": { + "device_type": { "model": f"Device Type {uuid4()}", "manufacturer": { "name": f"Manufacturer {uuid4()}" @@ -125,7 +125,7 @@ def test_generate_diff_and_apply_create_and_update_device_role(self): "entity": { "device": { "name": f"Device {device_uuid}", - "deviceType": { + "device_type": { "model": f"Device Type {uuid4()}", "manufacturer": { "name": f"Manufacturer {uuid4()}" @@ -298,14 +298,14 @@ def test_generate_diff_and_apply_create_interface_with_primay_mac_address(self): "site": { "Name": f"Site {uuid4()}", }, - "deviceType": { + "device_type": { "manufacturer": { "Name": f"Manufacturer {uuid4()}", }, "model": f"Device Type {uuid4()}", }, }, - "primaryMacAddress": { + "primary_mac_address": { "mac_address": "00:00:00:00:00:01", }, }, @@ -316,8 +316,8 @@ def test_generate_diff_and_apply_create_interface_with_primay_mac_address(self): new_interface = Interface.objects.get(name=f"Interface {interface_uuid}") self.assertEqual(new_interface.primary_mac_address.mac_address, "00:00:00:00:00:01") - def test_generate_diff_and_apply_create_device_with_primary_ip4(self): - """Test generate diff and apply create device with primary ip4.""" + def test_generate_diff_and_apply_create_device_with_primary_ip4_camel_case(self): + """Test generate diff and apply create device with primary ip4 (camel case).""" device_uuid = str(uuid4()) interface_uuid = str(uuid4()) addr = "192.168.1.1" @@ -359,6 +359,49 @@ def test_generate_diff_and_apply_create_device_with_primary_ip4(self): device = Device.objects.get(name=f"Device {device_uuid}") 
self.assertEqual(device.primary_ip4.pk, new_ipaddress.pk) + def test_generate_diff_and_apply_create_device_with_primary_ip4(self): + """Test generate diff and apply create device with primary ip4.""" + device_uuid = str(uuid4()) + interface_uuid = str(uuid4()) + addr = "192.168.1.1" + payload = { + "timestamp": 1, + "object_type": "ipam.ipaddress", + "entity": { + "ip_address": { + "address": addr, + "assigned_object_interface": { + "name": f"Interface {interface_uuid}", + "type": "1000base-t", + "device": { + "name": f"Device {device_uuid}", + "role": { + "name": f"Role {uuid4()}", + }, + "site": { + "name": f"Site {uuid4()}", + }, + "device_type": { + "manufacturer": { + "name": f"Manufacturer {uuid4()}", + }, + "model": f"Device Type {uuid4()}", + }, + "primary_ip4": { + "address": addr, + }, + }, + }, + }, + }, + } + + _, response = self.diff_and_apply(payload) + new_ipaddress = IPAddress.objects.get(address=addr) + self.assertEqual(new_ipaddress.assigned_object.name, f"Interface {interface_uuid}") + device = Device.objects.get(name=f"Device {device_uuid}") + self.assertEqual(device.primary_ip4.pk, new_ipaddress.pk) + def test_generate_diff_and_apply_create_and_update_site_with_custom_field(self): """Test generate diff and apply create and update site with custom field.""" site_uuid = str(uuid4()) @@ -369,7 +412,7 @@ def test_generate_diff_and_apply_create_and_update_site_with_custom_field(self): "site": { "name": "A New Custom Site", "slug": "a-new-custom-site", - "customFields": { + "custom_fields": { "myuuid": { "text": site_uuid, }, @@ -392,7 +435,7 @@ def test_generate_diff_and_apply_create_and_update_site_with_custom_field(self): "entity": { "site": { "comments": "An updated comment", - "customFields": { + "custom_fields": { "myuuid": { "text": site_uuid, }, @@ -422,7 +465,7 @@ def test_generate_diff_and_apply_create_and_update_site_with_custom_field(self): "object_type": "dcim.site", "entity": { "site": { - "customFields": { + "custom_fields": { 
"myuuid": { "text": site_uuid, }, @@ -449,7 +492,7 @@ def test_generate_diff_and_apply_create_and_update_site_with_custom_field(self): "object_type": "dcim.site", "entity": { "site": { - "customFields": { + "custom_fields": { "myuuid": { "text": site_uuid, }, @@ -479,7 +522,7 @@ def test_generate_diff_wrong_type_date(self): "site": { "name": "Site Generate Diff 1", "slug": "site-generate-diff-1", - "customFields": { + "custom_fields": { "mydate": { "date": 12, }, diff --git a/netbox_diode_plugin/tests/test_api_generate_diff.py b/netbox_diode_plugin/tests/test_api_generate_diff.py index d7b1dd2..0c84fd9 100644 --- a/netbox_diode_plugin/tests/test_api_generate_diff.py +++ b/netbox_diode_plugin/tests/test_api_generate_diff.py @@ -112,7 +112,7 @@ def test_generate_diff_create_site_with_custom_field(self): "site": { "name": "A New Site", "slug": "a-new-site", - "customFields": { + "custom_fields": { "some_json": { "json": '{"some_key": 1234567890}', }, @@ -182,7 +182,7 @@ def test_match_site_by_custom_field(self): # but we expect to match the existing site by the # unique custom field myuuid "comments": "A custom comment", - "customFields": { + "custom_fields": { "myuuid": { "text": self.site_uuid, }, @@ -217,7 +217,7 @@ def test_generate_diff_update_rack_type_autoslug(self): "timestamp": 1, "object_type": "dcim.racktype", "entity": { - "rackType": { + "rack_type": { "model": "Rack Type 1", "form_factor": "wall-frame", }, @@ -246,6 +246,38 @@ def test_generate_diff_update_rack_type_autoslug(self): # correct slug is present in before data self.assertEqual(before.get("slug"), "rack-type-1") + def test_generate_diff_update_rack_type_camel_case(self): + """Test generate diff update rack type with came cased protoJSON.""" + payload = { + "timestamp": 1, + "object_type": "dcim.racktype", + "entity": { + "rackType": { + "slug": "rack-type-1", + "model": "Rack Type 1", + "formFactor": "wall-frame", + }, + } + } + + response = self.send_request(payload) + 
self.assertEqual(response.status_code, status.HTTP_200_OK) + cs = response.json().get("change_set", {}) + self.assertIsNotNone(cs.get("id")) + changes = cs.get("changes", []) + self.assertEqual(len(changes), 1) + change = changes[0] + self.assertEqual(change.get("object_type"), "dcim.racktype") + self.assertEqual(change.get("change_type"), "update") + self.assertEqual(change.get("object_id"), self.rack_type.id) + self.assertEqual(change.get("ref_id"), None) + + data = change.get("data", {}) + self.assertEqual(data.get("model"), "Rack Type 1") + self.assertEqual(data.get("form_factor"), "wall-frame") + + before = change.get("before", {}) + self.assertEqual(before.get("model"), "Rack Type 1") def send_request(self, payload, status_code=status.HTTP_200_OK): """Post the payload to the url and return the response.""" From 53158b2fdb25f2fc21562e0ab8d89f83bc57d863 Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Sun, 13 Apr 2025 10:54:35 -0400 Subject: [PATCH 23/52] fix: add special format transformations (#80) special transformations to format inputs the way serializers expect adds handling for date only fields, decimal (vs float) and integer range --- netbox_diode_plugin/api/applier.py | 18 +- netbox_diode_plugin/api/common.py | 17 ++ netbox_diode_plugin/api/differ.py | 49 +++-- netbox_diode_plugin/api/plugin_utils.py | 201 +++++++++++++++++- netbox_diode_plugin/api/transformer.py | 10 +- .../tests/test_api_diff_and_apply.py | 100 ++++++++- 6 files changed, 352 insertions(+), 43 deletions(-) diff --git a/netbox_diode_plugin/api/applier.py b/netbox_diode_plugin/api/applier.py index 3f1b041..0267302 100644 --- a/netbox_diode_plugin/api/applier.py +++ b/netbox_diode_plugin/api/applier.py @@ -11,7 +11,7 @@ from django.db import models from rest_framework.exceptions import ValidationError as ValidationError -from .common import NON_FIELD_ERRORS, Change, ChangeSet, ChangeSetException, ChangeSetResult, ChangeType +from .common import 
NON_FIELD_ERRORS, Change, ChangeSet, ChangeSetException, ChangeSetResult, ChangeType, error_from_validation_error from .plugin_utils import get_object_type_model, legal_fields from .supported_models import get_serializer_for_model @@ -35,7 +35,7 @@ def apply_changeset(change_set: ChangeSet, request) -> ChangeSetResult: data = _pre_apply(model_class, change, created) _apply_change(data, model_class, change, created, request) except ValidationError as e: - raise _err_from_validation_error(e, object_type) + raise error_from_validation_error(e, object_type) except ObjectDoesNotExist: raise _err(f"{object_type} with id {change.object_id} does not exist", object_type, "object_id") except TypeError as e: @@ -129,17 +129,3 @@ def _err(message, object_name, field): object_name = "__all__" return ChangeSetException(message, errors={object_name: {field: [message]}}) -def _err_from_validation_error(e, object_name): - errors = {} - if e.detail: - if isinstance(e.detail, dict): - errors[object_name] = e.detail - elif isinstance(e.detail, (list, tuple)): - errors[object_name] = { - NON_FIELD_ERRORS: e.detail - } - else: - errors[object_name] = { - NON_FIELD_ERRORS: [e.detail] - } - return ChangeSetException("validation error", errors=errors) diff --git a/netbox_diode_plugin/api/common.py b/netbox_diode_plugin/api/common.py index 65a9a1f..8c735a9 100644 --- a/netbox_diode_plugin/api/common.py +++ b/netbox_diode_plugin/api/common.py @@ -235,3 +235,20 @@ class AutoSlug: field_name: str value: str + + +def error_from_validation_error(e, object_name): + """Convert a drf ValidationError to a ChangeSetException.""" + errors = {} + if e.detail: + if isinstance(e.detail, dict): + errors[object_name] = e.detail + elif isinstance(e.detail, (list, tuple)): + errors[object_name] = { + NON_FIELD_ERRORS: e.detail + } + else: + errors[object_name] = { + NON_FIELD_ERRORS: [e.detail] + } + return ChangeSetException("validation error", errors=errors) diff --git a/netbox_diode_plugin/api/differ.py 
b/netbox_diode_plugin/api/differ.py index 026f3c9..a3121c0 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -9,8 +9,9 @@ from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError from utilities.data import shallow_compare_dict +from django.db.backends.postgresql.psycopg_any import NumericRange -from .common import Change, ChangeSet, ChangeSetException, ChangeSetResult, ChangeType, UnresolvedReference +from .common import Change, ChangeSet, ChangeSetException, ChangeSetResult, ChangeType, error_from_validation_error from .plugin_utils import get_primary_value, legal_fields from .supported_models import extract_supported_models from .transformer import cleanup_unresolved_references, set_custom_field_defaults, transform_proto_json @@ -78,29 +79,23 @@ def prechange_data_from_instance(instance) -> dict: # noqa: C901 else: cfmap[cf.name] = cf.serialize(value) prechange_data["custom_fields"] = cfmap - + prechange_data = _harmonize_formats(prechange_data) return prechange_data -def _harmonize_formats(prechange_data: dict, postchange_data: dict): - for k, v in prechange_data.items(): - if k.startswith('_'): - continue - if isinstance(v, datetime.datetime): - prechange_data[k] = v.strftime("%Y-%m-%dT%H:%M:%SZ") - elif isinstance(v, datetime.date): - prechange_data[k] = v.strftime("%Y-%m-%d") - elif isinstance(v, int) and k in postchange_data: - val = postchange_data[k] - if isinstance(val, UnresolvedReference): - continue - try: - postchange_data[k] = int(val) - except Exception: - continue - elif isinstance(v, dict): - _harmonize_formats(v, postchange_data.get(k, {})) +def _harmonize_formats(prechange_data): + if isinstance(prechange_data, dict): + return {k: _harmonize_formats(v) for k, v in prechange_data.items()} + if isinstance(prechange_data, (list, tuple)): + return [_harmonize_formats(v) for v in prechange_data] + if isinstance(prechange_data, datetime.datetime): + return 
prechange_data.strftime("%Y-%m-%dT%H:%M:%SZ") + if isinstance(prechange_data, datetime.date): + return prechange_data.strftime("%Y-%m-%d") + if isinstance(prechange_data, NumericRange): + return (prechange_data.lower, prechange_data.upper-1) + return prechange_data def clean_diff_data(data: dict, exclude_empty_values: bool = True) -> dict: """Clean diff data by removing null values.""" @@ -170,8 +165,19 @@ def sort_dict_recursively(d): return sorted([sort_dict_recursively(item) for item in d], key=str) return d - def generate_changeset(entity: dict, object_type: str) -> ChangeSetResult: + """Generate a changeset for an entity.""" + try: + return _generate_changeset(entity, object_type) + except ChangeSetException: + raise + except ValidationError as e: + raise error_from_validation_error(e, object_type) + except Exception as e: + logger.error(f"Unexpected error generating changeset: {e}") + raise + +def _generate_changeset(entity: dict, object_type: str) -> ChangeSetResult: """Generate a changeset for an entity.""" change_set = ChangeSet() @@ -196,7 +202,6 @@ def generate_changeset(entity: dict, object_type: str) -> ChangeSetResult: # this is also important for custom fields because they do not appear to # respsect paritial update serialization. entity = _partially_merge(prechange_data, entity, instance) - _harmonize_formats(prechange_data, entity) changed_data = shallow_compare_dict( prechange_data, entity, ) diff --git a/netbox_diode_plugin/api/plugin_utils.py b/netbox_diode_plugin/api/plugin_utils.py index 9a08f33..d85038c 100644 --- a/netbox_diode_plugin/api/plugin_utils.py +++ b/netbox_diode_plugin/api/plugin_utils.py @@ -1,16 +1,20 @@ """Diode plugin helpers.""" # Generated code. DO NOT EDIT. 
-# Timestamp: 2025-04-12 15:25:46Z +# Timestamp: 2025-04-13 13:20:10Z from dataclasses import dataclass +import datetime +import decimal from functools import lru_cache +import logging from typing import Type from core.models import ObjectType as NetBoxType from django.contrib.contenttypes.models import ContentType from django.db import models +logger = logging.getLogger(__name__) @lru_cache(maxsize=256) def get_object_type_model(object_type: str) -> Type[models.Model]: @@ -995,4 +999,197 @@ def legal_fields(object_type: str|Type[models.Model]) -> frozenset[str]: def get_primary_value(data: dict, object_type: str) -> str|None: field = _OBJECT_TYPE_PRIMARY_VALUE_FIELD_MAP.get(object_type, 'name') - return data.get(field) \ No newline at end of file + return data.get(field) + + +def transform_timestamp_to_date_only(value: str) -> str: + return datetime.datetime.fromisoformat(value).strftime('%Y-%m-%d') + +def transform_float_to_decimal(value: float) -> decimal.Decimal: + try: + return decimal.Decimal(str(value)) + except decimal.InvalidOperation: + raise ValueError(f'Invalid decimal value: {value}') + +def int_from_int64string(value: str) -> int: + return int(value) + +def collect_integer_pairs(value: list[int]) -> list[tuple[int, int]]: + if len(value) % 2 != 0: + raise ValueError('Array must have an even number of elements') + return [(value[i], value[i+1]) for i in range(0, len(value), 2)] + +def for_all(transform): + def wrapper(value): + if isinstance(value, list): + return [transform(v) for v in value] + return transform(value) + return wrapper + +_FORMAT_TRANSFORMATIONS = { + 'circuits.circuit': { + 'commit_rate': int_from_int64string, + 'distance': transform_float_to_decimal, + 'install_date': transform_timestamp_to_date_only, + 'termination_date': transform_timestamp_to_date_only, + }, + 'circuits.circuittermination': { + 'port_speed': int_from_int64string, + 'upstream_speed': int_from_int64string, + }, + 'dcim.cable': { + 'length': 
transform_float_to_decimal, + }, + 'dcim.consoleport': { + 'speed': int_from_int64string, + }, + 'dcim.consoleserverport': { + 'speed': int_from_int64string, + }, + 'dcim.device': { + 'latitude': transform_float_to_decimal, + 'longitude': transform_float_to_decimal, + 'position': transform_float_to_decimal, + 'vc_position': int_from_int64string, + 'vc_priority': int_from_int64string, + }, + 'dcim.devicetype': { + 'u_height': transform_float_to_decimal, + 'weight': transform_float_to_decimal, + }, + 'dcim.frontport': { + 'rear_port_position': int_from_int64string, + }, + 'dcim.interface': { + 'mtu': int_from_int64string, + 'rf_channel_frequency': transform_float_to_decimal, + 'rf_channel_width': transform_float_to_decimal, + 'speed': int_from_int64string, + 'tx_power': int_from_int64string, + }, + 'dcim.moduletype': { + 'weight': transform_float_to_decimal, + }, + 'dcim.powerfeed': { + 'amperage': int_from_int64string, + 'max_utilization': int_from_int64string, + 'voltage': int_from_int64string, + }, + 'dcim.powerport': { + 'allocated_draw': int_from_int64string, + 'maximum_draw': int_from_int64string, + }, + 'dcim.rack': { + 'max_weight': int_from_int64string, + 'mounting_depth': int_from_int64string, + 'outer_depth': int_from_int64string, + 'outer_width': int_from_int64string, + 'starting_unit': int_from_int64string, + 'u_height': int_from_int64string, + 'weight': transform_float_to_decimal, + 'width': int_from_int64string, + }, + 'dcim.rackreservation': { + 'units': for_all(int_from_int64string), + }, + 'dcim.racktype': { + 'max_weight': int_from_int64string, + 'mounting_depth': int_from_int64string, + 'outer_depth': int_from_int64string, + 'outer_width': int_from_int64string, + 'starting_unit': int_from_int64string, + 'u_height': int_from_int64string, + 'weight': transform_float_to_decimal, + 'width': int_from_int64string, + }, + 'dcim.rearport': { + 'positions': int_from_int64string, + }, + 'dcim.site': { + 'latitude': transform_float_to_decimal, + 'longitude': 
transform_float_to_decimal, + }, + 'dcim.virtualdevicecontext': { + 'identifier': int_from_int64string, + }, + 'ipam.aggregate': { + 'date_added': transform_timestamp_to_date_only, + }, + 'ipam.asn': { + 'asn': int_from_int64string, + }, + 'ipam.asnrange': { + 'end': int_from_int64string, + 'start': int_from_int64string, + }, + 'ipam.fhrpgroup': { + 'group_id': int_from_int64string, + }, + 'ipam.fhrpgroupassignment': { + 'priority': int_from_int64string, + }, + 'ipam.role': { + 'weight': int_from_int64string, + }, + 'ipam.service': { + 'ports': for_all(int_from_int64string), + }, + 'ipam.vlan': { + 'vid': int_from_int64string, + }, + 'ipam.vlangroup': { + 'vid_ranges': collect_integer_pairs, + }, + 'ipam.vlantranslationrule': { + 'local_vid': int_from_int64string, + 'remote_vid': int_from_int64string, + }, + 'virtualization.virtualdisk': { + 'size': int_from_int64string, + }, + 'virtualization.virtualmachine': { + 'disk': int_from_int64string, + 'memory': int_from_int64string, + 'vcpus': transform_float_to_decimal, + }, + 'virtualization.vminterface': { + 'mtu': int_from_int64string, + }, + 'vpn.ikepolicy': { + 'version': int_from_int64string, + }, + 'vpn.ikeproposal': { + 'group': int_from_int64string, + 'sa_lifetime': int_from_int64string, + }, + 'vpn.ipsecpolicy': { + 'pfs_group': int_from_int64string, + }, + 'vpn.ipsecproposal': { + 'sa_lifetime_data': int_from_int64string, + 'sa_lifetime_seconds': int_from_int64string, + }, + 'vpn.l2vpn': { + 'identifier': int_from_int64string, + }, + 'vpn.tunnel': { + 'tunnel_id': int_from_int64string, + }, + 'wireless.wirelesslink': { + 'distance': transform_float_to_decimal, + }, +} + +def apply_format_transformations(data: dict, object_type: str): + for key, transform in _FORMAT_TRANSFORMATIONS.get(object_type, {}).items(): + val = data.get(key, None) + if val is None: + continue + try: + data[key] = transform(val) + except ValidationError: + raise + except ValueError as e: + raise ValidationError(f'Invalid value {val} for 
field {key} in {object_type}: {e}') + except Exception as e: + raise ValidationError(f'Invalid value {val} for field {key} in {object_type}') \ No newline at end of file diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index 10234f0..830e5a7 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -18,7 +18,13 @@ from .common import AutoSlug, ChangeSetException, UnresolvedReference from .matcher import find_existing_object, fingerprint -from .plugin_utils import CUSTOM_FIELD_OBJECT_REFERENCE_TYPE, get_json_ref_info, get_primary_value, legal_fields +from .plugin_utils import ( + CUSTOM_FIELD_OBJECT_REFERENCE_TYPE, + apply_format_transformations, + get_json_ref_info, + get_primary_value, + legal_fields, +) logger = logging.getLogger("netbox.diode_data") @@ -72,6 +78,7 @@ def transform_proto_json(proto_json: dict, object_type: str, supported_models: d """ entities = _transform_proto_json_1(proto_json, object_type) logger.debug(f"_transform_proto_json_1 entities: {json.dumps(entities, default=lambda o: str(o), indent=4)}") + entities = _topo_sort(entities) logger.debug(f"_topo_sort: {json.dumps(entities, default=lambda o: str(o), indent=4)}") deduplicated = _fingerprint_dedupe(entities) @@ -105,6 +112,7 @@ def _transform_proto_json_1(proto_json: dict, object_type: str, context=None) -> # handle camelCase protoJSON if provided... 
proto_json = _ensure_snake_case(proto_json, object_type) + apply_format_transformations(proto_json, object_type) # context pushed down from parent nodes if context is not None: diff --git a/netbox_diode_plugin/tests/test_api_diff_and_apply.py b/netbox_diode_plugin/tests/test_api_diff_and_apply.py index 6303735..2367309 100644 --- a/netbox_diode_plugin/tests/test_api_diff_and_apply.py +++ b/netbox_diode_plugin/tests/test_api_diff_and_apply.py @@ -3,11 +3,14 @@ """Diode NetBox Plugin - Tests.""" import datetime +import decimal import logging from uuid import uuid4 from core.models import ObjectType from dcim.models import Device, Interface, Site +from ipam.models import VLANGroup +from circuits.models import Circuit from django.contrib.auth import get_user_model from extras.models import CustomField from extras.models.customfields import CustomFieldTypeChoices @@ -72,6 +75,15 @@ def setUp(self): self.date_field.object_types.set([self.object_type]) self.date_field.save() + self.decimal_field = CustomField.objects.create( + name='mydecimal', + type=CustomFieldTypeChoices.TYPE_DECIMAL, + required=False, + unique=False, + ) + self.decimal_field.object_types.set([self.object_type]) + self.decimal_field.save() + def test_generate_diff_and_apply_create_interface_with_tags(self): """Test generate diff and apply create interface with tags.""" interface_uuid = str(uuid4()) @@ -416,6 +428,9 @@ def test_generate_diff_and_apply_create_and_update_site_with_custom_field(self): "myuuid": { "text": site_uuid, }, + "mydecimal": { + "decimal": 1234.567, + }, "some_json": { "json": '{"some_key": 9876543210}', }, @@ -428,6 +443,7 @@ def test_generate_diff_and_apply_create_and_update_site_with_custom_field(self): new_site = Site.objects.get(name="A New Custom Site") self.assertEqual(new_site.custom_field_data[self.uuid_field.name], site_uuid) self.assertEqual(new_site.custom_field_data[self.json_field.name], {"some_key": 9876543210}) + 
self.assertEqual(new_site.custom_field_data[self.decimal_field.name], 1234.567) payload = { "timestamp": 1, @@ -513,8 +529,52 @@ def test_generate_diff_and_apply_create_and_update_site_with_custom_field(self): diff = response1.json().get("change_set", {}) self.assertEqual(diff.get("changes", []), []) - def test_generate_diff_wrong_type_date(self): - """Test generate diff wrong type date.""" + def test_generate_diff_and_apply_circuit_with_install_date(self): + """Test generate diff and apply circuit with date.""" + circuit_uuid = str(uuid4()) + payload = { + "timestamp": 1, + "object_type": "circuits.circuit", + "entity": { + "circuit": { + "cid": f"Circuit {circuit_uuid}", + "install_date": "2026-01-01T00:00:00Z", + "provider": { + "name": f"Provider {uuid4()}", + }, + "type": { + "name": f"Ciruit Type {uuid4()}", + }, + }, + }, + } + + _, response = self.diff_and_apply(payload) + new_circuit = Circuit.objects.get(cid=f"Circuit {circuit_uuid}") + self.assertEqual(new_circuit.install_date, datetime.date(2026, 1, 1)) + + def test_generate_diff_and_apply_site_with_lat_lon(self): + """Test generate diff and apply site with lat and lon.""" + site_uuid = str(uuid4()) + payload = { + "timestamp": 1, + "object_type": "dcim.site", + "entity": { + "site": { + "name": f"Site {site_uuid}", + "latitude": 23.456, + "longitude": 78.910, + }, + }, + } + + _, response = self.diff_and_apply(payload) + new_site = Site.objects.get(name=f"Site {site_uuid}") + self.assertEqual(new_site.latitude, decimal.Decimal("23.456")) + self.assertEqual(new_site.longitude, decimal.Decimal("78.910")) + + def test_generate_diff_and_apply_wrong_type_date(self): + """Test generate diff and apply wrong type date.""" payload = { "timestamp": 1, "object_type": "dcim.site", @@ -542,6 +602,42 @@ def test_generate_diff_wrong_type_date(self): ) self.assertEqual(response2.status_code, status.HTTP_400_BAD_REQUEST) + def test_generate_diff_and_apply_vlan_group_with_vid_ranges(self): + """Test generate diff and 
apply vlan group vid ranges.""" + payload = { + "timestamp": 1, + "object_type": "ipam.vlangroup", + "entity": { + "vlan_group": { + "name": "VLAN Group 1", + "vid_ranges": [1,5,10,15], + }, + }, + } + _, response = self.diff_and_apply(payload) + new_vlan_group = VLANGroup.objects.get(name="VLAN Group 1") + self.assertEqual(new_vlan_group.vid_ranges[0].lower, 1) + self.assertEqual(new_vlan_group.vid_ranges[0].upper, 6) + self.assertEqual(new_vlan_group.vid_ranges[1].lower, 10) + self.assertEqual(new_vlan_group.vid_ranges[1].upper, 16) + + payload = { + "timestamp": 1, + "object_type": "ipam.vlangroup", + "entity": { + "vlan_group": { + "name": "VLAN Group 1", + "vid_ranges": [3,9,12,20], + }, + }, + } + _, response = self.diff_and_apply(payload) + new_vlan_group = VLANGroup.objects.get(name="VLAN Group 1") + self.assertEqual(new_vlan_group.vid_ranges[0].lower, 3) + self.assertEqual(new_vlan_group.vid_ranges[0].upper, 10) + self.assertEqual(new_vlan_group.vid_ranges[1].lower, 12) + self.assertEqual(new_vlan_group.vid_ranges[1].upper, 21) + def diff_and_apply(self, payload): """Diff and apply the payload.""" From 9b01b3ecc1b6ddd81b75f2282c0ba7064495a212 Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Sun, 13 Apr 2025 14:03:30 -0400 Subject: [PATCH 24/52] fix: special cases for ip network defaulting (#81) * fix: special handling for ip address network defaulting * fix: match ips ignoring mask value, use specific matchers --- netbox_diode_plugin/api/common.py | 2 +- netbox_diode_plugin/api/differ.py | 10 +- netbox_diode_plugin/api/matcher.py | 137 ++++++++++++- netbox_diode_plugin/api/plugin_utils.py | 21 +- .../tests/test_api_diff_and_apply.py | 185 ++++++++++++++++++ 5 files changed, 344 insertions(+), 11 deletions(-) diff --git a/netbox_diode_plugin/api/common.py b/netbox_diode_plugin/api/common.py index 8c735a9..8ea7b1e 100644 --- a/netbox_diode_plugin/api/common.py +++ b/netbox_diode_plugin/api/common.py @@ -238,7 +238,7 
@@ class AutoSlug: def error_from_validation_error(e, object_name): - """Convert a drf ValidationError to a ChangeSetException.""" + """Convert a from rest_framework.exceptions.ValidationError to a ChangeSetException.""" errors = {} if e.detail: if isinstance(e.detail, dict): diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index a3121c0..ca18faf 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -10,6 +10,7 @@ from django.core.exceptions import ValidationError from utilities.data import shallow_compare_dict from django.db.backends.postgresql.psycopg_any import NumericRange +import netaddr from .common import Change, ChangeSet, ChangeSetException, ChangeSetResult, ChangeType, error_from_validation_error from .plugin_utils import get_primary_value, legal_fields @@ -84,6 +85,10 @@ def prechange_data_from_instance(instance) -> dict: # noqa: C901 def _harmonize_formats(prechange_data): + if prechange_data is None: + return None + if isinstance(prechange_data, (str, int, float, bool)): + return prechange_data if isinstance(prechange_data, dict): return {k: _harmonize_formats(v) for k, v in prechange_data.items()} if isinstance(prechange_data, (list, tuple)): @@ -94,8 +99,11 @@ def _harmonize_formats(prechange_data): return prechange_data.strftime("%Y-%m-%d") if isinstance(prechange_data, NumericRange): return (prechange_data.lower, prechange_data.upper-1) + if isinstance(prechange_data, netaddr.IPNetwork): + return str(prechange_data) - return prechange_data + logger.warning(f"Unknown type in prechange_data: {type(prechange_data)}") + return str(prechange_data) def clean_diff_data(data: dict, exclude_empty_values: bool = True) -> dict: """Clean diff data by removing null values.""" diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py index 8fe0a1b..c033a26 100644 --- a/netbox_diode_plugin/api/matcher.py +++ b/netbox_diode_plugin/api/matcher.py @@ -17,6 +17,7 @@ from 
django.db.models.fields import SlugField from django.db.models.lookups import Exact from django.db.models.query_utils import Q +import netaddr from extras.models.customfields import CustomField from .common import AutoSlug, UnresolvedReference @@ -46,17 +47,17 @@ ), ], "ipam.ipaddress": lambda: [ - ObjectMatchCriteria( - fields=("address", ), - name="logical_ip_address_global_no_vrf", + GlobalIPNetworkIPMatcher( + ip_field="address", + vrf_field="vrf", model_class=get_object_type_model("ipam.ipaddress"), - condition=Q(vrf__isnull=True), + name="logical_ip_address_global_no_vrf", ), - ObjectMatchCriteria( - fields=("address", "assigned_object_type", "assigned_object_id"), - name="logical_ip_address_within_vrf", + VRFIPNetworkIPMatcher( + ip_field="address", + vrf_field="vrf", model_class=get_object_type_model("ipam.ipaddress"), - condition=Q(vrf__isnull=False) + name="logical_ip_address_within_vrf", ), ], "ipam.prefix": lambda: [ @@ -271,6 +272,8 @@ def _prepare_data(self, data: dict) -> dict: continue return prepared + + @dataclass class CustomFieldMatcher: """A matcher for a unique custom field.""" @@ -305,6 +308,124 @@ def has_required_fields(self, data: dict) -> bool: """Returns True if the data given contains a value for all fields referenced by the constraint.""" return self.custom_field in data.get("custom_fields", {}) + +@dataclass +class GlobalIPNetworkIPMatcher: + """A matcher that ignores the mask.""" + + ip_field: str + vrf_field: str + model_class: Type[models.Model] + name: str + + def _check_condition(self, data: dict) -> bool: + """Check the condition for the custom field.""" + return data.get(self.vrf_field, None) is None + + def fingerprint(self, data: dict) -> str|None: + """Fingerprint the custom field value.""" + if not self.has_required_fields(data): + return None + + if not self._check_condition(data): + return None + + value = self.ip_value(data) + if value is None: + return None + + return hash((self.model_class.__name__, self.name, value)) 
+ + def has_required_fields(self, data: dict) -> bool: + """Returns True if the data given contains a value for all fields referenced by the constraint.""" + return self.ip_field in data + + def ip_value(self, data: dict) -> str|None: + """Get the IP value from the data.""" + value = data.get(self.ip_field) + if value is None: + return None + return _ip_only(value) + + def build_queryset(self, data: dict) -> models.QuerySet: + """Build a queryset for the custom field.""" + if not self.has_required_fields(data): + return None + + if not self._check_condition(data): + return None + + value = self.ip_value(data) + if value is None: + return None + + return self.model_class.objects.filter(**{f'{self.ip_field}__net_host': value, f'{self.vrf_field}__isnull': True}) + +@dataclass +class VRFIPNetworkIPMatcher: + """Matches ip in a vrf, ignores mask.""" + + ip_field: str + vrf_field: str + model_class: Type[models.Model] + name: str + + def _check_condition(self, data: dict) -> bool: + """Check the condition for the custom field.""" + return data.get('vrf_id', None) is not None + + def fingerprint(self, data: dict) -> str|None: + """Fingerprint the custom field value.""" + if not self.has_required_fields(data): + return None + + if not self._check_condition(data): + return None + + value = self.ip_value(data) + if value is None: + return None + + vrf_id = data[self.vrf_field] + + return hash((self.model_class.__name__, self.name, value, vrf_id)) + + def has_required_fields(self, data: dict) -> bool: + """Returns True if the data given contains a value for all fields referenced by the constraint.""" + return self.ip_field in data and self.vrf_field in data + + def ip_value(self, data: dict) -> str|None: + """Get the IP value from the data.""" + value = data.get(self.ip_field) + if value is None: + return None + return _ip_only(value) + + def build_queryset(self, data: dict) -> models.QuerySet: + """Build a queryset for the custom field.""" + if not 
self.has_required_fields(data): + return None + + if not self._check_condition(data): + return None + + value = self.ip_value(data) + if value is None: + return None + + vrf_id = data[self.vrf_field] + return self.model_class.objects.filter(**{f'{self.ip_field}__net_host': value, f'{self.vrf_field}': vrf_id}) + + +def _ip_only(value: str) -> str|None: + try: + ip = netaddr.IPNetwork(value) + value = ip.ip + except netaddr.core.AddrFormatError: + return None + + return value + @dataclass class AutoSlugMatcher: """A special matcher that tries to match on auto generated slugs.""" diff --git a/netbox_diode_plugin/api/plugin_utils.py b/netbox_diode_plugin/api/plugin_utils.py index d85038c..a5d9cef 100644 --- a/netbox_diode_plugin/api/plugin_utils.py +++ b/netbox_diode_plugin/api/plugin_utils.py @@ -1,7 +1,7 @@ """Diode plugin helpers.""" # Generated code. DO NOT EDIT. -# Timestamp: 2025-04-13 13:20:10Z +# Timestamp: 2025-04-13 16:50:25Z from dataclasses import dataclass import datetime @@ -13,6 +13,8 @@ from core.models import ObjectType as NetBoxType from django.contrib.contenttypes.models import ContentType from django.db import models +import netaddr +from rest_framework.exceptions import ValidationError logger = logging.getLogger(__name__) @@ -1014,6 +1016,12 @@ def transform_float_to_decimal(value: float) -> decimal.Decimal: def int_from_int64string(value: str) -> int: return int(value) +def ip_network_defaulting(value: str) -> str: + try: + return str(netaddr.IPNetwork(value)) + except netaddr.AddrFormatError: + raise ValueError(f'Invalid IP network value: {value}') + def collect_integer_pairs(value: list[int]) -> list[tuple[int, int]]: if len(value) % 2 != 0: raise ValueError('Array must have an even number of elements') @@ -1114,6 +1122,7 @@ def wrapper(value): }, 'ipam.aggregate': { 'date_added': transform_timestamp_to_date_only, + 'prefix': ip_network_defaulting, }, 'ipam.asn': { 'asn': int_from_int64string, @@ -1128,6 +1137,16 @@ def wrapper(value): 
'ipam.fhrpgroupassignment': { 'priority': int_from_int64string, }, + 'ipam.ipaddress': { + 'address': ip_network_defaulting, + }, + 'ipam.iprange': { + 'end_address': ip_network_defaulting, + 'start_address': ip_network_defaulting, + }, + 'ipam.prefix': { + 'prefix': ip_network_defaulting, + }, 'ipam.role': { 'weight': int_from_int64string, }, diff --git a/netbox_diode_plugin/tests/test_api_diff_and_apply.py b/netbox_diode_plugin/tests/test_api_diff_and_apply.py index 2367309..fdd3386 100644 --- a/netbox_diode_plugin/tests/test_api_diff_and_apply.py +++ b/netbox_diode_plugin/tests/test_api_diff_and_apply.py @@ -7,6 +7,7 @@ import logging from uuid import uuid4 + from core.models import ObjectType from dcim.models import Device, Interface, Site from ipam.models import VLANGroup @@ -15,6 +16,7 @@ from extras.models import CustomField from extras.models.customfields import CustomFieldTypeChoices from ipam.models import IPAddress +import netaddr from rest_framework import status from users.models import Token from utilities.testing import APITestCase @@ -638,6 +640,189 @@ def test_generate_diff_and_apply_vlan_group_with_vid_ranges(self): self.assertEqual(new_vlan_group.vid_ranges[1].lower, 12) self.assertEqual(new_vlan_group.vid_ranges[1].upper, 21) + def test_generate_diff_and_apply_ip_address_with_assigned_object_interface(self): + """Test ip.""" + payload = { + "timestamp": 1, + "object_type": "ipam.ipaddress", + "entity": { + "ip_address": { + "address": "254.198.174.116", + "status": "deprecated", + "role": "secondary", + "assigned_object_interface": { + "device": { + "name": "Device ABC", + "device_type": { + "manufacturer": { + "name": "Manufacturer ABC" + }, + "model": "Device Type ABC" + }, + "role": { + "name": "Role ABC" + }, + "platform": { + "name": "Platform ABC", + "manufacturer": { + "name": "Manufacturer ABC" + } + }, + "site": { + "name": "Site ABC" + } + }, + "name": "Interface ABC", + "type": "1000base-t", + "mode": "access" + }, + "description": 
"IP Address description", + "comments": "Lorem ipsum dolor sit amet", + "tags": [ + { + "name": "tag 1" + }, + { + "name": "tag 2" + } + ] + } + } + } + _, response = self.diff_and_apply(payload) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_generate_diff_update_ip_address(self): + """Test generate diff update ip address.""" + payload = { + "timestamp": 1, + "object_type": "ipam.ipaddress", + "entity": { + "ip_address": { + "address": "254.198.174.116", + "status": "deprecated", + "role": "secondary", + } + } + } + _, response = self.diff_and_apply(payload) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + payload = { + "timestamp": 1, + "object_type": "ipam.ipaddress", + "entity": { + "ip_address": { + "address": "254.198.174.116", + "status": "deprecated", + "role": "secondary", + } + } + } + + response1 = self.client.post( + self.diff_url, data=payload, format="json", **self.user_header + ) + self.assertEqual(response1.status_code, status.HTTP_200_OK) + diff = response1.json().get("change_set", {}) + self.assertEqual(diff.get("changes", []), []) + + payload = { + "timestamp": 1, + "object_type": "ipam.ipaddress", + "entity": { + "ip_address": { + "address": "254.198.174.116/32", + "status": "deprecated", + "role": "secondary", + } + } + } + + response1 = self.client.post( + self.diff_url, data=payload, format="json", **self.user_header + ) + self.assertEqual(response1.status_code, status.HTTP_200_OK) + diff = response1.json().get("change_set", {}) + self.assertEqual(diff.get("changes", []), []) + + payload = { + "timestamp": 1, + "object_type": "ipam.ipaddress", + "entity": { + "ip_address": { + "address": "254.198.174.116", + "status": "active", + "role": "secondary", + } + } + } + + _ = self.diff_and_apply(payload) + ip = IPAddress.objects.get(address="254.198.174.116") + self.assertEqual(ip.status, "active") + + payload = { + "timestamp": 1, + "object_type": "ipam.ipaddress", + "entity": { + "ip_address": { + 
"address": "254.198.174.116/24", + "status": "deprecated", + } + } + } + _ = self.diff_and_apply(payload) + ip = IPAddress.objects.get(address="254.198.174.116/24") + self.assertEqual(ip.role, "secondary") + self.assertEqual(ip.status, "deprecated") + self.assertEqual(ip.address, netaddr.IPNetwork("254.198.174.0/24")) + + vrf_uuid = str(uuid4()) + payload = { + "timestamp": 1, + "object_type": "ipam.ipaddress", + "entity": { + "ip_address": { + "address": "254.198.174.116/24", + "status": "active", + "vrf": { + "name": f"VRF {vrf_uuid}" + } + } + } + } + _ = self.diff_and_apply(payload) + ip = IPAddress.objects.get(address="254.198.174.116/24", vrf__name=f"VRF {vrf_uuid}") + self.assertEqual(ip.vrf.name, f"VRF {vrf_uuid}") + self.assertEqual(ip.status, "active") + + ip2 = IPAddress.objects.get(address="254.198.174.116/24", vrf__isnull=True) + self.assertEqual(ip2.vrf, None) + self.assertEqual(ip2.status, "deprecated") + + payload = { + "timestamp": 1, + "object_type": "ipam.ipaddress", + "entity": { + "ip_address": { + "address": "254.198.174.116", + "status": "dhcp", + "vrf": { + "name": f"VRF {vrf_uuid}" + } + } + } + } + _ = self.diff_and_apply(payload) + ip = IPAddress.objects.get(address="254.198.174.116", vrf__name=f"VRF {vrf_uuid}") + self.assertEqual(ip.status, "dhcp") + + ip2 = IPAddress.objects.get(address="254.198.174.116/24", vrf__isnull=True) + self.assertEqual(ip2.vrf, None) + self.assertEqual(ip2.status, "deprecated") + + def diff_and_apply(self, payload): """Diff and apply the payload.""" From adf151f95b6770b247cbe1941f1c0ced5faf69cb Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Sun, 13 Apr 2025 15:15:17 -0400 Subject: [PATCH 25/52] fix: fix complex vm interface update case (#82) * fix: fix complex vm interface update case * lint --- netbox_diode_plugin/api/differ.py | 4 +- netbox_diode_plugin/api/matcher.py | 2 +- netbox_diode_plugin/api/transformer.py | 4 + .../tests/test_api_diff_and_apply.py | 81 
+++++++++++++++++-- 4 files changed, 83 insertions(+), 8 deletions(-) diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index ca18faf..63ad865 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -6,11 +6,11 @@ import datetime import logging +import netaddr from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError -from utilities.data import shallow_compare_dict from django.db.backends.postgresql.psycopg_any import NumericRange -import netaddr +from utilities.data import shallow_compare_dict from .common import Change, ChangeSet, ChangeSetException, ChangeSetResult, ChangeType, error_from_validation_error from .plugin_utils import get_primary_value, legal_fields diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py index c033a26..c0ee706 100644 --- a/netbox_diode_plugin/api/matcher.py +++ b/netbox_diode_plugin/api/matcher.py @@ -8,6 +8,7 @@ from functools import cache, lru_cache from typing import Type +import netaddr from core.models import ObjectType as NetBoxType from django.conf import settings from django.contrib.contenttypes.fields import ContentType @@ -17,7 +18,6 @@ from django.db.models.fields import SlugField from django.db.models.lookups import Exact from django.db.models.query_utils import Q -import netaddr from extras.models.customfields import CustomField from .common import AutoSlug, UnresolvedReference diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index 830e5a7..f95ec52 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -364,6 +364,10 @@ def _resolve_existing_references(entities: list[dict]) -> list[dict]: data = copy.deepcopy(data) _update_resolved_refs(data, new_refs) + if data.get('_is_post_create'): + resolved.append(data) + continue + existing = find_existing_object(data, object_type) if existing is 
not None: logger.debug(f"existing {data} -> {existing}") diff --git a/netbox_diode_plugin/tests/test_api_diff_and_apply.py b/netbox_diode_plugin/tests/test_api_diff_and_apply.py index fdd3386..9a58989 100644 --- a/netbox_diode_plugin/tests/test_api_diff_and_apply.py +++ b/netbox_diode_plugin/tests/test_api_diff_and_apply.py @@ -2,24 +2,24 @@ # Copyright 2024 NetBox Labs Inc """Diode NetBox Plugin - Tests.""" +import copy import datetime import decimal import logging from uuid import uuid4 - +import netaddr +from circuits.models import Circuit from core.models import ObjectType from dcim.models import Device, Interface, Site -from ipam.models import VLANGroup -from circuits.models import Circuit from django.contrib.auth import get_user_model from extras.models import CustomField from extras.models.customfields import CustomFieldTypeChoices -from ipam.models import IPAddress -import netaddr +from ipam.models import IPAddress, VLANGroup from rest_framework import status from users.models import Token from utilities.testing import APITestCase +from virtualization.models import VMInterface logger = logging.getLogger(__name__) @@ -822,7 +822,78 @@ def test_generate_diff_update_ip_address(self): self.assertEqual(ip2.vrf, None) self.assertEqual(ip2.status, "deprecated") + def test_generate_diff_and_apply_complex_vminterface(self): + """Test generate diff and apply and update a complex vm interface.""" + payload = { + "timestamp": 1, + "object_type": "virtualization.vminterface", + "entity": { + "vm_interface": { + "virtual_machine": { + "name": "Virtual Machine 15e00bdf-4294-41df-a450-ffcfec6c7f2b", + "status": "active", + "site": { + "name": "Site 10" + }, + "cluster": { + "name": "Cluster 10", + "type": { + "name": "Cluster type 10" + }, + "group": { + "name": "Cluster group 10" + }, + "status": "active", + "scope_site": { + "name": "Site 10" + } + }, + "role": { + "name": "Role 10" + }, + "platform": { + "name": "Platform 10", + "manufacturer": { + "name": "Manufacturer 
10" + } + }, + "vcpus": 1.0, + "memory": "4096", + "disk": "100", + "description": "Virtual Machine A description", + "comments": "Lorem ipsum dolor sit amet", + "tags": [ + { + "name": "tag 1" + } + ] + }, + "name": "Interface 47e8a593-8b74-4e94-9a8e-c02113f0bf88", + "enabled": False, + "mtu": "1500", + "primary_mac_address": { + "mac_address": "00:00:00:00:00:00" + }, + "description": "Interface A description", + "tags": [ + { + "name": "tag 1" + } + ] + } + } + } + _ = self.diff_and_apply(payload) + payload2 = copy.deepcopy(payload) + payload2['entity']['vm_interface']["mtu"] = "2000" + payload2['entity']['vm_interface']["primary_mac_address"] = { + "mac_address": "00:00:00:00:00:01" + } + _ = self.diff_and_apply(payload2) + vm_interface = VMInterface.objects.get(name="Interface 47e8a593-8b74-4e94-9a8e-c02113f0bf88") + self.assertEqual(vm_interface.mtu, 2000) + self.assertEqual(vm_interface.primary_mac_address.mac_address, "00:00:00:00:00:01") def diff_and_apply(self, payload): """Diff and apply the payload.""" From d847a007f2c816312df6ac1c9f82be3e5be137d3 Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Sun, 13 Apr 2025 17:01:47 -0400 Subject: [PATCH 26/52] fix: fix decimal / eui previous values (#83) --- netbox_diode_plugin/api/differ.py | 8 ++++++-- .../tests/test_api_diff_and_apply.py | 18 ++++++++++++++++++ 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index 63ad865..7522464 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -4,12 +4,14 @@ import copy import datetime +import decimal import logging import netaddr from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError from django.db.backends.postgresql.psycopg_any import NumericRange +from netaddr.eui import EUI from utilities.data import shallow_compare_dict from .common import Change, 
ChangeSet, ChangeSetException, ChangeSetResult, ChangeType, error_from_validation_error @@ -87,7 +89,7 @@ def prechange_data_from_instance(instance) -> dict: # noqa: C901 def _harmonize_formats(prechange_data): if prechange_data is None: return None - if isinstance(prechange_data, (str, int, float, bool)): + if isinstance(prechange_data, (str, int, float, bool, decimal.Decimal)): return prechange_data if isinstance(prechange_data, dict): return {k: _harmonize_formats(v) for k, v in prechange_data.items()} @@ -101,9 +103,11 @@ def _harmonize_formats(prechange_data): return (prechange_data.lower, prechange_data.upper-1) if isinstance(prechange_data, netaddr.IPNetwork): return str(prechange_data) + if isinstance(prechange_data, EUI): + return str(prechange_data) logger.warning(f"Unknown type in prechange_data: {type(prechange_data)}") - return str(prechange_data) + return prechange_data def clean_diff_data(data: dict, exclude_empty_values: bool = True) -> dict: """Clean diff data by removing null values.""" diff --git a/netbox_diode_plugin/tests/test_api_diff_and_apply.py b/netbox_diode_plugin/tests/test_api_diff_and_apply.py index 9a58989..f53b1b1 100644 --- a/netbox_diode_plugin/tests/test_api_diff_and_apply.py +++ b/netbox_diode_plugin/tests/test_api_diff_and_apply.py @@ -575,6 +575,24 @@ def test_generate_diff_and_apply_site_with_lat_lon(self): self.assertEqual(new_site.latitude, decimal.Decimal("23.456")) self.assertEqual(new_site.longitude, decimal.Decimal("78.910")) + payload = { + "timestamp": 1, + "object_type": "dcim.site", + "entity": { + "site": { + "name": f"Site {site_uuid}", + "latitude": 23.456, + "longitude": 78.910, + }, + }, + } + response1 = self.client.post( + self.diff_url, data=payload, format="json", **self.user_header + ) + self.assertEqual(response1.status_code, status.HTTP_200_OK) + diff = response1.json().get("change_set", {}) + self.assertEqual(diff.get("changes", []), []) + def test_generate_diff_and_apply_wrong_type_date(self): """Test 
generate diff and apply wrong type date.""" payload = { From bfe4788fdc14ee18ccf7e9d90186f5bcea1fdcdd Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Thu, 17 Apr 2025 21:30:41 +0200 Subject: [PATCH 27/52] fix: update dependency versions to allow for newer releases (#84) Signed-off-by: Michal Fiedorowicz --- pyproject.toml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 2a99b4e..dccad0a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,10 +25,10 @@ classifiers = [ ] dependencies = [ - "Brotli==1.1.0", - "certifi==2024.7.4", - "grpcio==1.62.1", - "protobuf==5.28.1", + "Brotli>=1.1.0", + "certifi>=2024.7.4", + "grpcio>=1.68.1", + "protobuf>=5.28.1", ] [project.optional-dependencies] From e85ebf9d849bf0ccdeacb96ad56c6b948642f7b9 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Mon, 21 Apr 2025 13:18:27 +0200 Subject: [PATCH 28/52] feat: implement OAuth2 authentication and permissions for Diode NetBox Plugin - Added DiodeOAuth2Authentication class for handling OAuth2 token validation. - Introduced IsDiodeOAuth2Authenticated permission class to check OAuth2 authentication. - Updated GenerateDiffView and ApplyChangeSetView to use the new authentication and permission classes. 
Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/authentication.py | 79 +++++++++++++++++++++++ netbox_diode_plugin/api/permissions.py | 19 +----- netbox_diode_plugin/api/views.py | 9 ++- 3 files changed, 88 insertions(+), 19 deletions(-) create mode 100644 netbox_diode_plugin/api/authentication.py diff --git a/netbox_diode_plugin/api/authentication.py b/netbox_diode_plugin/api/authentication.py new file mode 100644 index 0000000..e4189c3 --- /dev/null +++ b/netbox_diode_plugin/api/authentication.py @@ -0,0 +1,79 @@ +#!/usr/bin/env python +# Copyright 2024 NetBox Labs Inc +"""Diode NetBox Plugin - API Authentication.""" + +import os +import requests +import logging +import hashlib + +from django.core.cache import cache +from rest_framework.authentication import BaseAuthentication +from rest_framework.exceptions import AuthenticationFailed + +logger = logging.getLogger("netbox.diode_data") + +class DiodeOAuth2Authentication(BaseAuthentication): + """Diode OAuth2 Client Credentials Authentication.""" + + def authenticate(self, request): + """Authenticate the request and return the user info.""" + auth_header = request.headers.get("Authorization", "") + if not auth_header.startswith("Bearer "): + return None + + token = auth_header[7:].strip() + + user = self._introspect_token(token) + if not user: + raise AuthenticationFailed("Invalid OAuth2 token.") + + return (user, None) + + def _validate_token(self, token: str): + """Validate the token and return the user info.""" + hash_token = hashlib.sha256(token.encode()).hexdigest() + cache_key = f"diode:oauth2:introspect:{hash_token}" + cached = cache.get(cache_key) + if cached: + return cached + + # Load config from environment variables + # TODO: Move to plugin config + introspect_url = os.environ.get("OAUTH2_INTROSPECT_URL") + + if not introspect_url: + logger.error("OAuth2 configuration is missing.") + return None + + try: + response = requests.post( + introspect_url, + data={"token": token}, + timeout=5 + 
) + response.raise_for_status() + data = response.json() + except Exception as e: + logger.error(f"OAuth2 introspection failed: {e}") + return None + + if data.get("active"): + # Check if token has the required scope for Diode NetBox access + scopes = data.get("scope", "").split() + has_diode_to_netbox_scope = any(scope.endswith(":diode:netbox") for scope in scopes) + + if not has_diode_to_netbox_scope: + logger.warning(f"Token missing required :diode:netbox scope. Scopes: {scopes}") + return None + + # Create an authenticated user-like object + user_info = type("DiodeOAuth2User", (), { + "is_authenticated": True, + "token_data": data + })() + expires_in = data.get("exp") - data.get("iat") if "exp" in data and "iat" in data else 300 + cache.set(cache_key, user_info, timeout=expires_in) + return user_info + + return None diff --git a/netbox_diode_plugin/api/permissions.py b/netbox_diode_plugin/api/permissions.py index 9eb8cf7..f156a27 100644 --- a/netbox_diode_plugin/api/permissions.py +++ b/netbox_diode_plugin/api/permissions.py @@ -5,21 +5,8 @@ from rest_framework.permissions import SAFE_METHODS, BasePermission -class IsDiodeReader(BasePermission): - """Custom permission to allow users that has permission "netbox_diode_plugin.view_objectstate" to view the object type.""" +class IsDiodeOAuth2Authenticated(BasePermission): + """Check if the request is authenticated via OAuth2.""" def has_permission(self, request, view): - """Check if the request is in SAFE_METHODS and user has netbox_diode_plugin.view_diode permission.""" - return request.method in SAFE_METHODS and request.user.has_perm( - "netbox_diode_plugin.view_diode" - ) - - -class IsDiodeWriter(BasePermission): - """Custom permission to allow users that has permission "netbox_diode_plugin.add_diode" and POST requests.""" - - def has_permission(self, request, view): - """Check if the request is in POST and user has netbox_diode_plugin.add_diode permission.""" - return request.method in ["POST"] and 
request.user.has_perm( - "netbox_diode_plugin.add_diode" - ) + return bool(getattr(request.user, "is_authenticated", False)) diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index d286175..3f5c3e7 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -15,7 +15,8 @@ from netbox_diode_plugin.api.applier import apply_changeset from netbox_diode_plugin.api.common import Change, ChangeSet, ChangeSetException, ChangeSetResult from netbox_diode_plugin.api.differ import generate_changeset -from netbox_diode_plugin.api.permissions import IsDiodeWriter +from netbox_diode_plugin.api.permissions import IsDiodeOAuth2Authenticated +from netbox_diode_plugin.api.authentication import DiodeOAuth2Authentication logger = logging.getLogger("netbox.diode_data") @@ -48,7 +49,8 @@ def get_valid_entity_keys(model_name): class GenerateDiffView(views.APIView): """GenerateDiff view.""" - permission_classes = [IsAuthenticated, IsDiodeWriter] + authentication_classes = [DiodeOAuth2Authentication] + permission_classes = [IsDiodeOAuth2Authenticated] def post(self, request, *args, **kwargs): """Generate diff for entity.""" @@ -106,7 +108,8 @@ def _post(self, request, *args, **kwargs): class ApplyChangeSetView(views.APIView): """ApplyChangeSet view.""" - permission_classes = [IsAuthenticated, IsDiodeWriter] + authentication_classes = [DiodeOAuth2Authentication] + permission_classes = [IsDiodeOAuth2Authenticated] def post(self, request, *args, **kwargs): """Apply change set for entity.""" From f0a8af438684ea3455516f9275f03247f06e905f Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Mon, 21 Apr 2025 13:20:38 +0200 Subject: [PATCH 29/52] chore: update copyright year in authentication, permissions, and views files Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/authentication.py | 2 +- netbox_diode_plugin/api/permissions.py | 2 +- netbox_diode_plugin/api/views.py | 2 +- 3 files changed, 3 insertions(+), 3 
deletions(-) diff --git a/netbox_diode_plugin/api/authentication.py b/netbox_diode_plugin/api/authentication.py index e4189c3..af88c80 100644 --- a/netbox_diode_plugin/api/authentication.py +++ b/netbox_diode_plugin/api/authentication.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. """Diode NetBox Plugin - API Authentication.""" import os diff --git a/netbox_diode_plugin/api/permissions.py b/netbox_diode_plugin/api/permissions.py index f156a27..f0a9a89 100644 --- a/netbox_diode_plugin/api/permissions.py +++ b/netbox_diode_plugin/api/permissions.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. """Diode NetBox Plugin - API Permissions.""" from rest_framework.permissions import SAFE_METHODS, BasePermission diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index 3f5c3e7..6049d3f 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. 
"""Diode NetBox Plugin - API Views.""" import json import logging From 0d3522ac1a821c351302fcd383ff68078f95428b Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Mon, 21 Apr 2025 12:31:00 -0400 Subject: [PATCH 30/52] fix: create and fix failing tests (#87) * fix: handle callable defaults, defaults with non serializable values * fix: mark circuits.provider.accounts as circular * fix: error deduplicating device types * fix: report merge errors as 400 not 500 * fix: mark device oob_ip as circular * fix: adjust fingerprint matching for complex module bay case * fix: test virtual machine with primary_ip4 errors, logical cluster matching * fix: harmonize ZoneInfo values * fix: fix logical prefix in vrf lookup * fix: handle IntegrityError nicely * fix: support cached scope for matching * fix: add additional logical matchers * fix: don't alter arrays. output prior states for other ref types * fix: add table test for create/update of various types * fix: additional fixes from testing * fix: reduce logging, guard expensive trace logging with flag * fix: linting --- netbox_diode_plugin/api/applier.py | 13 +- netbox_diode_plugin/api/common.py | 34 +- netbox_diode_plugin/api/differ.py | 56 +- netbox_diode_plugin/api/matcher.py | 319 +- netbox_diode_plugin/api/transformer.py | 215 +- .../tests/test_api_diff_and_apply.py | 440 +- .../tests/test_api_generate_diff.py | 84 + netbox_diode_plugin/tests/test_updates.py | 177 + .../tests/test_updates_cases.json | 5902 +++++++++++++++++ 9 files changed, 7087 insertions(+), 153 deletions(-) create mode 100644 netbox_diode_plugin/tests/test_updates.py create mode 100644 netbox_diode_plugin/tests/test_updates_cases.json diff --git a/netbox_diode_plugin/api/applier.py b/netbox_diode_plugin/api/applier.py index 0267302..aab9a2b 100644 --- a/netbox_diode_plugin/api/applier.py +++ b/netbox_diode_plugin/api/applier.py @@ -9,6 +9,7 @@ from django.contrib.contenttypes.models import ContentType from 
django.core.exceptions import ObjectDoesNotExist from django.db import models +from django.db.utils import IntegrityError from rest_framework.exceptions import ValidationError as ValidationError from .common import NON_FIELD_ERRORS, Change, ChangeSet, ChangeSetException, ChangeSetResult, ChangeType, error_from_validation_error @@ -41,10 +42,14 @@ def apply_changeset(change_set: ChangeSet, request) -> ChangeSetResult: except TypeError as e: # this indicates a problem in model validation (should raise ValidationError) # but raised non-validation error (TypeError) -- we don't know which field trigged it. - logger.error(f"invalid data type for unspecified field (validation raised non-validation error): {data}: {e}") - raise _err("invalid data type for field", object_type, "__all__") - # ConstraintViolationError ? - # ... + import traceback + traceback.print_exc() + logger.error(f"validation raised TypeError error on unspecified field of {object_type}: {data}: {e}") + logger.error(traceback.format_exc()) + raise _err("invalid data type for field (TypeError)", object_type, "__all__") + except IntegrityError as e: + logger.error(f"Integrity error {object_type}: {e} {data}") + raise _err(f"created a conflict with an existing {object_type}", object_type, "__all__") return ChangeSetResult( id=change_set.id, diff --git a/netbox_diode_plugin/api/common.py b/netbox_diode_plugin/api/common.py index 8ea7b1e..8a7d754 100644 --- a/netbox_diode_plugin/api/common.py +++ b/netbox_diode_plugin/api/common.py @@ -2,23 +2,30 @@ # Copyright 2025 NetBox Labs Inc """Diode NetBox Plugin - API - Common types and utilities.""" +import datetime +import decimal import logging import uuid from collections import defaultdict from dataclasses import dataclass, field from enum import Enum +import netaddr from django.apps import apps from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation from django.contrib.contenttypes.models import ContentType from django.core.exceptions 
import ValidationError from django.db import models +from django.db.backends.postgresql.psycopg_any import NumericRange from extras.models import CustomField +from netaddr.eui import EUI from rest_framework import status +from zoneinfo import ZoneInfo logger = logging.getLogger("netbox.diode_data") NON_FIELD_ERRORS = "__all__" +_TRACE = False @dataclass class UnresolvedReference: @@ -43,6 +50,8 @@ def __hash__(self): def __lt__(self, other): """Less than operator.""" + if not isinstance(other, UnresolvedReference): + return False return self.object_type < other.object_type or (self.object_type == other.object_type and self.uuid < other.uuid) @@ -238,7 +247,7 @@ class AutoSlug: def error_from_validation_error(e, object_name): - """Convert a from rest_framework.exceptions.ValidationError to a ChangeSetException.""" + """Convert a from DRF ValidationError to a ChangeSetException.""" errors = {} if e.detail: if isinstance(e.detail, dict): @@ -252,3 +261,26 @@ def error_from_validation_error(e, object_name): NON_FIELD_ERRORS: [e.detail] } return ChangeSetException("validation error", errors=errors) + +def harmonize_formats(data): + """Puts all data in a format that can be serialized and compared.""" + match data: + case None: + return None + case str() | int() | float() | bool() | decimal.Decimal() | UnresolvedReference(): + return data + case dict(): + return {k: harmonize_formats(v) if not k.startswith("_") else v for k, v in data.items()} + case list() | tuple(): + return [harmonize_formats(v) for v in data] + case datetime.datetime(): + return data.strftime("%Y-%m-%dT%H:%M:%SZ") + case datetime.date(): + return data.strftime("%Y-%m-%d") + case NumericRange(): + return (data.lower, data.upper-1) + case netaddr.IPNetwork() | EUI() | ZoneInfo(): + return str(data) + case _: + logger.warning(f"Unknown type in harmonize_formats: {type(data)}") + return data diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index 7522464..07ee4d2 100644 
--- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -9,12 +9,21 @@ import netaddr from django.contrib.contenttypes.models import ContentType -from django.core.exceptions import ValidationError from django.db.backends.postgresql.psycopg_any import NumericRange from netaddr.eui import EUI +from rest_framework import serializers from utilities.data import shallow_compare_dict -from .common import Change, ChangeSet, ChangeSetException, ChangeSetResult, ChangeType, error_from_validation_error +from .common import ( + NON_FIELD_ERRORS, + Change, + ChangeSet, + ChangeSetException, + ChangeSetResult, + ChangeType, + error_from_validation_error, + harmonize_formats, +) from .plugin_utils import get_primary_value, legal_fields from .supported_models import extract_supported_models from .transformer import cleanup_unresolved_references, set_custom_field_defaults, transform_proto_json @@ -36,11 +45,15 @@ def prechange_data_from_instance(instance) -> dict: # noqa: C901 model = SUPPORTED_MODELS.get(object_type) if not model: - raise ValidationError(f"Model {model_class.__name__} is not supported") + raise serializers.ValidationError({ + NON_FIELD_ERRORS: [f"Model {model_class.__name__} is not supported"] + }) fields = model.get("fields", {}) if not fields: - raise ValidationError(f"Model {model_class.__name__} has no fields") + raise serializers.ValidationError({ + NON_FIELD_ERRORS: [f"Model {model_class.__name__} has no fields"] + }) diode_fields = legal_fields(model_class) @@ -52,9 +65,6 @@ def prechange_data_from_instance(instance) -> dict: # noqa: C901 if not hasattr(instance, field_name): continue - if field_info["type"] == "ForeignKey" and field_info.get("is_many_to_one_rel", False): - continue - value = getattr(instance, field_name) if hasattr(value, "all"): # Handle many-to-many and many-to-one relationships # For any relationship that has an 'all' method, get all related objects' primary keys @@ -82,33 +92,11 @@ def 
prechange_data_from_instance(instance) -> dict: # noqa: C901 else: cfmap[cf.name] = cf.serialize(value) prechange_data["custom_fields"] = cfmap - prechange_data = _harmonize_formats(prechange_data) - return prechange_data - + prechange_data = harmonize_formats(prechange_data) -def _harmonize_formats(prechange_data): - if prechange_data is None: - return None - if isinstance(prechange_data, (str, int, float, bool, decimal.Decimal)): - return prechange_data - if isinstance(prechange_data, dict): - return {k: _harmonize_formats(v) for k, v in prechange_data.items()} - if isinstance(prechange_data, (list, tuple)): - return [_harmonize_formats(v) for v in prechange_data] - if isinstance(prechange_data, datetime.datetime): - return prechange_data.strftime("%Y-%m-%dT%H:%M:%SZ") - if isinstance(prechange_data, datetime.date): - return prechange_data.strftime("%Y-%m-%d") - if isinstance(prechange_data, NumericRange): - return (prechange_data.lower, prechange_data.upper-1) - if isinstance(prechange_data, netaddr.IPNetwork): - return str(prechange_data) - if isinstance(prechange_data, EUI): - return str(prechange_data) - - logger.warning(f"Unknown type in prechange_data: {type(prechange_data)}") return prechange_data + def clean_diff_data(data: dict, exclude_empty_values: bool = True) -> dict: """Clean diff data by removing null values.""" result = {} @@ -139,7 +127,6 @@ def diff_to_change( change_type = ChangeType.UPDATE if len(prechange_data) > 0 else ChangeType.CREATE if change_type == ChangeType.UPDATE and not len(changed_attrs) > 0: change_type = ChangeType.NOOP - primary_value = str(get_primary_value(prechange_data | postchange_data, object_type)) if primary_value is None: primary_value = "(unnamed)" @@ -173,8 +160,7 @@ def sort_dict_recursively(d): if isinstance(d, dict): return {k: sort_dict_recursively(v) for k, v in sorted(d.items())} if isinstance(d, list): - # Convert all items to strings for comparison - return sorted([sort_dict_recursively(item) for item in d], 
key=str) + return [sort_dict_recursively(item) for item in d] return d def generate_changeset(entity: dict, object_type: str) -> ChangeSetResult: @@ -183,7 +169,7 @@ def generate_changeset(entity: dict, object_type: str) -> ChangeSetResult: return _generate_changeset(entity, object_type) except ChangeSetException: raise - except ValidationError as e: + except serializers.ValidationError as e: raise error_from_validation_error(e, object_type) except Exception as e: logger.error(f"Unexpected error generating changeset: {e}") diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py index c0ee706..f9ebea5 100644 --- a/netbox_diode_plugin/api/matcher.py +++ b/netbox_diode_plugin/api/matcher.py @@ -20,7 +20,7 @@ from django.db.models.query_utils import Q from extras.models.customfields import CustomField -from .common import AutoSlug, UnresolvedReference +from .common import _TRACE, AutoSlug, UnresolvedReference from .plugin_utils import content_type_id, get_object_type, get_object_type_model logger = logging.getLogger(__name__) @@ -46,20 +46,48 @@ condition=Q(assigned_object_id__isnull=True), ), ], + "ipam.aggregate": lambda: [ + ObjectMatchCriteria( + fields=("prefix",), + name="logical_aggregate_prefix_no_rir", + model_class=get_object_type_model("ipam.aggregate"), + condition=Q(rir__isnull=True), + ), + ObjectMatchCriteria( + fields=("prefix", "rir"), + name="logical_aggregate_prefix_within_rir", + model_class=get_object_type_model("ipam.aggregate"), + condition=Q(rir__isnull=False), + ), + ], "ipam.ipaddress": lambda: [ GlobalIPNetworkIPMatcher( - ip_field="address", + ip_fields=("address",), vrf_field="vrf", model_class=get_object_type_model("ipam.ipaddress"), name="logical_ip_address_global_no_vrf", ), VRFIPNetworkIPMatcher( - ip_field="address", + ip_fields=("address",), vrf_field="vrf", model_class=get_object_type_model("ipam.ipaddress"), name="logical_ip_address_within_vrf", ), ], + "ipam.iprange": lambda: [ + 
GlobalIPNetworkIPMatcher( + ip_fields=("start_address", "end_address"), + vrf_field="vrf", + model_class=get_object_type_model("ipam.iprange"), + name="logical_ip_range_start_end_global_no_vrf", + ), + VRFIPNetworkIPMatcher( + ip_fields=("start_address", "end_address"), + vrf_field="vrf", + model_class=get_object_type_model("ipam.iprange"), + name="logical_ip_range_start_end_within_vrf", + ), + ], "ipam.prefix": lambda: [ ObjectMatchCriteria( fields=("prefix",), @@ -68,12 +96,115 @@ condition=Q(vrf__isnull=True), ), ObjectMatchCriteria( - fields=("prefix", "vrf_id"), + fields=("prefix", "vrf"), name="logical_prefix_within_vrf", model_class=get_object_type_model("ipam.prefix"), condition=Q(vrf__isnull=False), ), ], + "virtualization.cluster": lambda: [ + ObjectMatchCriteria( + fields=("name", "scope_type", "scope_id"), + name="logical_cluster_within_scope", + model_class=get_object_type_model("virtualization.cluster"), + condition=Q(scope_type__isnull=False), + ), + ObjectMatchCriteria( + fields=("name",), + name="logical_cluster_with_no_scope_or_group", + model_class=get_object_type_model("virtualization.cluster"), + condition=Q(scope_type__isnull=True, group__isnull=True), + ), + ], + "ipam.vlan": lambda: [ + ObjectMatchCriteria( + fields=("vid",), + name="logical_vlan_vid_no_group_or_svlan", + model_class=get_object_type_model("ipam.vlan"), + condition=Q(group__isnull=True, qinq_svlan__isnull=True), + ), + ], + "ipam.vlangroup": lambda: [ + ObjectMatchCriteria( + fields=("name",), + name="logical_vlan_group_name_no_scope", + model_class=get_object_type_model("ipam.vlangroup"), + condition=Q(scope_type__isnull=True), + ), + ], + "wireless.wirelesslan": lambda: [ + ObjectMatchCriteria( + fields=("ssid",), + name="logical_wireless_lan_ssid_no_group_or_vlan", + model_class=get_object_type_model("wireless.wirelesslan"), + condition=Q(group__isnull=True, vlan__isnull=True), + ), + ObjectMatchCriteria( + fields=("ssid", "group"), + 
name="logical_wireless_lan_ssid_in_group", + model_class=get_object_type_model("wireless.wirelesslan"), + condition=Q(group__isnull=False), + ), + ObjectMatchCriteria( + fields=("ssid", "vlan"), + name="logical_wireless_lan_ssid_in_vlan", + model_class=get_object_type_model("wireless.wirelesslan"), + condition=Q(vlan__isnull=False), + ), + ], + "virtualization.virtualmachine": lambda: [ + ObjectMatchCriteria( + fields=("name",), + name="logical_virtual_machine_name_no_cluster", + model_class=get_object_type_model("virtualization.virtualmachine"), + condition=Q(cluster__isnull=True), + ), + ], + "ipam.service": lambda: [ + ObjectMatchCriteria( + fields=("name",), + name="logical_service_name_no_device_or_vm", + model_class=get_object_type_model("ipam.service"), + condition=Q(device__isnull=True, virtual_machine__isnull=True), + ), + ObjectMatchCriteria( + fields=("name", "device"), + name="logical_service_name_on_device", + model_class=get_object_type_model("ipam.service"), + condition=Q(device__isnull=False), + ), + ObjectMatchCriteria( + fields=("name", "virtual_machine"), + name="logical_service_name_on_vm", + model_class=get_object_type_model("ipam.service"), + condition=Q(virtual_machine__isnull=False), + ), + ], + "dcim.modulebay": lambda: [ + ObjectMatchCriteria( + fields=("name", "device"), + name="logical_module_bay_name_on_device", + model_class=get_object_type_model("dcim.modulebay"), + ) + ], + "dcim.inventoryitem": lambda: [ + # TODO: this may be handleable by the existing constraints. + # we ignore it due to null values for parent but could have + # better coverage of this case perhaps. 
+ ObjectMatchCriteria( + fields=("name", "device"), + name="logical_inventory_item_name_on_device_no_parent", + model_class=get_object_type_model("dcim.inventoryitem"), + condition=Q(parent__isnull=True), + ) + ], + "ipam.fhrpgroup": lambda: [ + ObjectMatchCriteria( + fields=("group_id",), + name="logical_fhrp_group_id", + model_class=get_object_type_model("ipam.fhrpgroup"), + ) + ], } @dataclass @@ -166,33 +297,45 @@ def fingerprint(self, data: dict) -> str|None: return hash((self.model_class.__name__, self.name, tuple(values))) def _check_condition(self, data) -> bool: - if self.condition is None: - return True - # TODO: handle evaluating complex conditions, - # there are only simple ones currently - if self.condition.connector != Q.AND: - logger.warning(f"Unhandled condition {self.condition}") - return False + return self._check_condition_1(data, self.condition) - if len(self.condition.children) != 1: - logger.warning(f"Unhandled condition {self.condition}") - return False + def _check_condition_1(self, data, condition) -> bool: + if condition is None: + return True + if _TRACE: logger.debug(f"checking condition {condition}") # noqa: E701 + if isinstance(condition, tuple): + return self._check_simple_condition(data, condition) + + if hasattr(condition, "connector") and condition.connector == Q.AND: + result = True + for child in condition.children: + if not self._check_condition_1(data, child): + result = False + break + if condition.negated: + if _TRACE: logger.debug(f"negated condition {condition} => {not result}") # noqa: E701 + return not result + return result + # TODO handle OR ? 
+ logger.warning(f"Unhandled condition {condition}") + return False - if len(self.condition.children[0]) != 2: - logger.warning(f"Unhandled condition {self.condition}") - return False + def _check_simple_condition(self, data, condition) -> bool: + if condition is None: + return True - k, v = self.condition.children[0] + k, v = condition + if _TRACE: logger.debug(f"checking simple condition {k} => {v}") # noqa: E701 result = False if k.endswith("__isnull"): k = k[:-8] - result = k not in data or data[k] is None + is_null = k not in data or data[k] is None + if _TRACE: logger.debug(f"checking isnull {k}? ({is_null}) want {v}") # noqa: E701 + result = is_null == v else: + if _TRACE: logger.debug(f"checking equality {k} => {data.get(k)} == {v}") # noqa: E701 result = k in data and data[k] == v - if self.condition.negated: - result = not result - return result def build_queryset(self, data) -> models.QuerySet: @@ -203,21 +346,25 @@ def build_queryset(self, data) -> models.QuerySet: return self._build_expressions_queryset(data) raise ValueError("No fields or expressions to build queryset from") - def _build_fields_queryset(self, data) -> models.QuerySet: + def _build_fields_queryset(self, data) -> models.QuerySet: # noqa: C901 """Builds a queryset for a simple set-of-fields constraint.""" + if not self._check_condition(data): + if _TRACE: logger.debug(f" * cannot build fields queryset for {self.name} (condition not met)") # noqa: E701 + return None + data = self._prepare_data(data) lookup_kwargs = {} for field_name in self.fields: field = self.model_class._meta.get_field(field_name) if field_name not in data: - logger.debug(f" * cannot build fields queryset for {self.name} (missing field {field_name})") + if _TRACE: logger.debug(f" * cannot build fields queryset for {self.name} (missing field {field_name})") # noqa: E701 return None # cannot match, missing field data lookup_value = data.get(field_name) if isinstance(lookup_value, UnresolvedReference): - logger.debug(f" * 
cannot build fields queryset for {self.name} ({field_name} is unresolved reference)") + if _TRACE: logger.debug(f" * cannot build fields queryset for {self.name} ({field_name} is unresolved reference)") # noqa: E701 return None # cannot match, missing field data if isinstance(lookup_value, dict): - logger.debug(f" * cannot build fields queryset for {self.name} ({field_name} is dict)") + if _TRACE: logger.debug(f" * cannot build fields queryset for {self.name} ({field_name} is dict)") # noqa: E701 return None # cannot match, missing field data lookup_kwargs[field.name] = lookup_value @@ -242,10 +389,10 @@ def _build_expressions_queryset(self, data) -> models.QuerySet: refs = [F(ref) for ref in _get_refs(expr)] for ref in refs: if ref not in replacements: - logger.debug(f" * cannot build expr queryset for {self.name} (missing field {ref})") + if _TRACE: logger.debug(f" * cannot build expr queryset for {self.name} (missing field {ref})") # noqa: E701 return None # cannot match, missing field data if isinstance(replacements[ref], UnresolvedReference): - logger.debug(f" * cannot build expr queryset for {self.name} ({ref} is unresolved reference)") + if _TRACE: logger.debug(f" * cannot build expr queryset for {self.name} ({ref} is unresolved reference)") # noqa: E701 return None # cannot match, missing field data rhs = expr.replace_expressions(replacements) @@ -313,7 +460,7 @@ def has_required_fields(self, data: dict) -> bool: class GlobalIPNetworkIPMatcher: """A matcher that ignores the mask.""" - ip_field: str + ip_fields: tuple[str] vrf_field: str model_class: Type[models.Model] name: str @@ -330,19 +477,22 @@ def fingerprint(self, data: dict) -> str|None: if not self._check_condition(data): return None - value = self.ip_value(data) - if value is None: - return None + values = [] + for field in self.ip_fields: + value = self.ip_value(data, field) + if value is None: + return None + values.append(value) - return hash((self.model_class.__name__, self.name, value)) + 
return hash((self.model_class.__name__, self.name, tuple(values))) def has_required_fields(self, data: dict) -> bool: """Returns True if the data given contains a value for all fields referenced by the constraint.""" - return self.ip_field in data + return all(field in data for field in self.ip_fields) - def ip_value(self, data: dict) -> str|None: + def ip_value(self, data: dict, field: str) -> str|None: """Get the IP value from the data.""" - value = data.get(self.ip_field) + value = data.get(field) if value is None: return None return _ip_only(value) @@ -350,29 +500,37 @@ def ip_value(self, data: dict) -> str|None: def build_queryset(self, data: dict) -> models.QuerySet: """Build a queryset for the custom field.""" if not self.has_required_fields(data): + if _TRACE: logger.debug(f" * cannot build expr queryset for {self.name} (missing field {self.ip_field})") # noqa: E701 return None if not self._check_condition(data): + if _TRACE: logger.debug(f" * cannot build expr queryset for {self.name} (condition not met)") # noqa: E701 return None - value = self.ip_value(data) - if value is None: - return None + filter = { + f'{self.vrf_field}__isnull': True, + } + for field in self.ip_fields: + value = self.ip_value(data, field) + if value is None: + if _TRACE: logger.debug(f" * cannot build expr queryset for {self.name} (ip value is None)") # noqa: E701 + return None + filter[f'{field}__net_host'] = value - return self.model_class.objects.filter(**{f'{self.ip_field}__net_host': value, f'{self.vrf_field}__isnull': True}) + return self.model_class.objects.filter(**filter) @dataclass class VRFIPNetworkIPMatcher: """Matches ip in a vrf, ignores mask.""" - ip_field: str + ip_fields: tuple[str] vrf_field: str model_class: Type[models.Model] name: str def _check_condition(self, data: dict) -> bool: """Check the condition for the custom field.""" - return data.get('vrf_id', None) is not None + return data.get(self.vrf_field, None) is not None def fingerprint(self, data: dict) -> 
str|None: """Fingerprint the custom field value.""" @@ -382,21 +540,24 @@ def fingerprint(self, data: dict) -> str|None: if not self._check_condition(data): return None - value = self.ip_value(data) - if value is None: - return None + values = [] + for field in self.ip_fields: + value = self.ip_value(data, field) + if value is None: + return None + values.append(value) vrf_id = data[self.vrf_field] - return hash((self.model_class.__name__, self.name, value, vrf_id)) + return hash((self.model_class.__name__, self.name, tuple(values), vrf_id)) def has_required_fields(self, data: dict) -> bool: """Returns True if the data given contains a value for all fields referenced by the constraint.""" - return self.ip_field in data and self.vrf_field in data + return all(field in data for field in self.ip_fields) and self.vrf_field in data - def ip_value(self, data: dict) -> str|None: + def ip_value(self, data: dict, field: str) -> str|None: """Get the IP value from the data.""" - value = data.get(self.ip_field) + value = data.get(field) if value is None: return None return _ip_only(value) @@ -404,17 +565,28 @@ def ip_value(self, data: dict) -> str|None: def build_queryset(self, data: dict) -> models.QuerySet: """Build a queryset for the custom field.""" if not self.has_required_fields(data): + if _TRACE: logger.debug(f" * cannot build expr queryset for {self.name} (missing field {self.ip_field})") # noqa: E701 return None if not self._check_condition(data): + if _TRACE: logger.debug(f" * cannot build expr queryset for {self.name} (condition not met)") # noqa: E701 return None - value = self.ip_value(data) - if value is None: - return None + filter = {} + for field in self.ip_fields: + value = self.ip_value(data, field) + if value is None: + if _TRACE: logger.debug(f" * cannot build expr queryset for {self.name} (ip value is None)") # noqa: E701 + return None + filter[f'{field}__net_host'] = value vrf_id = data[self.vrf_field] - return 
self.model_class.objects.filter(**{f'{self.ip_field}__net_host': value, f'{self.vrf_field}': vrf_id}) + if isinstance(vrf_id, UnresolvedReference): + if _TRACE: logger.debug(f" * cannot build expr queryset for {self.name} ({self.vrf_field} is unresolved reference)") # noqa: E701 + return None + filter[f'{self.vrf_field}'] = vrf_id + + return self.model_class.objects.filter(**filter) def _ip_only(value: str) -> str|None: @@ -608,31 +780,32 @@ def _fingerprint_all(data: dict) -> str: return hash(tuple(values)) -def fingerprint(data: dict, object_type: str) -> str: +def fingerprints(data: dict, object_type: str) -> list[str]: """ - Fingerprint a data structure. + Get fingerprints for a data structure. - This uses the first matcher that has all - required fields or else uses all fields. - - TODO: This means there are pathological? cases where - the same object is being referenced but by - different unique constraints in the same diff... - this could lead to some unexpected behavior. + This returns all fingerprints for the given data that + have required fields. """ if data is None: return None model_class = get_object_type_model(object_type) # check any known match criteria + fps = [] for matcher in get_model_matchers(model_class): fp = matcher.fingerprint(data) if fp is not None: - return fp - # fall back to fingerprinting all the data - return _fingerprint_all(data) + fps.append(fp) + if _TRACE: logger.debug(f" ** matcher {matcher.name} gave fingerprint {fp}") # noqa: E701 + else: + if _TRACE: logger.debug(f" ** skipped matcher {matcher.name}") # noqa: E701 + fp = _fingerprint_all(data) + if _TRACE: logger.debug(f" ** matcher _fingerprint_all gave fingerprint {fp}") # noqa: E701 + fps.append(fp) + return fps -def find_existing_object(data: dict, object_type: str): +def find_existing_object(data: dict, object_type: str): # noqa: C901 """ Find an existing object that matches the given data. 
@@ -641,21 +814,21 @@ def find_existing_object(data: dict, object_type: str): Returns the object if found, otherwise None. """ - logger.debug(f"resolving {data}") + if _TRACE: logger.debug(f"resolving {data}") # noqa: E701 model_class = get_object_type_model(object_type) for matcher in get_model_matchers(model_class): if not matcher.has_required_fields(data): - logger.debug(f" * skipped matcher {matcher.name} (missing fields)") + if _TRACE: logger.debug(f" * skipped matcher {matcher.name} (missing fields)") # noqa: E701 continue q = matcher.build_queryset(data) if q is None: - logger.debug(f" * skipped matcher {matcher.name} (no queryset)") + if _TRACE: logger.debug(f" * skipped matcher {matcher.name} (no queryset)") # noqa: E701 continue - logger.debug(f" * trying query {q.query}") + if _TRACE: logger.debug(f" * trying query {q.query}") # noqa: E701 existing = q.order_by('pk').first() if existing is not None: - logger.debug(f" -> Found object {existing} via {matcher.name}") + if _TRACE: logger.debug(f" -> Found object {existing} via {matcher.name}") # noqa: E701 return existing - logger.debug(f" -> No object found for matcher {matcher.name}") - logger.debug(" * No matchers found an existing object") + if _TRACE: logger.debug(f" -> No object found for matcher {matcher.name}") # noqa: E701 + if _TRACE: logger.debug(" * No matchers found an existing object") # noqa: E701 return None diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index f95ec52..07f99fd 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -12,12 +12,12 @@ from uuid import uuid4 import graphlib -from django.core.exceptions import ValidationError from django.utils.text import slugify from extras.models.customfields import CustomField +from rest_framework import serializers -from .common import AutoSlug, ChangeSetException, UnresolvedReference -from .matcher import find_existing_object, fingerprint +from .common 
import _TRACE, NON_FIELD_ERRORS, AutoSlug, ChangeSetException, UnresolvedReference, harmonize_formats +from .matcher import find_existing_object, fingerprints from .plugin_utils import ( CUSTOM_FIELD_OBJECT_REFERENCE_TYPE, apply_format_transformations, @@ -50,6 +50,22 @@ def _camel_to_snake_case(name): "assigned_object_id": UnresolvedReference(object_type=object_type, uuid=uuid), }, }, + "virtualization.virtualmachine": { + "primary_ip4": lambda object_type, uuid: { + "__force_after": UnresolvedReference(object_type=object_type, uuid=uuid), + }, + "primary_ip6": lambda object_type, uuid: { + "__force_after": UnresolvedReference(object_type=object_type, uuid=uuid), + }, + }, + "dcim.virtualdevicecontext": { + "primary_ip4": lambda object_type, uuid: { + "__force_after": UnresolvedReference(object_type=object_type, uuid=uuid), + }, + "primary_ip6": lambda object_type, uuid: { + "__force_after": UnresolvedReference(object_type=object_type, uuid=uuid), + }, + }, } def _no_context(object_type, uuid): @@ -61,15 +77,17 @@ def _nested_context(object_type, uuid, field_name): _IS_CIRCULAR_REFERENCE = { "dcim.interface": frozenset(["primary_mac_address"]), "virtualization.vminterface": frozenset(["primary_mac_address"]), - "dcim.device": frozenset(["primary_ip4", "primary_ip6"]), + "dcim.device": frozenset(["primary_ip4", "primary_ip6", "oob_ip"]), "dcim.virtualdevicecontext": frozenset(["primary_ip4", "primary_ip6"]), "virtualization.virtualmachine": frozenset(["primary_ip4", "primary_ip6"]), + "circuits.provider": frozenset(["accounts"]), + "dcim.modulebay": frozenset(["module"]), # this isn't allowed to be circular, but gives a better error } def _is_circular_reference(object_type, field_name): return field_name in _IS_CIRCULAR_REFERENCE.get(object_type, frozenset()) -def transform_proto_json(proto_json: dict, object_type: str, supported_models: dict) -> list[dict]: +def transform_proto_json(proto_json: dict, object_type: str, supported_models: dict) -> list[dict]: # noqa: 
C901 """ Transform keys of proto json dict to flattened dictionaries with model field keys. @@ -77,24 +95,28 @@ def transform_proto_json(proto_json: dict, object_type: str, supported_models: d a certain form of deduplication and resolution of existing objects. """ entities = _transform_proto_json_1(proto_json, object_type) - logger.debug(f"_transform_proto_json_1 entities: {json.dumps(entities, default=lambda o: str(o), indent=4)}") + if _TRACE: logger.debug(f"_transform_proto_json_1 entities: {json.dumps(entities, default=lambda o: str(o), indent=4)}") # noqa: E701 entities = _topo_sort(entities) - logger.debug(f"_topo_sort: {json.dumps(entities, default=lambda o: str(o), indent=4)}") + if _TRACE: logger.debug(f"_topo_sort: {json.dumps(entities, default=lambda o: str(o), indent=4)}") # noqa: E701 deduplicated = _fingerprint_dedupe(entities) - logger.debug(f"_fingerprint_dedupe: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") + if _TRACE: logger.debug(f"_fingerprint_dedupe: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") # noqa: E701 deduplicated = _topo_sort(deduplicated) - logger.debug(f"_topo_sort: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") + if _TRACE: logger.debug(f"_topo_sort: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") # noqa: E701 _set_auto_slugs(deduplicated, supported_models) - logger.debug(f"_set_auto_slugs: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") + if _TRACE: logger.debug(f"_set_auto_slugs: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") # noqa: E701 + _handle_cached_scope(deduplicated, supported_models) + if _TRACE: logger.debug(f"_handle_cached_scope: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") # noqa: E701 resolved = _resolve_existing_references(deduplicated) - logger.debug(f"_resolve_references: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") - _set_defaults(resolved, supported_models) - 
logger.debug(f"_set_defaults: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") + if _TRACE: logger.debug(f"_resolve_references: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") # noqa: E701 + _strip_cached_scope(resolved) + if _TRACE: logger.debug(f"_strip_cached_scope: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") # noqa: E701 + defaulted = _set_defaults(resolved, supported_models) + if _TRACE: logger.debug(f"_set_defaults: {json.dumps(defaulted, default=lambda o: str(o), indent=4)}") # noqa: E701 # handle post-create steps - output = _handle_post_creates(resolved) - logger.debug(f"_handle_post_creates: {json.dumps(output, default=lambda o: str(o), indent=4)}") + output = _handle_post_creates(defaulted) + if _TRACE: logger.debug(f"_handle_post_creates: {json.dumps(output, default=lambda o: str(o), indent=4)}") # noqa: E701 _check_unresolved_refs(output) for entity in output: @@ -117,7 +139,8 @@ def _transform_proto_json_1(proto_json: dict, object_type: str, context=None) -> # context pushed down from parent nodes if context is not None: for k, v in context.items(): - node[k] = v + if not k.startswith("_"): + node[k] = v if isinstance(v, UnresolvedReference): node['_refs'].add(v.uuid) @@ -221,31 +244,123 @@ def _topo_sort(entities: list[dict]) -> list[dict]: except graphlib.CycleError as e: # TODO the cycle error references the cycle here ... 
raise ChangeSetException(f"Circular reference in entities: {e}", errors={ - "__all__": { - "message": "Unable to resolve circular reference in entities", + NON_FIELD_ERRORS: { + NON_FIELD_ERRORS: "Unable to resolve circular reference in entities", } }) def _set_defaults(entities: list[dict], supported_models: dict): + out = [] for entity in entities: + entity = copy.deepcopy(entity) model_fields = supported_models.get(entity['_object_type']) if model_fields is None: - raise ValidationError(f"Model for object type {entity['_object_type']} is not supported") + raise serializers.ValidationError({ + NON_FIELD_ERRORS: [f"Model for object type {entity['_object_type']} is not supported"] + }) auto_slug = entity.pop("_auto_slug", None) if entity.get("_instance"): + out.append(entity) continue if auto_slug: if auto_slug.field_name not in entity: entity[auto_slug.field_name] = auto_slug.value + legal = legal_fields(entity['_object_type']) for field_name, field_info in model_fields.get('fields', {}).items(): + if field_name not in legal: + continue if entity.get(field_name) is None and field_info.get("default") is not None: - entity[field_name] = field_info["default"] + default = field_info["default"] + if callable(default): + default = default() + entity[field_name] = default set_custom_field_defaults(entity, model_fields['model']) + out.append(harmonize_formats(entity)) + return out +def _handle_cached_scope(entities: list[dict], supported_models: dict): + by_type_id = { + (entity['_object_type'], entity['_uuid']): entity + for entity in entities + } + for entity in entities: + model = supported_models.get(entity['_object_type'], {}).get("model") + if _has_cached_scope(model): + _handle_cached_scope_1(entity, by_type_id) + +def _strip_cached_scope(entities: list[dict]): + for entity in entities: + entity.pop("_region", None) + entity.pop("_site_group", None) + entity.pop("_site", None) + entity.pop("_location", None) + +@lru_cache(maxsize=256) +def _has_cached_scope(model): 
+ return hasattr(model, "cache_related_objects") and hasattr(model, "scope") + +def _handle_cached_scope_1(entity: dict, by_type_id: dict): + # these are some auto-set fields that cache scope information, + # some indexes rely on them. Here we attempt to emulate that behavior + # for the purpose of matching. These generally only exist after save. + scope_type = entity.get("scope_type") + scope_id = entity.get("scope_id") + + if scope_type and scope_id: + scope = by_type_id.get((scope_type, scope_id.uuid)) + if scope_type == "dcim.region": + _cache_region_ref(entity, scope_id) + elif scope_type == "dcim.sitegroup": + _cache_site_group_ref(entity, scope_id) + elif scope_type == "dcim.site": + _cache_site_ref(entity, scope_id) + _cache_region_ref(entity, scope.get("region")) + _cache_site_group_ref(entity, scope.get("group")) + elif scope_type == "dcim.location": + _cache_location_ref(entity, scope_id) + site_ref = scope.get("site") + if site_ref is not None and isinstance(site_ref, UnresolvedReference): + _cache_site_ref(entity, site_ref) + site_obj = by_type_id.get((site_ref.object_type, site_ref.uuid)) + if site_obj is not None: + _cache_region_ref(entity, site_obj.get("region")) + _cache_site_group_ref(entity, site_obj.get("group")) + +def _cache_region_ref(entity: dict, ref: UnresolvedReference|None): + if ref is None: + return + entity["_region"] = UnresolvedReference( + object_type=ref.object_type, + uuid=ref.uuid, + ) + +def _cache_site_group_ref(entity: dict, ref: UnresolvedReference|None): + if ref is None: + return + entity["_site_group"] = UnresolvedReference( + object_type=ref.object_type, + uuid=ref.uuid, + ) + +def _cache_site_ref(entity: dict, ref: UnresolvedReference|None): + if ref is None: + return + entity["_site"] = UnresolvedReference( + object_type=ref.object_type, + uuid=ref.uuid, + ) + +def _cache_location_ref(entity: dict, ref: UnresolvedReference|None): + if ref is None: + return + entity["_location"] = UnresolvedReference( + 
object_type=ref.object_type, + uuid=ref.uuid, + ) def set_custom_field_defaults(entity: dict, model): """Set default values for custom fields in an entity.""" @@ -263,7 +378,9 @@ def _set_auto_slugs(entities: list[dict], supported_models: dict): for entity in entities: model_fields = supported_models.get(entity['_object_type']) if model_fields is None: - raise ValidationError(f"Model for object type {entity['_object_type']} is not supported") + raise serializers.ValidationError({ + NON_FIELD_ERRORS: [f"Model for object type {entity['_object_type']} is not supported"] + }) for field_name, field_info in model_fields.get('fields', {}).items(): if field_info["type"] == "SlugField" and entity.get(field_name) is None: @@ -280,38 +397,52 @@ def _generate_slug(object_type, data): return slugify(str(source_value)) return None -def _fingerprint_dedupe(entities: list[dict]) -> list[dict]: +def _fingerprint_dedupe(entities: list[dict]) -> list[dict]: # noqa: C901 """ Deduplicates/merges entities by fingerprint. 
*list must be in topo order by reference already* """ + by_uuid = {} by_fp = {} deduplicated = [] new_refs = {} # uuid -> uuid for entity in entities: + if _TRACE: logger.debug(f"fingerprint_dedupe: {entity}") # noqa: E701 if entity.get('_is_post_create'): fp = entity['_uuid'] - existing = None + existing_uuid = None else: - fp = fingerprint(entity, entity['_object_type']) - existing = by_fp.get(fp) - - if existing is None: - logger.debug(" * entity is new.") + _update_unresolved_refs(entity, new_refs) + fps = fingerprints(entity, entity['_object_type']) + if _TRACE: logger.debug(f" ==> {fps}") # noqa: E701 + for fp in fps: + existing_uuid = by_fp.get(fp) + if existing_uuid is not None: + break + + if existing_uuid is None: + if _TRACE: logger.debug(" * entity is new.") # noqa: E701 new_entity = copy.deepcopy(entity) _update_unresolved_refs(new_entity, new_refs) - by_fp[fp] = new_entity - deduplicated.append(fp) + primary_uuid = new_entity['_uuid'] + for fp in fps: + by_fp[fp] = primary_uuid + by_uuid[primary_uuid] = new_entity + deduplicated.append(primary_uuid) else: - logger.debug(" * entity already exists.") + if _TRACE: logger.debug(" * entity already exists.") # noqa: E701 + existing = by_uuid[existing_uuid] new_refs[entity['_uuid']] = existing['_uuid'] merged = _merge_nodes(existing, entity) _update_unresolved_refs(merged, new_refs) - by_fp[fp] = merged + for fp in fps: + by_fp[fp] = existing_uuid + by_uuid[existing_uuid] = merged + deduplicated.append(existing_uuid) - return [by_fp[fp] for fp in deduplicated] + return [by_uuid[u] for u in deduplicated] def _merge_nodes(a: dict, b: dict) -> dict: """ @@ -328,7 +459,15 @@ def _merge_nodes(a: dict, b: dict) -> dict: if k.startswith("_"): continue if k in merged and merged[k] != v: - raise ValueError(f"Conflict merging {a} and {b} on {k}: {merged[k]} and {v}") + ov = { + ok: v for ok, v in a.items() + if ok != k and not ok.startswith("_") + } + raise serializers.ValidationError({ + NON_FIELD_ERRORS: [ + 
f"Conflicting values for '{k}' merging duplicate {a.get('_object_type')}," + f" `{merged[k]}` != `{v}` other values : {ov}"] + }) merged[k] = v return merged @@ -370,7 +509,7 @@ def _resolve_existing_references(entities: list[dict]) -> list[dict]: existing = find_existing_object(data, object_type) if existing is not None: - logger.debug(f"existing {data} -> {existing}") + if _TRACE: logger.debug(f"existing {data} -> {existing}") # noqa: E701 fp = (object_type, existing.id) if fp in seen: logger.warning(f"objects resolved to the same existing id after deduplication: {seen[fp]} and {data}") @@ -397,7 +536,7 @@ def _update_resolved_refs(data, new_refs): new_items.append(new_refs[item.uuid]) else: new_items.append(item) - data[k] = new_items + data[k] = sorted(new_items) elif isinstance(v, dict): _update_resolved_refs(v, new_refs) @@ -513,7 +652,9 @@ def _prepare_custom_fields(object_type: str, custom_fields: dict) -> tuple[dict, )) out[key] = vals else: - raise ValueError(f"Custom field {keyname} has unknown type: {value_type}") + raise serializers.ValidationError({ + keyname: [f"Custom field {keyname} has unknown type: {value_type}"] + }) except ValueError as e: raise ChangeSetException( f"Custom field {keyname} is invalid: {value}", diff --git a/netbox_diode_plugin/tests/test_api_diff_and_apply.py b/netbox_diode_plugin/tests/test_api_diff_and_apply.py index f53b1b1..651bfe4 100644 --- a/netbox_diode_plugin/tests/test_api_diff_and_apply.py +++ b/netbox_diode_plugin/tests/test_api_diff_and_apply.py @@ -9,9 +9,9 @@ from uuid import uuid4 import netaddr -from circuits.models import Circuit +from circuits.models import Circuit, Provider from core.models import ObjectType -from dcim.models import Device, Interface, Site +from dcim.models import Device, Interface, ModuleBay, Site from django.contrib.auth import get_user_model from extras.models import CustomField from extras.models.customfields import CustomFieldTypeChoices @@ -19,12 +19,14 @@ from rest_framework import 
status from users.models import Token from utilities.testing import APITestCase -from virtualization.models import VMInterface +from virtualization.models import Cluster, VMInterface logger = logging.getLogger(__name__) User = get_user_model() +def _get_error(response, object_name, field): + return response.json().get("errors", {}).get(object_name, {}).get(field, []) class GenerateDiffAndApplyTestCase(APITestCase): """GenerateDiff -> ApplyChangeSet test cases.""" @@ -416,6 +418,92 @@ def test_generate_diff_and_apply_create_device_with_primary_ip4(self): device = Device.objects.get(name=f"Device {device_uuid}") self.assertEqual(device.primary_ip4.pk, new_ipaddress.pk) + def test_generate_diff_and_apply_create_device_with_primary_ip6(self): + """Test generate diff and apply create device with primary ip6.""" + device_uuid = str(uuid4()) + interface_uuid = str(uuid4()) + addr = "2001:db8::1" + payload = { + "timestamp": 1, + "object_type": "ipam.ipaddress", + "entity": { + "ip_address": { + "address": addr, + "assigned_object_interface": { + "name": f"Interface {interface_uuid}", + "type": "1000base-t", + "device": { + "name": f"Device {device_uuid}", + "role": { + "name": f"Role {uuid4()}", + }, + "site": { + "name": f"Site {uuid4()}", + }, + "device_type": { + "manufacturer": { + "name": f"Manufacturer {uuid4()}", + }, + "model": f"Device Type {uuid4()}", + }, + "primary_ip6": { + "address": addr, + }, + }, + }, + }, + }, + } + + _, response = self.diff_and_apply(payload) + new_ipaddress = IPAddress.objects.get(address=addr) + self.assertEqual(new_ipaddress.assigned_object.name, f"Interface {interface_uuid}") + device = Device.objects.get(name=f"Device {device_uuid}") + self.assertEqual(device.primary_ip6.pk, new_ipaddress.pk) + + def test_generate_diff_and_apply_create_device_with_oob_ip(self): + """Test generate diff and apply create device with oob ip.""" + device_uuid = str(uuid4()) + interface_uuid = str(uuid4()) + addr = "192.168.1.1/24" + payload = { + 
"timestamp": 1, + "object_type": "ipam.ipaddress", + "entity": { + "ip_address": { + "address": addr, + "assigned_object_interface": { + "name": f"Interface {interface_uuid}", + "type": "1000base-t", + "device": { + "name": f"Device {device_uuid}", + "role": { + "name": f"Role {uuid4()}", + }, + "site": { + "name": f"Site {uuid4()}", + }, + "device_type": { + "manufacturer": { + "name": f"Manufacturer {uuid4()}", + }, + "model": f"Device Type {uuid4()}", + }, + "oob_ip": { + "address": addr, + }, + }, + }, + }, + }, + } + + _, response = self.diff_and_apply(payload) + new_ipaddress = IPAddress.objects.get(address=addr) + self.assertEqual(new_ipaddress.assigned_object.name, f"Interface {interface_uuid}") + device = Device.objects.get(name=f"Device {device_uuid}") + self.assertEqual(device.oob_ip.pk, new_ipaddress.pk) + def test_generate_diff_and_apply_create_and_update_site_with_custom_field(self): """Test generate diff and apply create and update site with custom field.""" site_uuid = str(uuid4()) @@ -913,6 +1001,352 @@ def test_generate_diff_and_apply_complex_vminterface(self): self.assertEqual(vm_interface.mtu, 2000) self.assertEqual(vm_interface.primary_mac_address.mac_address, "00:00:00:00:00:01") + def test_generate_diff_and_apply_dedupe_devicetype(self): + """Test generate diff and apply dedupe devicetype in wireless link.""" + payload = { + "timestamp": "2025-04-16T02:58:20.564615Z", + "object_type": "wireless.wirelesslink", + "entity": { + "wireless_link": { + "interface_a": { + "device": { + "name": "Device 1", + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "role": {"name": "Device Role 1"}, + "site": {"name": "Site 1"} + }, + "name": "Radio0/1", + "type": "ieee802.11ac", + "enabled": True + }, + "interface_b": { + "device": { + "name": "Device 2", + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "role": {"name": "Device Role 1"}, + "site": {"name": "Site 1"} + }, + "name": "Radio0/1", + 
"type": "ieee802.11ac", + "enabled": True + }, + "ssid": "P2P-Link-1", + "status": "connected", + "tenant": {"name": "Tenant 1"}, + "auth_type": "wpa-personal", + "auth_cipher": "aes", + "auth_psk": "P2PLinkKey123!", + "distance": 1.5, + "distance_unit": "km", + "description": "Point-to-point wireless backhaul link", + "comments": "Building A to Building B wireless bridge", + "tags": [ + { + "name": "Tag 1" + }, + { + "name": "Tag 2" + } + ] + } + } + } + + _ = self.diff_and_apply(payload) + + def test_generate_diff_and_apply_provider_with_accounts(self): + """Test generate diff and apply provider with accounts.""" + payload = { + "timestamp": "2025-04-16T02:58:20.564615Z", + "object_type": "circuits.provider", + "entity": { + "provider": { + "name": "Level 3 Communications", + "slug": "level3", + "description": "Global Tier 1 Internet Service Provider", + "comments": "Primary transit provider for data center connectivity", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}], + "accounts": [ + { + "provider": {"name": "Level 3 Communications"}, + "name": "East Coast Account", + "account": "L3-12345", + "description": "East Coast regional services account", + "comments": "Managed through regional NOC" + }, + { + "provider": {"name": "Level 3 Communications"}, + "name": "West Coast Account", + "account": "L3-67890", + "description": "West Coast regional services account", + "comments": "Managed through regional NOC" + } + ], + "asns": [ + { + "asn": "3356", + "rir": {"name": "ARIN"}, + "tenant": {"name": "Tenant 1"}, + "description": "Level 3 Global ASN", + "comments": "Primary transit ASN" + } + ] + } + } + } + + _ = self.diff_and_apply(payload) + provider = Provider.objects.get(name="Level 3 Communications") + self.assertEqual(provider.accounts.count(), 2) + self.assertEqual(provider.asns.count(), 1) + + def test_generate_diff_and_apply_module_bay_with_module(self): + """Test generate diff and apply module bay with module.""" + payload = { + "timestamp": 
"2025-04-16T02:58:20.564615Z", + "object_type": "dcim.modulebay", + "entity": { + "module_bay": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "Stack Module Bay 2", + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S-STACK" + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + } + + }, + "label": "STACK-2", + "position": "Rear", + "description": "Secondary stacking module bay", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + } + } + _ = self.diff_and_apply(payload) + module_bay = ModuleBay.objects.get(name="Stack Module Bay 2") + self.assertEqual(module_bay.module.device.name, "Device 1") + self.assertEqual(module_bay.module.module_type.manufacturer.name, "Cisco") + self.assertEqual(module_bay.module.module_type.model, "C2960S-STACK") + self.assertEqual(module_bay.module.module_bay.name, "Module Bay 1") + + def test_generate_diff_and_apply_module_bay_circular_ref_fails(self): + """Test generate diff and apply module bay.""" + payload = { + "timestamp": "2025-04-16T02:58:20.564615Z", + "object_type": "dcim.modulebay", + "entity": { + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module": { + "asset_tag": "1234567890", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + 
"manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S-STACK" + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module": { + "asset_tag": "1234567890", + } + } + }, + "label": "STACK-2", + "position": "Rear", + "description": "Secondary stacking module bay", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + } + } + response1 = self.client.post( + self.diff_url, data=payload, format="json", **self.user_header + ) + self.assertEqual(response1.status_code, status.HTTP_200_OK) + diff = response1.json().get("change_set", {}) + + response2 = self.client.post( + self.apply_url, data=diff, format="json", **self.user_header + ) + self.assertEqual(response2.status_code, status.HTTP_400_BAD_REQUEST) + + self.assertIn( + "A module bay cannot belong to a module installed within it.", + _get_error(response2, "dcim.modulebay", "__all__") + ) + + def test_generate_diff_and_apply_virtual_machine_with_primary_ip_4_ok(self): + """Test generate diff and apply virtual machine with primary ip 4 assigned.""" + payload = { + "timestamp": "2025-04-16T02:58:20.564615Z", + "object_type": "virtualization.virtualmachine", + "entity": { + "timestamp": "2025-04-16T13:45:02.045208Z", + "virtual_machine": { + "name": "app-server-01", + "status": "active", + "site": {"name": "Site 1"}, + "cluster": { + "name": "Cluster 1", + "type": {"name": "Cluster Type 1"} + }, + "device": { + "name": "Device 1", + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "role": {"name": "Device Role 1"}, + "site": {"name": "Site 1"}, + "cluster": { + "name": "Cluster 1", + "type": {"name": "Cluster Type 1"} + } + }, + "serial": "VM-2023-001", + "role": {"name": "Application Server"}, 
+ "tenant": {"name": "Tenant 1"}, + "platform": {"name": "Ubuntu 22.04"}, + "primary_ip4": { + "address": "192.168.2.10", + "assigned_object_vm_interface": { + "virtual_machine": { + "name": "app-server-01", + "cluster": { + "name": "Cluster 1", + "type": {"name": "Cluster Type 1"} + }, + "tenant": {"name": "Tenant 1"}, + }, + "name": "eth0", + "enabled": True, + "mtu": "1500", + } + }, + "vcpus": 4.0, + "memory": "214748364", + "disk": "147483647", + "description": "Primary application server instance", + "comments": "Hosts critical business applications", + "tags": [ + { + "name": "Tag 1" + }, + { + "name": "Tag 2" + } + ] + } + } + } + _ = self.diff_and_apply(payload) + + def test_generate_diff_and_apply_update_cluster_location(self): + """Test generate diff and apply update cluster location, same site.""" + payload = { + "timestamp": "2025-04-16T02:58:20.564615Z", + "object_type": "virtualization.cluster", + "entity": { + "cluster": { + "name": "Cluster A", + "type": {"name": "Cluster Type 1"}, + "group": {"name": "Cluster Group 1"}, + "status": "active", + "tenant": {"name": "Tenant 1"}, + "scope_site": {"name": "Site 1"}, + "description": "Cluster 1 Description", + "comments": "Cluster 1 Comments", + "tags": [{"name": "Tag 1"}] + } + }, + } + _ = self.diff_and_apply(payload) + + cluster = Cluster.objects.get(name="Cluster A") + self.assertEqual(cluster.scope.name, "Site 1") + + payload = { + "timestamp": "2025-04-16T02:58:20.564615Z", + "object_type": "virtualization.cluster", + "entity": { + "cluster": { + "name": "Cluster A", + "type": {"name": "Cluster Type 1"}, + "group": {"name": "Cluster Group 1"}, + "status": "active", + "tenant": {"name": "Tenant 1"}, + "scope_location": {"name": "Location 1", "site": {"name": "Site 1"}}, + "description": "Cluster 1 Description", + "comments": "Cluster 1 Comments", + "tags": [{"name": "Tag 1"}] + } + }, + } + _ = self.diff_and_apply(payload) + cluster = Cluster.objects.get(name="Cluster A") + 
self.assertEqual(cluster.scope.name, "Location 1") + def diff_and_apply(self, payload): """Diff and apply the payload.""" response1 = self.client.post( diff --git a/netbox_diode_plugin/tests/test_api_generate_diff.py b/netbox_diode_plugin/tests/test_api_generate_diff.py index 0c84fd9..d55ec35 100644 --- a/netbox_diode_plugin/tests/test_api_generate_diff.py +++ b/netbox_diode_plugin/tests/test_api_generate_diff.py @@ -2,6 +2,7 @@ # Copyright 2024 NetBox Labs Inc """Diode NetBox Plugin - Tests.""" +import logging from uuid import uuid4 from core.models import ObjectType @@ -15,6 +16,12 @@ User = get_user_model() +logger = logging.getLogger(__name__) + +def _get_error(response, object_name, field): + return response.json().get("errors", {}).get(object_name, {}).get(field, []) + + class GenerateDiffTestCase(APITestCase): """GenerateDiff test cases.""" @@ -279,6 +286,83 @@ def test_generate_diff_update_rack_type_camel_case(self): before = change.get("before", {}) self.assertEqual(before.get("model"), "Rack Type 1") + def test_merge_states_failed(self): + """Test merge states failed.""" + payload = { + "timestamp": 1, + "object_type": "ipam.vrf", + "entity": { + "vrf": { + "name": "Customer-A-VRF", + "rd": "65000:100", + "tenant": {"name": "Tenant 1"}, + "enforce_unique": True, + "description": "Isolated routing domain for Customer A", + "comments": "Used for customer's private network services", + "tags": [ + { + "name": "Tag 1" + }, + { + "name": "Tag 2" + } + ], + "import_targets": [ + { + "name": "65000:100", + "description": "Primary import route target" + }, + { + "name": "65000:101", + "description": "Backup import route target" + } + ], + "export_targets": [ + { + "name": "65000:100", + "description": "Primary export route target" + } + ] + } + } + } + + response = self.send_request(payload, status.HTTP_400_BAD_REQUEST) + logger.error(response.json()) + errs = _get_error(response, "ipam.vrf", "__all__") + self.assertEqual(len(errs), 1) + err = errs[0] + 
self.assertTrue(err.startswith("Conflicting values for 'description' merging duplicate ipam.routetarget")) + + def test_vlangroup_error(self): + """Test vlangroup error.""" + payload = { + "timestamp": 1, + "object_type": "ipam.vlangroup", + "entity": { + "vlan_group": { + "name": "Data Center Core", + "slug": "dc-core", + "scope_site": { + "name": "Data Center West", + "slug": "dc-west", + "status": "active" + }, + "description": "Core network VLANs for data center infrastructure", + "tags": [ + { + "name": "Tag 1" + }, + { + "name": "Tag 2" + } + ] + } + } + } + _ = self.send_request(payload) + + def send_request(self, payload, status_code=status.HTTP_200_OK): """Post the payload to the url and return the response.""" response = self.client.post( diff --git a/netbox_diode_plugin/tests/test_updates.py b/netbox_diode_plugin/tests/test_updates.py new file mode 100644 index 0000000..d2c9fc5 --- /dev/null +++ b/netbox_diode_plugin/tests/test_updates.py @@ -0,0 +1,177 @@ +#!/usr/bin/env python +# Copyright 2024 NetBox Labs Inc +"""Diode NetBox Plugin - Tests.""" + +import copy +import datetime +import decimal +import inspect +import json +import logging +import os +from functools import wraps + +from django.contrib.auth import get_user_model +from django.db import models +from django.db.models import QuerySet +from rest_framework import status +from users.models import Token +from utilities.testing import APITestCase + +from netbox_diode_plugin.api.common import harmonize_formats +from netbox_diode_plugin.api.plugin_utils import get_object_type_model + +logger = logging.getLogger(__name__) + +User = get_user_model() + +def _get_error(response, object_name, field): + return response.json().get("errors", {}).get(object_name, {}).get(field, []) + +def load_test_cases(cls): + """Class decorator to load test cases and create test methods.""" + logger.error("**** Loading test cases") + current_dir = os.path.dirname(os.path.abspath(__file__)) + test_data_path = 
os.path.join(current_dir, "test_updates_cases.json") + logger.error(f"**** Looking for test data at {test_data_path}") + + if not os.path.exists(test_data_path): + logger.error(f"**** Test data file not found at {test_data_path}") + raise FileNotFoundError(f"Test data file not found at {test_data_path}") + + def _create_and_update_test_case(case): + object_type = case["object_type"] + + def test_func(self): + model = get_object_type_model(object_type) + + payload = { + "timestamp": 1, + "object_type": object_type, + "entity": case["create"], + } + res = self.send_request(self.diff_url, payload) + self.assertEqual(res.status_code, status.HTTP_200_OK) + diff = res.json().get("change_set", {}) + res = self.client.post( + self.apply_url, data=diff, format="json", **self.user_header + ) + self.assertEqual(res.status_code, status.HTTP_200_OK) + # lookup the object and check fields + obj = model.objects.get(**case["lookup"]) + self._check_expect(obj, case["create_expect"]) + + # resending the same payload should not change anything + payload = { + "timestamp": 2, + "object_type": object_type, + "entity": case["create"], + } + res = self.send_request(self.diff_url, payload) + self.assertEqual(res.status_code, status.HTTP_200_OK) + + change_set = res.json().get("change_set", {}) + if change_set.get("changes", []) != []: + logger.error(f"Unexpected change set {json.dumps(change_set, indent=4)}") + + self.assertEqual(res.json().get("change_set", {}).get("changes", []), []) + + # updating the object + payload = { + "timestamp": 3, + "object_type": object_type, + "entity": case["update"], + } + res = self.send_request(self.diff_url, payload) + self.assertEqual(res.status_code, status.HTTP_200_OK) + + diff = res.json().get("change_set", {}) + res = self.client.post( + self.apply_url, data=diff, format="json", **self.user_header + ) + self.assertEqual(res.status_code, status.HTTP_200_OK) + obj = model.objects.get(**case["lookup"]) + self._check_expect(obj, case["update_expect"]) 
+ + test_func.__name__ = f"test_updates_{case['name']}" + return test_func + + with open(test_data_path) as f: + test_cases = json.load(f) + for case in test_cases: + t = _create_and_update_test_case(case) + logger.error(f"**** Creating test case {t.__name__}") + setattr(cls, t.__name__, t) + + return cls + +@load_test_cases +class ApplyUpdatesTestCase(APITestCase): + """diff/create/update test cases.""" + + @classmethod + def setUpClass(cls): + """Set up the test cases.""" + super().setUpClass() + + def setUp(self): + """Set up the test case.""" + self.diff_url = "/netbox/api/plugins/diode/generate-diff/" + self.apply_url = "/netbox/api/plugins/diode/apply-change-set/" + self.user = User.objects.create_user(username="testcommonuser") + self.user_token = Token.objects.create(user=self.user) + self.user_header = {"HTTP_AUTHORIZATION": f"Token {self.user_token.key}"} + + self.add_permissions("netbox_diode_plugin.add_diode") + + def _follow_path(self, obj, path): + cur = obj + for i, p in enumerate(path): + if p.isdigit(): + p = int(p) + cur = cur[p] + else: + cur = getattr(cur, p) + if i != len(path) - 1: + self.assertIsNotNone(cur) + if callable(cur): + try: + signature = inspect.signature(cur) + if len(signature.parameters) == 0: + cur = cur() + except ValueError: + pass + return harmonize_formats(cur) + + def _check_set_by(self, obj, path, value): + key = path[-1][len("__by_"):] + path = path[:-1] + cur = self._follow_path(obj, path) + + if isinstance(value, (list, tuple)): + vals = set(value) + else: + vals = {value} + + cvals = {harmonize_formats(getattr(c, key)) for c in cur} + self.assertEqual(cvals, vals) + + def _check_equals(self, obj, path, value): + cur = self._follow_path(obj, path) + self.assertEqual(cur, value) + + def _check_expect(self, obj, expect): + for field, value in expect.items(): + path = field.strip().split(".") + if path[-1].startswith("__by_"): + self._check_set_by(obj, path, value) + else: + self._check_equals(obj, path, value) + + def 
send_request(self, url, payload, status_code=status.HTTP_200_OK): + """Post the payload to the url and return the response.""" + response = self.client.post( + url, data=payload, format="json", **self.user_header + ) + self.assertEqual(response.status_code, status_code) + return response diff --git a/netbox_diode_plugin/tests/test_updates_cases.json b/netbox_diode_plugin/tests/test_updates_cases.json new file mode 100644 index 0000000..8d94c65 --- /dev/null +++ b/netbox_diode_plugin/tests/test_updates_cases.json @@ -0,0 +1,5902 @@ +[ + { + "name": "ipam_asn_1", + "object_type": "ipam.asn", + "lookup": {"asn": 555}, + "create_expect": { + "asn": 555, + "description": "ASN 555 Description", + "rir.name": "RIR 1" + }, + "create": { + "asn": { + "asn": "555", + "rir": {"name": "RIR 1"}, + "tenant": {"name": "Tenant 1"}, + "description": "ASN 555 Description", + "comments": "ASN 555 Comments", + "tags": [{"name": "Tag 1"}] + } + }, + "update": { + "asn": { + "asn": "555", + "rir": {"name": "RIR 1"}, + "tenant": {"name": "Tenant 1"}, + "description": "ASN 555 Description Updated", + "comments": "ASN 555 Comments", + "tags": [{"name": "Tag 1"}] + } + }, + "update_expect": { + "description": "ASN 555 Description Updated" + } + }, + { + "name": "ipam_asnrange_1", + "object_type": "ipam.asnrange", + "lookup": {"name": "ASN Range 1"}, + "create_expect": { + "name": "ASN Range 1", + "start": 1, + "end": 2, + "rir.name": "RIR 1" + }, + "create": { + "asn_range": { + "name": "ASN Range 1", + "slug": "asn-range-1", + "rir": {"name": "RIR 1"}, + "start": "1", + "end": "2", + "tenant": {"name": "Tenant 1"}, + "description": "ASN Range 1 Description", + "tags": [{"name": "Tag 1"}] + } + }, + "update": { + "asn_range": { + "name": "ASN Range 1", + "slug": "asn-range-1", + "rir": {"name": "RIR 1"}, + "start": "1", + "end": "2", + "tenant": {"name": "Tenant 1"}, + "description": "ASN Range 1 Description Updated", + "tags": [{"name": "Tag 1"}] + } + }, + "update_expect": { + 
"description": "ASN Range 1 Description Updated" + } + }, + { + "name": "ipam_aggregate_1", + "object_type": "ipam.aggregate", + "lookup": {"prefix": "182.82.82.0/24"}, + "create_expect": { + "prefix": "182.82.82.0/24", + "rir.name": "RIR 1", + "description": "Aggregate Description" + }, + "create": { + "aggregate": { + "prefix": "182.82.82.0/24", + "rir": {"name": "RIR 1"}, + "tenant": {"name": "Tenant 1"}, + "date_added": "2025-04-14T08:08:55Z", + "description": "Aggregate Description", + "comments": "Aggregate Comments", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "aggregate": { + "prefix": "182.82.82.0/24", + "rir": {"name": "RIR 1"}, + "tenant": {"name": "Tenant 1"}, + "date_added": "2025-04-14T08:08:55Z", + "description": "Aggregate Description Updated", + "comments": "Aggregate Comments", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Aggregate Description Updated" + } + }, + { + "name": "circuits_circuit_1", + "object_type": "circuits.circuit", + "lookup": {"cid": "Circuit 1"}, + "create_expect": { + "cid": "Circuit 1", + "provider.name": "Provider 1", + "type.name": "Circuit Type 1", + "description": "Circuit 1 Description" + }, + "create": { + "circuit": { + "cid": "Circuit 1", + "provider": {"name": "Provider 1"}, + "provider_account": { + "provider": {"name": "Provider 1"}, + "account": "account1" + }, + "type": {"name": "Circuit Type 1"}, + "status": "offline", + "tenant": {"name": "Tenant 1"}, + "install_date": "2025-04-14T00:00:00Z", + "termination_date": "2025-04-14T00:00:00Z", + "commit_rate": "10", + "description": "Circuit 1 Description", + "distance": 12.4, + "distance_unit": "ft", + "comments": "Circuit 1 Comments", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "circuit": { + "cid": "Circuit 1", + "provider": {"name": "Provider 1"}, + "provider_account": { + "provider": {"name": "Provider 1"}, + "account": "account1" + }, + "type": 
{"name": "Circuit Type 1"}, + "status": "offline", + "tenant": {"name": "Tenant 1"}, + "install_date": "2025-04-14T00:00:00Z", + "termination_date": "2025-04-14T00:00:00Z", + "commit_rate": "10", + "description": "Circuit 1 Description Updated", + "distance": 12.4, + "distance_unit": "ft", + "comments": "Circuit 1 Comments", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Circuit 1 Description Updated" + } + }, + { + "name": "circuits_circuitgroup_1", + "object_type": "circuits.circuitgroup", + "lookup": {"name": "Circuit Group 1"}, + "create_expect": { + "name": "Circuit Group 1", + "description": "Circuit Group 1 Description" + }, + "create": { + "circuit_group": { + "name": "Circuit Group 1", + "description": "Circuit Group 1 Description", + "tenant": {"name": "Tenant 1"}, + "tags": [{"name": "Tag 1"}] + } + }, + "update": { + "circuit_group": { + "name": "Circuit Group 1", + "description": "Circuit Group 1 Description Updated", + "tenant": {"name": "Tenant 1"}, + "tags": [{"name": "Tag 1"}] + } + }, + "update_expect": { + "description": "Circuit Group 1 Description Updated" + } + }, + { + "name": "circuits_circuitgroupassignment_1", + "object_type": "circuits.circuitgroupassignment", + "lookup": { + "group__name": "Circuit Group 1" + }, + "create_expect": { + "group.name": "Circuit Group 1", + "member.cid": "Circuit 1", + "priority": "tertiary" + }, + "create": { + "circuit_group_assignment": { + "group": {"name": "Circuit Group 1"}, + "member_circuit": { + "cid": "Circuit 1", + "type": {"name": "Circuit Type 1"}, + "provider": {"name": "Provider 1"} + }, + "priority": "tertiary" + } + }, + "update": { + "circuit_group_assignment": { + "group": {"name": "Circuit Group 1"}, + "member_circuit": { + "cid": "Circuit 1", + "type": {"name": "Circuit Type 1"}, + "provider": {"name": "Provider 1"} + }, + "priority": "secondary" + } + }, + "update_expect": { + "priority": "secondary" + } + }, + { + "name": 
"circuits_circuitgroupassignment_2", + "object_type": "circuits.circuitgroupassignment", + "lookup": { + "group__name": "Circuit Group 1" + }, + "create_expect": { + "group.name": "Circuit Group 1", + "member.cid": "Virtual Circuit 1", + "priority": "tertiary" + }, + "create": { + "circuit_group_assignment": { + "group": {"name": "Circuit Group 1"}, + "member_virtual_circuit": { + "cid": "Virtual Circuit 1", + "type": {"name": "Virtual Circuit Type 1"}, + "provider_network": { + "name": "Provider Network 1", + "provider": {"name": "Provider 1"} + } + }, + "priority": "tertiary" + } + }, + "update": { + "circuit_group_assignment": { + "group": {"name": "Circuit Group 1"}, + "member_virtual_circuit": { + "cid": "Virtual Circuit 1", + "type": {"name": "Virtual Circuit Type 1"}, + "provider_network": { + "name": "Provider Network 1", + "provider": {"name": "Provider 1"} + } + }, + "priority": "secondary" + } + }, + "update_expect": { + "priority": "secondary" + } + }, + { + "name": "circuits_circuittermination_1", + "object_type": "circuits.circuittermination", + "lookup": { + "circuit__cid": "Circuit 1", + "term_side": "A" + }, + "create_expect": { + "circuit.cid": "Circuit 1", + "term_side": "A", + "port_speed": 9600, + "description": "description" + }, + "create": { + "circuit_termination": { + "circuit": { + "cid": "Circuit 1", + "type": {"name": "Circuit Type 1"}, + "provider": {"name": "Provider 1"} + }, + "term_side": "A", + "termination_location": { + "name": "attic", + "site": {"name": "Site 1"} + }, + "port_speed": "9600", + "upstream_speed": "14400", + "xconnect_id": "xconnect.1", + "pp_info": "pp info", + "description": "description", + "mark_connected": true, + "tags": [{"name": "Tag 1"}] + } + }, + "update": { + "circuit_termination": { + "circuit": { + "cid": "Circuit 1", + "type": {"name": "Circuit Type 1"}, + "provider": {"name": "Provider 1"} + }, + "term_side": "A", + "termination_location": { + "name": "attic", + "site": {"name": "Site 1"} + }, + 
"port_speed": "9600", + "upstream_speed": "14400", + "xconnect_id": "xconnect.1", + "pp_info": "pp info", + "description": "description Updated", + "mark_connected": true, + "tags": [{"name": "Tag 1"}] + } + }, + "update_expect": { + "description": "description Updated" + } + }, + { + "name": "circuits_circuittermination_2", + "object_type": "circuits.circuittermination", + "lookup": { + "circuit__cid": "Circuit 1", + "term_side": "A" + }, + "create_expect": { + "circuit.cid": "Circuit 1", + "term_side": "A", + "port_speed": 9600, + "description": "description" + }, + "create": { + "circuit_termination": { + "circuit": { + "cid": "Circuit 1", + "type": {"name": "Circuit Type 1"}, + "provider": {"name": "Provider 1"} + }, + "term_side": "A", + "termination_provider_network": { + "provider": {"name": "Provider 1"}, + "name": "Provider Network 1", + "service_id": "service.1" + }, + "port_speed": "9600", + "upstream_speed": "14400", + "xconnect_id": "xconnect.1", + "pp_info": "pp info", + "description": "description", + "mark_connected": true, + "tags": [{"name": "Tag 1"}] + } + }, + "update": { + "circuit_termination": { + "circuit": { + "cid": "Circuit 1", + "type": {"name": "Circuit Type 1"}, + "provider": {"name": "Provider 1"} + }, + "term_side": "A", + "termination_provider_network": { + "provider": {"name": "Provider 1"}, + "name": "Provider Network 1", + "service_id": "service.1" + }, + "port_speed": "9600", + "upstream_speed": "14400", + "xconnect_id": "xconnect.1", + "pp_info": "pp info", + "description": "description Updated", + "mark_connected": true, + "tags": [{"name": "Tag 1"}] + } + }, + "update_expect": { + "description": "description Updated" + } + }, + { + "name": "circuits_circuittermination_3", + "object_type": "circuits.circuittermination", + "lookup": { + "circuit__cid": "Circuit 1", + "term_side": "A" + }, + "create_expect": { + "circuit.cid": "Circuit 1", + "term_side": "A", + "port_speed": 9600, + "description": "description" + }, + "create": 
{ + "circuit_termination": { + "circuit": { + "cid": "Circuit 1", + "type": {"name": "Circuit Type 1"}, + "provider": {"name": "Provider 1"} + }, + "term_side": "A", + "termination_site": {"name": "Site 1"}, + "port_speed": "9600", + "upstream_speed": "14400", + "xconnect_id": "xconnect.1", + "pp_info": "pp info", + "description": "description", + "mark_connected": true, + "tags": [{"name": "Tag 1"}] + } + }, + "update": { + "circuit_termination": { + "circuit": { + "cid": "Circuit 1", + "type": {"name": "Circuit Type 1"}, + "provider": {"name": "Provider 1"} + }, + "term_side": "A", + "termination_site": {"name": "Site 1"}, + "port_speed": "9600", + "upstream_speed": "14400", + "xconnect_id": "xconnect.1", + "pp_info": "pp info", + "description": "description Updated", + "mark_connected": true, + "tags": [{"name": "Tag 1"}] + } + }, + "update_expect": { + "description": "description Updated" + } + }, + { + "name": "circuits_circuittype_1", + "object_type": "circuits.circuittype", + "lookup": {"name": "Circuit Type 1"}, + "create_expect": { + "name": "Circuit Type 1", + "description": "Circuit Type 1 Description" + }, + "create": { + "circuit_type": { + "name": "Circuit Type 1", + "slug": "circuit-type-1", + "color": "0000ff", + "description": "Circuit Type 1 Description", + "tags": [{"name": "Tag 1"}] + } + }, + "update": { + "circuit_type": { + "name": "Circuit Type 1", + "slug": "circuit-type-1", + "color": "0000ff", + "description": "Circuit Type 1 Description Updated", + "tags": [{"name": "Tag 1"}] + } + }, + "update_expect": { + "description": "Circuit Type 1 Description Updated" + } + }, + { + "name": "virtualization_cluster_1", + "object_type": "virtualization.cluster", + "lookup": {"name": "Cluster A"}, + "create_expect": { + "name": "Cluster A", + "type.name": "Cluster Type 1", + "description": "Cluster 1 Description" + }, + "create": { + "cluster": { + "name": "Cluster A", + "type": {"name": "Cluster Type 1"}, + "group": {"name": "Cluster Group 1"}, + 
"status": "active", + "tenant": {"name": "Tenant 1"}, + "scope_location": { + "name": "Location 1", + "site": {"name": "Site 1"} + }, + "description": "Cluster 1 Description", + "comments": "Cluster 1 Comments", + "tags": [{"name": "Tag 1"}] + } + }, + "update": { + "cluster": { + "name": "Cluster A", + "type": {"name": "Cluster Type 1"}, + "group": {"name": "Cluster Group 1"}, + "status": "active", + "tenant": {"name": "Tenant 1"}, + "scope_location": { + "name": "Location 1", + "site": {"name": "Site 1"} + }, + "description": "Cluster 1 Description Updated", + "comments": "Cluster 1 Comments", + "tags": [{"name": "Tag 1"}] + } + }, + "update_expect": { + "description": "Cluster 1 Description Updated" + } + }, + { + "name": "virtualization_cluster_2", + "object_type": "virtualization.cluster", + "lookup": {"name": "Cluster 2"}, + "create_expect": { + "name": "Cluster 2", + "type.name": "Cluster Type 1", + "description": "Cluster 1 Description" + }, + "create": { + "cluster": { + "name": "Cluster 2", + "type": {"name": "Cluster Type 1"}, + "group": {"name": "Cluster Group 1"}, + "status": "active", + "tenant": {"name": "Tenant 1"}, + "scope_region": {"name": "Region 1"}, + "description": "Cluster 1 Description", + "comments": "Cluster 1 Comments", + "tags": [{"name": "Tag 1"}] + } + }, + "update": { + "cluster": { + "name": "Cluster 2", + "type": {"name": "Cluster Type 1"}, + "group": {"name": "Cluster Group 1"}, + "status": "active", + "tenant": {"name": "Tenant 1"}, + "scope_region": {"name": "Region 1"}, + "description": "Cluster 1 Description Updated", + "comments": "Cluster 1 Comments", + "tags": [{"name": "Tag 1"}] + } + }, + "update_expect": { + "description": "Cluster 1 Description Updated" + } + }, + { + "name": "virtualization_cluster_3", + "object_type": "virtualization.cluster", + "lookup": {"name": "Cluster 3"}, + "create_expect": { + "name": "Cluster 3", + "type.name": "Cluster Type 1", + "description": "Cluster 1 Description" + }, + "create": { + 
"cluster": { + "name": "Cluster 3", + "type": {"name": "Cluster Type 1"}, + "group": {"name": "Cluster Group 1"}, + "status": "active", + "tenant": {"name": "Tenant 1"}, + "scope_site": {"name": "Site 1"}, + "description": "Cluster 1 Description", + "comments": "Cluster 1 Comments", + "tags": [{"name": "Tag 1"}] + } + }, + "update": { + "cluster": { + "name": "Cluster 3", + "type": {"name": "Cluster Type 1"}, + "group": {"name": "Cluster Group 1"}, + "status": "active", + "tenant": {"name": "Tenant 1"}, + "scope_site": {"name": "Site 1"}, + "description": "Cluster 1 Description Updated", + "comments": "Cluster 1 Comments", + "tags": [{"name": "Tag 1"}] + } + }, + "update_expect": { + "description": "Cluster 1 Description Updated" + } + }, + { + "name": "virtualization_cluster_4", + "object_type": "virtualization.cluster", + "lookup": {"name": "Cluster 4"}, + "create_expect": { + "name": "Cluster 4", + "type.name": "Cluster Type 1", + "description": "Cluster 1 Description" + }, + "create": { + "cluster": { + "name": "Cluster 4", + "type": {"name": "Cluster Type 1"}, + "group": {"name": "Cluster Group 1"}, + "status": "active", + "tenant": {"name": "Tenant 1"}, + "scope_site_group": {"name": "Site Group 1"}, + "description": "Cluster 1 Description", + "comments": "Cluster 1 Comments", + "tags": [{"name": "Tag 1"}] + } + }, + "update": { + "cluster": { + "name": "Cluster 4", + "type": {"name": "Cluster Type 1"}, + "group": {"name": "Cluster Group 1"}, + "status": "active", + "tenant": {"name": "Tenant 1"}, + "scope_site_group": {"name": "Site Group 1"}, + "description": "Cluster 1 Description Updated", + "comments": "Cluster 1 Comments", + "tags": [{"name": "Tag 1"}] + } + }, + "update_expect": { + "description": "Cluster 1 Description Updated" + } + }, + { + "name": "virtualization_clustergroup_1", + "object_type": "virtualization.clustergroup", + "lookup": {"name": "Cluster Group 1"}, + "create_expect": { + "name": "Cluster Group 1", + "description": "Cluster Group 
1 Description" + }, + "create": { + "cluster_group": { + "name": "Cluster Group 1", + "description": "Cluster Group 1 Description", + "tags": [{"name": "Tag 1"}] + } + }, + "update": { + "cluster_group": { + "name": "Cluster Group 1", + "description": "Cluster Group 1 Description Updated", + "tags": [{"name": "Tag 1"}] + } + }, + "update_expect": { + "description": "Cluster Group 1 Description Updated" + } + }, + { + "name": "virtualization_clustertype_1", + "object_type": "virtualization.clustertype", + "lookup": {"name": "Cluster Type 1"}, + "create_expect": { + "name": "Cluster Type 1", + "description": "Cluster Type 1 Description" + }, + "create": { + "cluster_type": { + "name": "Cluster Type 1", + "description": "Cluster Type 1 Description", + "tags": [{"name": "Tag 1"}] + } + }, + "update": { + "cluster_type": { + "name": "Cluster Type 1", + "description": "Cluster Type 1 Description Updated", + "tags": [{"name": "Tag 1"}] + } + }, + "update_expect": { + "description": "Cluster Type 1 Description Updated" + } + }, + { + "name": "dcim_consoleport_1", + "object_type": "dcim.consoleport", + "lookup": {"name": "Console Port 1"}, + "create_expect": { + "name": "Console Port 1", + "description": "Console Port 1 Description" + }, + "create": { + "console_port": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + }, + "module_type": { + "manufacturer": { + "name": "Manufacturer 1" + }, + "model": "Module 
Type 1" + } + }, + "name": "Console Port 1", + "label": "Console Port 1 Label", + "type": "db-25", + "speed": "1200", + "description": "Console Port 1 Description", + "mark_connected": true, + "tags": [{"name": "Tag 1"}] + } + }, + "update": { + "console_port": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + }, + "module_type": { + "manufacturer": { + "name": "Manufacturer 1" + }, + "model": "Module Type 1" + } + }, + "name": "Console Port 1", + "label": "Console Port 1 Label", + "type": "db-25", + "speed": "1200", + "description": "Console Port 1 Description Updated", + "mark_connected": true, + "tags": [{"name": "Tag 1"}] + } + }, + "update_expect": { + "description": "Console Port 1 Description Updated" + } + }, + { + "name": "dcim_consoleserverport_1", + "object_type": "dcim.consoleserverport", + "lookup": {"name": "Console Server Port 1"}, + "create_expect": { + "name": "Console Server Port 1", + "description": "Console Server Port 1 Description" + }, + "create": { + "console_server_port": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_bay": { + 
"name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + }, + "module_type": { + "manufacturer": { + "name": "Manufacturer 1" + }, + "model": "Module Type 1" + } + }, + "name": "Console Server Port 1", + "label": "Console Server Port 1 Label", + "type": "db-25", + "speed": "1200", + "description": "Console Server Port 1 Description", + "mark_connected": true, + "tags": [{"name": "Tag 1"}] + } + }, + "update": { + "console_server_port": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + }, + "module_type": { + "manufacturer": { + "name": "Manufacturer 1" + }, + "model": "Module Type 1" + } + }, + "name": "Console Server Port 1", + "label": "Console Server Port 1 Label", + "type": "db-25", + "speed": "1200", + "description": "Console Server Port 1 Description Updated", + "mark_connected": true, + "tags": [{"name": "Tag 1"}] + } + }, + "update_expect": { + "description": "Console Server Port 1 Description Updated" + } + }, + { + "name": "tenancy_contact_1", + "object_type": "tenancy.contact", + "lookup": {"name": "Contact 1"}, + "create_expect": { + "name": "Contact 1", + "group.name": "Contact Group 1", + "description": "Contact 1 Description" + }, + "create": { + "contact": { + "group": {"name": "Contact Group 1"}, + "name": "Contact 1", + "title": 
"Contact 1 Title", + "phone": "1234567890", + "email": "contact1@example.com", + "address": "1234 Main St, Anytown, USA", + "link": "https://example.com", + "description": "Contact 1 Description", + "comments": "Contact 1 Comments", + "tags": [{"name": "Tag 1"}] + } + }, + "update": { + "contact": { + "group": {"name": "Contact Group 1"}, + "name": "Contact 1", + "title": "Contact 1 Title", + "phone": "1234567890", + "email": "contact1@example.com", + "address": "1234 Main St, Anytown, USA", + "link": "https://example.com", + "description": "Contact 1 Description Updated", + "comments": "Contact 1 Comments", + "tags": [{"name": "Tag 1"}] + } + }, + "update_expect": { + "description": "Contact 1 Description Updated" + } + }, + { + "name": "tenancy_contactassignment_1", + "object_type": "tenancy.contactassignment", + "lookup": { + "contact__name": "Contact 1", + "role__name": "Contact Role 1" + }, + "create_expect": { + "contact.name": "Contact 1", + "role.name": "Contact Role 1", + "priority": "primary" + }, + "create": { + "contact_assignment": { + "contact": { + "name": "Contact 1", + "group": {"name": "Contact Group 1"}, + "title": "Contact 1 Title" + }, + "role": {"name": "Contact Role 1"}, + "priority": "primary", + "tags": [{"name": "Tag 1"}], + "object_site": {"name": "Site 1"} + } + }, + "update": { + "contact_assignment": { + "contact": { + "name": "Contact 1", + "group": {"name": "Contact Group 1"}, + "title": "Contact 1 Title" + }, + "role": {"name": "Contact Role 1"}, + "priority": "secondary", + "tags": [{"name": "Tag 1"}], + "object_site": {"name": "Site 1"} + } + }, + "update_expect": { + "priority": "secondary" + } + }, + { + "name": "tenancy_contactgroup_1", + "object_type": "tenancy.contactgroup", + "lookup": {"name": "Contact Group 1"}, + "create_expect": { + "name": "Contact Group 1", + "description": "Contact Group 1 Description" + }, + "create": { + "contact_group": { + "name": "Contact Group 1", + "parent": {"name": "Contact Group 2"}, + 
"description": "Contact Group 1 Description", + "tags": [{"name": "Tag 1"}] + } + }, + "update": { + "contact_group": { + "name": "Contact Group 1", + "parent": {"name": "Contact Group 2"}, + "description": "Contact Group 1 Description Updated", + "tags": [{"name": "Tag 1"}] + } + }, + "update_expect": { + "description": "Contact Group 1 Description Updated" + } + }, + { + "name": "tenancy_contactrole_1", + "object_type": "tenancy.contactrole", + "lookup": {"name": "Contact Role 1"}, + "create_expect": { + "name": "Contact Role 1", + "description": "Contact Role 1 Description" + }, + "create": { + "contact_role": { + "name": "Contact Role 1", + "description": "Contact Role 1 Description", + "tags": [{"name": "Tag 1"}] + } + }, + "update": { + "contact_role": { + "name": "Contact Role 1", + "description": "Contact Role 1 Description Updated", + "tags": [{"name": "Tag 1"}] + } + }, + "update_expect": { + "description": "Contact Role 1 Description Updated" + } + }, + { + "name": "dcim_device_1", + "object_type": "dcim.device", + "lookup": {"name": "Device ABC"}, + "create_expect": { + "name": "Device ABC", + "device_type.manufacturer.name": "Cisco", + "device_type.model": "C2960S", + "role.name": "Device Role 1", + "description": "Device 1 Description" + }, + "create": { + "device": { + "name": "Device ABC", + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "role": {"name": "Device Role 1"}, + "tenant": {"name": "Tenant 1"}, + "platform": {"name": "Platform 1"}, + "serial": "1234567890", + "asset_tag": "asset.1", + "site": {"name": "Site 1"}, + "location": { + "name": "Location 1", + "site": {"name": "Site 1"} + }, + "rack": { + "name": "Rack 1", + "site": {"name": "Site 1"}, + "location": { + "name": "Location 1", + "site": {"name": "Site 1"} + } + }, + "position": 1.0, + "face": "front", + "status": "active", + "airflow": "bottom-to-top", + "description": "Device 1 Description", + "comments": "Device 1 Comments", + "tags": [{"name": 
"Tag 1"}] + } + }, + "update": { + "device": { + "name": "Device ABC", + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "role": {"name": "Device Role 1"}, + "tenant": {"name": "Tenant 1"}, + "platform": {"name": "Platform 1"}, + "serial": "1234567890", + "asset_tag": "asset.1", + "site": {"name": "Site 1"}, + "location": { + "name": "Location 1", + "site": {"name": "Site 1"} + }, + "rack": { + "name": "Rack 1", + "site": {"name": "Site 1"}, + "location": { + "name": "Location 1", + "site": {"name": "Site 1"} + } + }, + "position": 1.0, + "face": "front", + "status": "active", + "airflow": "bottom-to-top", + "description": "Device 1 Description Updated", + "comments": "Device 1 Comments", + "tags": [{"name": "Tag 1"}] + } + }, + "update_expect": { + "description": "Device 1 Description Updated" + } + }, + { + "name": "dcim_devicebay_1", + "object_type": "dcim.devicebay", + "lookup": { + "device__name": "Device 1", + "name": "Device Bay 1" + }, + "create_expect": { + "device.name": "Device 1", + "name": "Device Bay 1", + "description": "Device Bay 1 Description" + }, + "create": { + "device_bay": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C3P0", + "subdevice_role": "parent" + }, + "site": {"name": "Site 1"} + }, + "name": "Device Bay 1", + "label": "Device Bay 1 Label", + "description": "Device Bay 1 Description", + "installed_device": { + "name": "Device 2", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "tags": [{"name": "Tag 1"}] + } + }, + "update": { + "device_bay": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C3P0", + "subdevice_role": "parent" + }, + "site": {"name": "Site 1"} + }, + "name": "Device Bay 1", + "label": "Device 
Bay 1 Label", + "description": "Device Bay 1 Description Updated", + "installed_device": { + "name": "Device 2", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "tags": [{"name": "Tag 1"}] + } + }, + "update_expect": { + "description": "Device Bay 1 Description Updated" + } + }, + { + "name": "dcim_devicerole_1", + "object_type": "dcim.devicerole", + "lookup": {"name": "Core Router"}, + "create_expect": { + "name": "Core Router", + "description": "Primary network routing device" + }, + "create": { + "device_role": { + "name": "Core Router", + "slug": "core-router", + "color": "ff0000", + "vm_role": true, + "description": "Primary network routing device", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "device_role": { + "name": "Core Router", + "slug": "core-router", + "color": "ff0000", + "vm_role": true, + "description": "Primary network routing device Updated", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Primary network routing device Updated" + } + }, + { + "name": "dcim_devicetype_1", + "object_type": "dcim.devicetype", + "lookup": { + "manufacturer__name": "Cisco", + "model": "Catalyst 9300" + }, + "create_expect": { + "manufacturer.name": "Cisco", + "model": "Catalyst 9300", + "description": "Enterprise Series Switch" + }, + "create": { + "device_type": { + "manufacturer": {"name": "Cisco"}, + "default_platform": {"name": "IOS-XE"}, + "model": "Catalyst 9300", + "slug": "catalyst-9300", + "part_number": "C9300-48P-E", + "u_height": 1.0, + "exclude_from_utilization": false, + "is_full_depth": true, + "subdevice_role": "parent", + "airflow": "front-to-rear", + "weight": 14.5, + "weight_unit": "lb", + "description": "Enterprise Series Switch", + "comments": "High-performance access switch", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "device_type": { 
+ "manufacturer": {"name": "Cisco"}, + "default_platform": {"name": "IOS-XE"}, + "model": "Catalyst 9300", + "slug": "catalyst-9300", + "part_number": "C9300-48P-E", + "u_height": 1.0, + "exclude_from_utilization": false, + "is_full_depth": true, + "subdevice_role": "parent", + "airflow": "front-to-rear", + "weight": 14.5, + "weight_unit": "lb", + "description": "Enterprise Series Switch Updated", + "comments": "High-performance access switch", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Enterprise Series Switch Updated" + } + }, + { + "name": "ipam_fhrpgroup_1", + "object_type": "ipam.fhrpgroup", + "lookup": {"name": "HSRP Group 10"}, + "create_expect": { + "name": "HSRP Group 10", + "protocol": "hsrp", + "group_id": 10, + "description": "Core Router HSRP Group" + }, + "create": { + "fhrp_group": { + "name": "HSRP Group 10", + "protocol": "hsrp", + "group_id": "10", + "auth_type": "md5", + "auth_key": "secretkey123", + "description": "Core Router HSRP Group", + "comments": "Primary gateway redundancy group", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "fhrp_group": { + "name": "HSRP Group 10", + "protocol": "hsrp", + "group_id": "10", + "auth_type": "md5", + "auth_key": "secretkey123", + "description": "Core Router HSRP Group Updated", + "comments": "Primary gateway redundancy group", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Core Router HSRP Group Updated" + } + }, + { + "name": "ipam_fhrpgroupassignment_1", + "object_type": "ipam.fhrpgroupassignment", + "lookup": { + "group__name": "HSRP Group 10" + }, + "create_expect": { + "group.name": "HSRP Group 10", + "priority": 100 + }, + "create": { + "fhrp_group_assignment": { + "group": { + "name": "HSRP Group 10", + "protocol": "hsrp", + "group_id": "10" + }, + "interface_interface": { + "device": { + "name": "Device 1", + "device_type": { + "manufacturer": {"name": "Cisco"}, + 
"model": "C2960S" + }, + "role": {"name": "Device Role 1"}, + "site": {"name": "Site 1"} + }, + "name": "GigabitEthernet1/0/1", + "type": "1000base-t", + "enabled": true + }, + "priority": 100 + } + }, + "update": { + "fhrp_group_assignment": { + "group": { + "name": "HSRP Group 10", + "protocol": "hsrp", + "group_id": "10" + }, + "interface_interface": { + "device": { + "name": "Device 1", + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "role": {"name": "Device Role 1"}, + "site": {"name": "Site 1"} + }, + "name": "GigabitEthernet1/0/1", + "type": "1000base-t", + "enabled": true + }, + "priority": 200 + } + }, + "update_expect": { + "priority": 200 + } + }, + { + "name": "dcim_frontport_1", + "object_type": "dcim.frontport", + "lookup": { + "device__name": "Device 1", + "name": "Front Port 1" + }, + "create_expect": { + "device.name": "Device 1", + "name": "Front Port 1", + "description": "Front fiber port" + }, + "create": { + "front_port": { + "device": { + "name": "Device 1", + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "role": {"name": "Device Role 1"}, + "site": {"name": "Site 1"} + }, + "module": { + "device": { + "name": "Device 1", + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "role": {"name": "Device Role 1"}, + "site": {"name": "Site 1"} + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + }, + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S-MODULE" + } + }, + "name": "Front Port 1", + "label": "FP1", + "type": "lc-apc", + "color": "0000ff", + "rear_port": { + "device": { + "name": "Device 1", + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "role": {"name": "Device Role 1"}, + "site": {"name": "Site 1"} + 
}, + "name": "Rear Port 1", + "type": "lc-apc" + }, + "rear_port_position": "1", + "description": "Front fiber port", + "mark_connected": true, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "front_port": { + "device": { + "name": "Device 1", + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "role": {"name": "Device Role 1"}, + "site": {"name": "Site 1"} + }, + "module": { + "device": { + "name": "Device 1", + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "role": {"name": "Device Role 1"}, + "site": {"name": "Site 1"} + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + }, + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S-MODULE" + } + }, + "name": "Front Port 1", + "label": "FP1", + "type": "lc-apc", + "color": "0000ff", + "rear_port": { + "device": { + "name": "Device 1", + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "role": {"name": "Device Role 1"}, + "site": {"name": "Site 1"} + }, + "name": "Rear Port 1", + "type": "lc-apc" + }, + "rear_port_position": "1", + "description": "Front fiber port Updated", + "mark_connected": true, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Front fiber port Updated" + } + }, + { + "name": "vpn_ikepolicy_1", + "object_type": "vpn.ikepolicy", + "lookup": {"name": "IKE-POLICY-1"}, + "create_expect": { + "name": "IKE-POLICY-1", + "version": 2, + "description": "Main IPSec IKE Policy" + }, + "create": { + "ike_policy": { + "name": "IKE-POLICY-1", + "description": "Main IPSec IKE Policy", + "version": "2", + "preshared_key": "secretPSK123!", + "comments": "Primary IKE policy for VPN tunnels", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}], + 
"proposals": [ + { + "name": "IKE-PROPOSAL-1", + "description": "AES-256 with SHA-256", + "authentication_method": "preshared-keys", + "encryption_algorithm": "aes-256-cbc", + "authentication_algorithm": "hmac-sha256", + "group": "14", + "sa_lifetime": "28800" + } + ] + } + }, + "update": { + "ike_policy": { + "name": "IKE-POLICY-1", + "description": "Main IPSec IKE Policy Updated", + "version": "2", + "preshared_key": "secretPSK123!", + "comments": "Primary IKE policy for VPN tunnels", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}], + "proposals": [ + { + "name": "IKE-PROPOSAL-1", + "description": "AES-256 with SHA-256", + "authentication_method": "preshared-keys", + "encryption_algorithm": "aes-256-cbc", + "authentication_algorithm": "hmac-sha256", + "group": "14", + "sa_lifetime": "28800" + } + ] + } + }, + "update_expect": { + "description": "Main IPSec IKE Policy Updated" + } + }, + { + "name": "vpn_ikeproposal_1", + "object_type": "vpn.ikeproposal", + "lookup": {"name": "IKE-PROPOSAL-2"}, + "create_expect": { + "name": "IKE-PROPOSAL-2", + "description": "High Security IKE Proposal" + }, + "create": { + "ike_proposal": { + "name": "IKE-PROPOSAL-2", + "description": "High Security IKE Proposal", + "authentication_method": "certificates", + "encryption_algorithm": "aes-256-gcm", + "authentication_algorithm": "hmac-sha512", + "group": "21", + "sa_lifetime": "86400", + "comments": "Enhanced security proposal for critical VPNs", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "ike_proposal": { + "name": "IKE-PROPOSAL-2", + "description": "High Security IKE Proposal Updated", + "authentication_method": "certificates", + "encryption_algorithm": "aes-256-gcm", + "authentication_algorithm": "hmac-sha512", + "group": "21", + "sa_lifetime": "86400", + "comments": "Enhanced security proposal for critical VPNs", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "High Security IKE Proposal Updated" + 
} + }, + { + "name": "ipam_ipaddress_1", + "object_type": "ipam.ipaddress", + "lookup": {"address": "192.168.100.1/24"}, + "create_expect": { + "address": "192.168.100.1/24", + "vrf.name": "PROD-VRF", + "description": "Production VIP Address" + }, + "create": { + "ip_address": { + "address": "192.168.100.1/24", + "vrf": { + "name": "PROD-VRF", + "rd": "65000:1" + }, + "tenant": {"name": "Tenant 1"}, + "status": "active", + "role": "vip", + "assigned_object_interface": { + "device": { + "name": "Device 1", + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "role": {"name": "Device Role 1"}, + "site": {"name": "Site 1"} + }, + "name": "GigabitEthernet1/0/1", + "type": "1000base-t" + }, + "nat_inside": { + "address": "10.0.0.1/24" + }, + "dns_name": "prod-vip.example.com", + "description": "Production VIP Address", + "comments": "Primary virtual IP for load balancing", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "ip_address": { + "address": "192.168.100.1/24", + "vrf": { + "name": "PROD-VRF", + "rd": "65000:1" + }, + "tenant": {"name": "Tenant 1"}, + "status": "active", + "role": "vip", + "assigned_object_interface": { + "device": { + "name": "Device 1", + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "role": {"name": "Device Role 1"}, + "site": {"name": "Site 1"} + }, + "name": "GigabitEthernet1/0/1", + "type": "1000base-t" + }, + "nat_inside": { + "address": "10.0.0.1/24" + }, + "dns_name": "prod-vip.example.com", + "description": "Production VIP Address Updated", + "comments": "Primary virtual IP for load balancing", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Production VIP Address Updated" + } + }, + { + "name": "ipam_iprange_1", + "object_type": "ipam.iprange", + "lookup": { + "start_address": "10.100.0.1", + "end_address": "10.100.0.254" + }, + "create_expect": { + "start_address": "10.100.0.1/32", + 
"end_address": "10.100.0.254/32", + "description": "Production Server IP Range" + }, + "create": { + "ip_range": { + "start_address": "10.100.0.1", + "end_address": "10.100.0.254", + "vrf": { + "name": "PROD-VRF", + "rd": "65000:1" + }, + "tenant": {"name": "Tenant 1"}, + "status": "active", + "role": { + "name": "Server Pool", + "slug": "server-pool" + }, + "description": "Production Server IP Range", + "comments": "Allocated for production server deployments", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}], + "mark_utilized": true + } + }, + "update": { + "ip_range": { + "start_address": "10.100.0.1", + "end_address": "10.100.0.254", + "vrf": { + "name": "PROD-VRF", + "rd": "65000:1" + }, + "tenant": {"name": "Tenant 1"}, + "status": "active", + "role": { + "name": "Server Pool", + "slug": "server-pool" + }, + "description": "Production Server IP Range Updated", + "comments": "Allocated for production server deployments", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}], + "mark_utilized": true + } + }, + "update_expect": { + "description": "Production Server IP Range Updated" + } + }, + { + "name": "vpn_ipsecpolicy_1", + "object_type": "vpn.ipsecpolicy", + "lookup": {"name": "IPSEC-POLICY-1"}, + "create_expect": { + "name": "IPSEC-POLICY-1", + "description": "Site-to-Site VPN Policy" + }, + "create": { + "ip_sec_policy": { + "name": "IPSEC-POLICY-1", + "description": "Site-to-Site VPN Policy", + "pfs_group": "14", + "comments": "High-security IPSec policy for site-to-site VPN", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}], + "proposals": [ + { + "name": "IPSEC-PROPOSAL-1", + "description": "AES-256-GCM with ESP", + "encryption_algorithm": "aes-256-gcm", + "sa_lifetime_seconds": "28800", + "sa_lifetime_data": "28800", + "comments": "Strong encryption proposal for VPN tunnels" + } + ] + } + }, + "update": { + "ip_sec_policy": { + "name": "IPSEC-POLICY-1", + "description": "Site-to-Site VPN Policy Updated", + "pfs_group": "14", + "comments": "High-security 
IPSec policy for site-to-site VPN", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}], + "proposals": [ + { + "name": "IPSEC-PROPOSAL-1", + "description": "AES-256-GCM with ESP", + "encryption_algorithm": "aes-256-gcm", + "sa_lifetime_seconds": "28800", + "sa_lifetime_data": "28800", + "comments": "Strong encryption proposal for VPN tunnels" + } + ] + } + }, + "update_expect": { + "description": "Site-to-Site VPN Policy Updated" + } + }, + { + "name": "vpn_ipsecprofile_1", + "object_type": "vpn.ipsecprofile", + "lookup": {"name": "IPSEC-PROFILE-1"}, + "create_expect": { + "name": "IPSEC-PROFILE-1", + "description": "Remote Access VPN Profile" + }, + "create": { + "ip_sec_profile": { + "name": "IPSEC-PROFILE-1", + "description": "Remote Access VPN Profile", + "mode": "esp", + "ike_policy": { + "name": "IKE-POLICY-1", + "version": "2", + "preshared_key": "secretkey123" + }, + "ipsec_policy": { + "name": "IPSEC-POLICY-1", + "description": "Strong encryption policy", + "pfs_group": "14" + }, + "comments": "Standard IPSec profile for remote access VPN tunnels", + "tags": [{"name": "VPN"}, {"name": "Remote-Access"}] + } + }, + "update": { + "ip_sec_profile": { + "name": "IPSEC-PROFILE-1", + "description": "Remote Access VPN Profile Updated", + "mode": "esp", + "ike_policy": { + "name": "IKE-POLICY-1", + "version": "2", + "preshared_key": "secretkey123" + }, + "ipsec_policy": { + "name": "IPSEC-POLICY-1", + "description": "Strong encryption policy", + "pfs_group": "14" + }, + "comments": "Standard IPSec profile for remote access VPN tunnels", + "tags": [{"name": "VPN"}, {"name": "Remote-Access"}] + } + }, + "update_expect": { + "description": "Remote Access VPN Profile Updated" + } + }, + { + "name": "vpn_ipsecproposal_1", + "object_type": "vpn.ipsecproposal", + "lookup": {"name": "IPSec-Proposal-AES256"}, + "create_expect": { + "name": "IPSec-Proposal-AES256", + "description": "High security IPSec proposal using AES-256-GCM" + }, + "create": { + "ip_sec_proposal": { + 
"name": "IPSec-Proposal-AES256", + "description": "High security IPSec proposal using AES-256-GCM", + "encryption_algorithm": "aes-256-gcm", + "authentication_algorithm": "hmac-sha512", + "sa_lifetime_seconds": "28800", + "sa_lifetime_data": "42949", + "comments": "Used for critical infrastructure VPNs", + "tags": [ + { + "name": "high-security", + "slug": "high-security", + "color": "0000ff" + }, + { + "name": "production", + "slug": "production", + "color": "0000ff" + } + ] + } + }, + "update": { + "ip_sec_proposal": { + "name": "IPSec-Proposal-AES256", + "description": "High security IPSec proposal using AES-256-GCM Updated", + "encryption_algorithm": "aes-256-gcm", + "authentication_algorithm": "hmac-sha512", + "sa_lifetime_seconds": "28800", + "sa_lifetime_data": "42949", + "comments": "Used for critical infrastructure VPNs", + "tags": [ + { + "name": "high-security", + "slug": "high-security", + "color": "0000ff" + }, + { + "name": "production", + "slug": "production", + "color": "0000ff" + } + ] + } + }, + "update_expect": { + "description": "High security IPSec proposal using AES-256-GCM Updated" + } + }, + { + "name": "dcim_interface_1", + "object_type": "dcim.interface", + "lookup": {"name": "GigabitEthernet1/0/1"}, + "create_expect": { + "name": "GigabitEthernet1/0/1", + "label": "Core Link 1", + "tagged_vlans.all.0.vid": 101, + "tagged_vlans.all.1.vid": 102 + }, + "create": { + "interface": { + "name": "GigabitEthernet1/0/1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + 
"manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + }, + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S-MODULE" + } + }, + "label": "Core Link 1", + "type": "1000base-t", + "enabled": true, + "bridge": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "Bridge1", + "type": "bridge" + }, + "lag": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "Port-Channel2", + "type": "lag" + }, + "mtu": "9000", + "primary_mac_address": { + "mac_address": "00:11:22:33:44:55" + }, + "speed": "1000000000", + "duplex": "full", + "wwn": "50:01:43:80:00:00:00:00", + "vrf": { + "name": "PROD-VRF", + "rd": "65000:1" + }, + "description": "Core network interface", + "mode": "tagged", + "mark_connected": true, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}], + "mgmt_only": false, + "poe_mode": "pse", + "poe_type": "type3-ieee802.3bt", + "untagged_vlan": { + "vid": 100, + "name": "Data VLAN", + "status": "active" + }, + "vlan_translation_policy": { + "name": "Customer Translation Policy", + "description": "VLAN translation for customer traffic" + }, + "vdcs": [ + { + "name": "VDC1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "identifier": 1, + "status": "active", + "description": "Primary VDC" + } + ], + "tagged_vlans": [ + { + "vid": 101, + "name": "Voice VLAN", + "status": "active" + }, + { + "vid": 102, + "name": "Data VLAN", + "status": "active" + } + ] + } + }, + "update": { + "interface": { + "name": "GigabitEthernet1/0/1", + "device": { + "name": "Device 1", + "role": 
{"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + }, + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S-MODULE" + } + }, + "label": "Core Link 1", + "type": "1000base-t", + "enabled": true, + "bridge": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "Bridge1", + "type": "bridge" + }, + "lag": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "Port-Channel2", + "type": "lag" + }, + "mtu": "9000", + "primary_mac_address": { + "mac_address": "00:11:22:33:44:55" + }, + "speed": "1000000000", + "duplex": "full", + "wwn": "50:01:43:80:00:00:00:00", + "vrf": { + "name": "PROD-VRF", + "rd": "65000:1" + }, + "description": "Core network interface", + "mode": "q-in-q", + "mark_connected": true, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}, {"name": "Tag 3"}], + "mgmt_only": false, + "poe_mode": "pse", + "poe_type": "type3-ieee802.3bt", + "untagged_vlan": { + "vid": 100, + "name": "Data VLAN", + "status": "active" + }, + "vlan_translation_policy": { + "name": "Customer Translation Policy", + "description": "VLAN translation for customer traffic" + }, + "vdcs": [ + { + "name": "VDC1", + "device": { + "name": "Device 1", + "role": 
{"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "identifier": 1, + "status": "active", + "description": "Primary VDC" + } + ], + "tagged_vlans": [ + { + "vid": 101, + "name": "Voice VLAN", + "status": "active" + }, + { + "vid": 102, + "name": "Data VLAN", + "status": "active" + } + ] + } + }, + "update_expect": { + "tags.all.__by_name": ["Tag 1", "Tag 2", "Tag 3"] + } + }, + { + "name": "dcim_interface_2", + "object_type": "dcim.interface", + "lookup": {"name": "WirelessGigabitEthernet1/0/1"}, + "create_expect": { + "name": "WirelessGigabitEthernet1/0/1", + "label": "Core Link 1" + }, + "create": { + "interface": { + "name": "WirelessGigabitEthernet1/0/1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + }, + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S-MODULE" + } + }, + "label": "Core Link 1", + "type": "other-wireless", + "enabled": true, + "bridge": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "Bridge1", + "type": "bridge" + }, + "lag": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + 
"name": "Port-Channel2", + "type": "lag" + }, + "mtu": "9000", + "primary_mac_address": { + "mac_address": "00:11:22:33:44:55" + }, + "speed": "1000000000", + "duplex": "full", + "wwn": "50:01:43:80:00:00:00:00", + "rf_role": "ap", + "rf_channel": "2.4g-1-2412-22", + "tx_power": "20", + "wireless_lans": [ + { + "ssid": "Corp-Secure", + "description": "Corporate secure wireless network", + "group": { + "name": "Corporate Networks", + "slug": "corporate-networks" + }, + "status": "active", + "vlan": { + "vid": 800, + "name": "Production Servers" + }, + "tenant": {"name": "Tenant 1"} + } + ], + "vrf": { + "name": "PROD-VRF", + "rd": "65000:1" + }, + "description": "Core network interface", + "mode": "access", + "mark_connected": true, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}], + "mgmt_only": false, + "untagged_vlan": { + "vid": 900, + "name": "Data VLAN", + "status": "active" + }, + "vlan_translation_policy": { + "name": "Customer Translation Policy", + "description": "VLAN translation for customer traffic" + }, + "vdcs": [ + { + "name": "VDC1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "identifier": 1, + "status": "active", + "description": "Primary VDC" + } + ] + } + }, + "update": { + "interface": { + "name": "WirelessGigabitEthernet1/0/1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" 
+ }, + "site": {"name": "Site 1"} + } + }, + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S-MODULE" + } + }, + "label": "Core Link 1", + "type": "other-wireless", + "enabled": true, + "bridge": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "Bridge1", + "type": "bridge" + }, + "lag": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "Port-Channel2", + "type": "lag" + }, + "mtu": "9000", + "primary_mac_address": { + "mac_address": "00:11:22:33:44:55" + }, + "speed": "1000000000", + "duplex": "full", + "wwn": "50:01:43:80:00:00:00:00", + "rf_role": "ap", + "rf_channel": "2.4g-1-2412-22", + "tx_power": "20", + "wireless_lans": [ + { + "ssid": "Corp-Secure", + "description": "Corporate secure wireless network", + "group": { + "name": "Corporate Networks", + "slug": "corporate-networks" + }, + "status": "active", + "vlan": { + "vid": 800, + "name": "Production Servers" + }, + "tenant": {"name": "Tenant 1"} + } + ], + "vrf": { + "name": "PROD-VRF", + "rd": "65000:1" + }, + "description": "Core network interface", + "mode": "access", + "mark_connected": true, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}, {"name": "Tag 3"}], + "mgmt_only": false, + "untagged_vlan": { + "vid": 900, + "name": "Data VLAN", + "status": "active" + }, + "vlan_translation_policy": { + "name": "Customer Translation Policy", + "description": "VLAN translation for customer traffic" + }, + "vdcs": [ + { + "name": "VDC1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "identifier": 1, + "status": "active", + "description": "Primary 
VDC" + } + ] + } + }, + "update_expect": { + "tags.all.__by_name": ["Tag 1", "Tag 2", "Tag 3"] + } + }, + { + "name": "dcim_interface_3", + "object_type": "dcim.interface", + "lookup": {"name": "VirtualGigabitEthernet1/0/1"}, + "create_expect": { + "name": "VirtualGigabitEthernet1/0/1", + "label": "Core Link 1" + }, + "create": { + "interface": { + "name": "VirtualGigabitEthernet1/0/1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "parent": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "Port-Channel1", + "type": "1000base-t" + }, + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + }, + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S-MODULE" + } + }, + "label": "Core Link 1", + "type": "virtual", + "enabled": true, + "bridge": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "Bridge1", + "type": "bridge" + }, + "mtu": "9000", + "primary_mac_address": { + "mac_address": "00:11:22:33:44:55" + }, + "speed": "1000000000", + "duplex": "full", + "wwn": "50:01:43:80:00:00:00:00", + "vrf": { + "name": "PROD-VRF", + "rd": "65000:1" + }, + "description": "Core network interface", + "mode": "q-in-q", + "mark_connected": false, + 
"tags": [{"name": "Tag 1"}, {"name": "Tag 2"}], + "mgmt_only": false, + "untagged_vlan": { + "vid": 444, + "name": "Data VLAN", + "status": "active" + }, + "qinq_svlan": { + "vid": 2000, + "name": "Service VLAN", + "status": "active" + }, + "vlan_translation_policy": { + "name": "Customer Translation Policy", + "description": "VLAN translation for customer traffic" + }, + "vdcs": [ + { + "name": "VDC1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "identifier": 1, + "status": "active", + "description": "Primary VDC" + } + ] + } + }, + "update": { + "interface": { + "name": "VirtualGigabitEthernet1/0/1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "parent": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "Port-Channel1", + "type": "1000base-t" + }, + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + }, + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S-MODULE" + } + }, + "label": "Core Link 1", + "type": "virtual", + "enabled": true, + "bridge": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, 
+ "name": "Bridge1", + "type": "bridge" + }, + "mtu": "9000", + "primary_mac_address": { + "mac_address": "00:11:22:33:44:55" + }, + "speed": "1000000000", + "duplex": "full", + "wwn": "50:01:43:80:00:00:00:00", + "vrf": { + "name": "PROD-VRF", + "rd": "65000:1" + }, + "description": "Core network interface", + "mode": "q-in-q", + "mark_connected": false, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}, {"name": "Tag 3"}], + "mgmt_only": false, + "untagged_vlan": { + "vid": 444, + "name": "Data VLAN", + "status": "active" + }, + "qinq_svlan": { + "vid": 2000, + "name": "Service VLAN", + "status": "active" + }, + "vlan_translation_policy": { + "name": "Customer Translation Policy", + "description": "VLAN translation for customer traffic" + }, + "vdcs": [ + { + "name": "VDC1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "identifier": 1, + "status": "active", + "description": "Primary VDC" + } + ] + } + }, + "update_expect": { + "tags.all.__by_name": ["Tag 1", "Tag 2", "Tag 3"] + } + }, + { + "name": "vpn_l2vpn_1", + "object_type": "vpn.l2vpn", + "lookup": {"name": "Customer-VPLS-1"}, + "create_expect": { + "name": "Customer-VPLS-1", + "type": "vpls", + "description": "Customer VPLS service for multi-site connectivity" + }, + "create": { + "l2vpn": { + "name": "Customer-VPLS-1", + "slug": "customer-vpls-1", + "type": "vpls", + "identifier": "65000", + "import_targets": [ + { + "name": "65000:1001", + "description": "Primary import target" + }, + { + "name": "65000:1002", + "description": "Secondary import target" + } + ], + "export_targets": [ + { + "name": "65000:1003", + "description": "Primary export target" + } + ], + "description": "Customer VPLS service for multi-site connectivity", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "l2vpn": { + "name": "Customer-VPLS-1", + "slug": 
"customer-vpls-1", + "type": "vpls", + "identifier": "65000", + "import_targets": [ + { + "name": "65000:1001", + "description": "Primary import target" + }, + { + "name": "65000:1002", + "description": "Secondary import target" + } + ], + "export_targets": [ + { + "name": "65000:1003", + "description": "Primary export target" + } + ], + "description": "Customer VPLS service for multi-site connectivity Updated", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Customer VPLS service for multi-site connectivity Updated" + } + }, + { + "name": "dcim_inventoryitem_1", + "object_type": "dcim.inventoryitem", + "lookup": {"name": "Power Supply 1"}, + "create_expect": { + "name": "Power Supply 1", + "description": "715W AC Power Supply" + }, + "create": { + "inventory_item": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "parent": { + "name": "Chassis 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + }, + "name": "Power Supply 1", + "label": "PSU1", + "role": { + "name": "Power Supply", + "color": "00ff00" + }, + "manufacturer": {"name": "Cisco"}, + "part_id": "PWR-C1-715WAC", + "serial": "ABC123XYZ", + "asset_tag": "ASSET-001", + "discovered": true, + "description": "715W AC Power Supply", + "status": "active", + "component_power_port": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "PSU1 Power Port", + "type": "iec-60320-c14", + "maximum_draw": 715, + "allocated_draw": 500, + "description": "Power input port for PSU1" + }, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": 
{ + "inventory_item": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "parent": { + "name": "Chassis 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + }, + "name": "Power Supply 1", + "label": "PSU1", + "role": { + "name": "Power Supply", + "color": "00ff00" + }, + "manufacturer": {"name": "Cisco"}, + "part_id": "PWR-C1-715WAC", + "serial": "ABC123XYZ", + "asset_tag": "ASSET-001", + "discovered": true, + "description": "715W AC Power Supply Updated", + "status": "active", + "component_power_port": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "PSU1 Power Port", + "type": "iec-60320-c14", + "maximum_draw": 715, + "allocated_draw": 500, + "description": "Power input port for PSU1" + }, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "715W AC Power Supply Updated" + } + }, + { + "name": "dcim_inventoryitemrole_1", + "object_type": "dcim.inventoryitemrole", + "lookup": {"name": "Line Card"}, + "create_expect": { + "name": "Line Card", + "description": "Network switch line card module" + }, + "create": { + "inventory_item_role": { + "name": "Line Card", + "slug": "line-card", + "color": "0000ff", + "description": "Network switch line card module", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "inventory_item_role": { + "name": "Line Card", + "slug": "line-card", + "color": "0000ff", + "description": "Network switch line card module Updated", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Network switch line card 
module Updated" + } + }, + { + "name": "vpn_l2vpn_1", + "object_type": "vpn.l2vpn", + "lookup": {"name": "Customer-VPLS-1"}, + "create_expect": { + "name": "Customer-VPLS-1", + "type": "vpls", + "identifier": 65000, + "description": "Customer VPLS service for multi-site connectivity" + }, + "create": { + "l2vpn": { + "name": "Customer-VPLS-1", + "slug": "customer-vpls-1", + "type": "vpls", + "identifier": "65000", + "import_targets": [ + { + "name": "65000:1001", + "description": "Primary import target" + }, + { + "name": "65000:1002", + "description": "Secondary import target" + } + ], + "export_targets": [ + { + "name": "65000:1003", + "description": "Primary export target" + } + ], + "description": "Customer VPLS service for multi-site connectivity", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "l2vpn": { + "name": "Customer-VPLS-1", + "slug": "customer-vpls-1", + "type": "vpls", + "identifier": "65000", + "import_targets": [ + { + "name": "65000:1001", + "description": "Primary import target" + }, + { + "name": "65000:1002", + "description": "Secondary import target" + } + ], + "export_targets": [ + { + "name": "65000:1003", + "description": "Primary export target" + } + ], + "description": "Customer VPLS service for multi-site connectivity Updated", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Customer VPLS service for multi-site connectivity Updated" + } + }, + { + "name": "vpn_l2vpntermination_1", + "object_type": "vpn.l2vpntermination", + "lookup": { + "l2vpn__name": "Customer-VPLS-1" + }, + "create_expect": { + "l2vpn.name": "Customer-VPLS-1", + "l2vpn.type": "vpls" + }, + "create": { + "l2vpn_termination": { + "l2vpn": { + "name": "Customer-VPLS-1", + "type": "vpls", + "identifier": "65000" + }, + "assigned_object_interface": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + 
}, + "site": {"name": "Site 1"} + }, + "name": "GigabitEthernet1/0/1", + "type": "1000base-t", + "enabled": true + }, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "l2vpn_termination": { + "l2vpn": { + "name": "Customer-VPLS-1", + "type": "vpls", + "identifier": "65000" + }, + "assigned_object_interface": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "GigabitEthernet1/0/1", + "type": "1000base-t", + "enabled": true + }, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}, {"name": "Tag 3"}] + } + }, + "update_expect": { + "tags.all.__by_name": ["Tag 1", "Tag 2", "Tag 3"] + } + }, + { + "name": "dcim_location_1", + "object_type": "dcim.location", + "lookup": {"name": "Data Center East Wing"}, + "create_expect": { + "name": "Data Center East Wing", + "description": "East wing of the main data center facility" + }, + "create": { + "location": { + "name": "Data Center East Wing", + "slug": "dc-east-wing", + "site": {"name": "Site 1"}, + "parent": { + "name": "Main Data Center", + "slug": "main-dc", + "site": {"name": "Site 1"} + }, + "status": "active", + "tenant": {"name": "Tenant 1"}, + "facility": "Building A, Floor 3", + "description": "East wing of the main data center facility", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "location": { + "name": "Data Center East Wing", + "slug": "dc-east-wing", + "site": {"name": "Site 1"}, + "parent": { + "name": "Main Data Center", + "slug": "main-dc", + "site": {"name": "Site 1"} + }, + "status": "active", + "tenant": {"name": "Tenant 1"}, + "facility": "Building A, Floor 3", + "description": "East wing of the main data center facility Updated", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "East wing of the main data center facility Updated" + } + }, + { + "name": 
"dcim_macaddress_1", + "object_type": "dcim.macaddress", + "lookup": {"mac_address": "00:1A:2B:3C:4D:5E"}, + "create_expect": { + "mac_address": "00:1A:2B:3C:4D:5E", + "description": "Primary management interface MAC" + }, + "create": { + "mac_address": { + "mac_address": "00:1A:2B:3C:4D:5E", + "assigned_object_interface": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "GigabitEthernet1/0/1", + "type": "1000base-t", + "enabled": true + }, + "description": "Primary management interface MAC", + "comments": "Reserved for network management access", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "mac_address": { + "mac_address": "00:1A:2B:3C:4D:5E", + "assigned_object_interface": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "GigabitEthernet1/0/1", + "type": "1000base-t", + "enabled": true + }, + "description": "Primary management interface MAC Updated", + "comments": "Reserved for network management access", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Primary management interface MAC Updated" + } + }, + { + "name": "dcim_manufacturer_1", + "object_type": "dcim.manufacturer", + "lookup": {"name": "Arista Networks"}, + "create_expect": { + "name": "Arista Networks", + "slug": "arista-networks", + "description": "Leading provider of cloud networking solutions" + }, + "create": { + "manufacturer": { + "name": "Arista Networks", + "slug": "arista-networks", + "description": "Leading provider of cloud networking solutions", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + + } + }, + "update": { + "manufacturer": { + "name": "Arista Networks", + "slug": "arista-networks", + "description": 
"Leading provider of cloud networking solutions Updated", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Leading provider of cloud networking solutions Updated" + } + }, + { + "name": "dcim_module_1", + "object_type": "dcim.module", + "lookup": {"asset_tag": "MOD-001"}, + "create_expect": { + "status": "active", + "serial": "MOD123XYZ", + "asset_tag": "MOD-001", + "description": "Stacking module for switch interconnect" + }, + "create": { + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + }, + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S-STACK" + }, + "status": "active", + "serial": "MOD123XYZ", + "asset_tag": "MOD-001", + "description": "Stacking module for switch interconnect", + "comments": "Primary stack member module", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + }, + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S-STACK" + }, + "status": "active", + "serial": "MOD123XYZ", + "asset_tag": "MOD-001", + "description": "Stacking module for switch interconnect Updated", + "comments": "Primary stack member module", + "tags": [{"name": 
"Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Stacking module for switch interconnect Updated" + } + }, + { + "name": "dcim_modulebay_1", + "object_type": "dcim.modulebay", + "lookup": {"name": "Stack Module Bay 2"}, + "create_expect": { + "name": "Stack Module Bay 2", + "description": "Secondary stacking module bay" + }, + "create": { + "module_bay": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "Stack Module Bay 2", + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S-STACK" + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + } + + }, + "label": "STACK-2", + "position": "Rear", + "description": "Secondary stacking module bay", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "module_bay": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "Stack Module Bay 2", + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S-STACK" + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": 
"Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + } + + }, + "label": "STACK-2", + "position": "Rear", + "description": "Secondary stacking module bay Updated", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Secondary stacking module bay Updated" + } + }, + { + "name": "dcim_moduletype_1", + "object_type": "dcim.moduletype", + "lookup": { + "manufacturer__name": "Cisco", + "model": "C9300-NM-8X" + }, + "create_expect": { + "manufacturer.name": "Cisco", + "model": "C9300-NM-8X", + "description": "Catalyst 9300 8 x 10GE Network Module" + }, + "create": { + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C9300-NM-8X", + "part_number": "C9300-NM-8X=", + "airflow": "front-to-rear", + "weight": 0.7, + "weight_unit": "kg", + "description": "Catalyst 9300 8 x 10GE Network Module", + "comments": "Hot-swappable uplink module for C9300 series switches", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C9300-NM-8X", + "part_number": "C9300-NM-8X=", + "airflow": "front-to-rear", + "weight": 0.7, + "weight_unit": "kg", + "description": "Catalyst 9300 8 x 10GE Network Module Updated", + "comments": "Hot-swappable uplink module for C9300 series switches", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Catalyst 9300 8 x 10GE Network Module Updated" + } + }, + { + "name": "dcim_platform_1", + "object_type": "dcim.platform", + "lookup": {"name": "Cisco IOS-XE"}, + "create_expect": { + "name": "Cisco IOS-XE", + "manufacturer.name": "Cisco", + "description": "Enterprise-class IOS operating system for Catalyst switches and ISR routers" + }, + "create": { + "platform": { + "name": "Cisco IOS-XE", + "slug": "cisco-ios-xe", + "manufacturer": {"name": "Cisco"}, + "description": "Enterprise-class IOS operating system for Catalyst switches and ISR routers", + "tags": 
[{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "platform": { + "name": "Cisco IOS-XE", + "slug": "cisco-ios-xe", + "manufacturer": {"name": "Cisco"}, + "description": "Enterprise-class IOS operating system for Catalyst switches and ISR routers Updated", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Enterprise-class IOS operating system for Catalyst switches and ISR routers Updated" + } + }, + { + "name": "dcim_powerfeed_1", + "object_type": "dcim.powerfeed", + "lookup": {"name": "Power Feed A1"}, + "create_expect": { + "name": "Power Feed A1", + "power_panel.name": "Panel A", + "description": "Primary power feed for network equipment rack" + }, + "create": { + "power_feed": { + "power_panel": { + "site": {"name": "Site 1"}, + "name": "Panel A" + }, + "rack": { + "name": "Rack 1", + "site": {"name": "Site 1"}, + "location": { + "name": "Location 1", + "site": {"name": "Site 1"} + } + }, + "name": "Power Feed A1", + "status": "active", + "type": "primary", + "supply": "ac", + "phase": "three-phase", + "voltage": "208", + "amperage": "30", + "max_utilization": "80", + "mark_connected": true, + "description": "Primary power feed for network equipment rack", + "tenant": {"name": "Tenant 1"}, + "comments": "Connected to UPS system A with redundant backup", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "power_feed": { + "power_panel": { + "site": {"name": "Site 1"}, + "name": "Panel A" + }, + "rack": { + "name": "Rack 1", + "site": {"name": "Site 1"}, + "location": { + "name": "Location 1", + "site": {"name": "Site 1"} + } + }, + "name": "Power Feed A1", + "status": "active", + "type": "primary", + "supply": "ac", + "phase": "three-phase", + "voltage": "208", + "amperage": "30", + "max_utilization": "80", + "mark_connected": true, + "description": "Primary power feed for network equipment rack Updated", + "tenant": {"name": "Tenant 1"}, + "comments": "Connected to UPS system A 
with redundant backup", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Primary power feed for network equipment rack Updated" + } + }, + { + "name": "dcim_poweroutlet_1", + "object_type": "dcim.poweroutlet", + "lookup": { + "device__name": "Device 1", + "name": "PSU1-Outlet1" + }, + "create_expect": { + "device.name": "Device 1", + "name": "PSU1-Outlet1", + "description": "Power outlet for network switch PSU" + }, + "create": { + "power_outlet": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "PWR-C1-715WAC" + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + } + }, + "name": "PSU1-Outlet1", + "label": "OUT-1", + "type": "iec-60320-c13", + "color": "0000ff", + "power_port": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "PSU1" + }, + "feed_leg": "A", + "description": "Power outlet for network switch PSU", + "mark_connected": true, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "power_outlet": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device 
Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "PWR-C1-715WAC" + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + } + }, + "name": "PSU1-Outlet1", + "label": "OUT-1", + "type": "iec-60320-c13", + "color": "0000ff", + "power_port": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "PSU1" + }, + "feed_leg": "A", + "description": "Power outlet for network switch PSU Updated", + "mark_connected": true, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Power outlet for network switch PSU Updated" + } + }, + { + "name": "dcim_powerpanel_1", + "object_type": "dcim.powerpanel", + "lookup": { + "site__name": "Site 1", + "name": "Panel A" + }, + "create_expect": { + "site.name": "Site 1", + "name": "Panel A", + "description": "Main power distribution panel" + }, + "create": { + "power_panel": { + "site": {"name": "Site 1"}, + "location": { + "name": "Location 1", + "site": {"name": "Site 1"} + }, + "name": "Panel A", + "description": "Main power distribution panel", + "comments": "Primary power distribution for data center", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "power_panel": { + "site": {"name": "Site 1"}, + "location": { + "name": "Location 1", + "site": {"name": "Site 1"} + }, + "name": "Panel A", + "description": "Main power distribution panel Updated", + "comments": "Primary power distribution for data center", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Main 
power distribution panel Updated" + } + }, + { + "name": "dcim_powerport_1", + "object_type": "dcim.powerport", + "lookup": { + "device__name": "Device 1", + "name": "PSU1" + }, + "create_expect": { + "device.name": "Device 1", + "name": "PSU1", + "description": "Primary power supply unit" + }, + "create": { + "power_port": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "PWR-C1-715WAC" + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + } + }, + "name": "PSU1", + "label": "PSU-1", + "type": "iec-60320-c14", + "maximum_draw": 715, + "allocated_draw": 650, + "description": "Primary power supply unit", + "mark_connected": true, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "power_port": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "PWR-C1-715WAC" + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + 
"site": {"name": "Site 1"} + } + } + }, + "name": "PSU1", + "label": "PSU-1", + "type": "iec-60320-c14", + "maximum_draw": 715, + "allocated_draw": 650, + "description": "Primary power supply unit Updated", + "mark_connected": true, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Primary power supply unit Updated" + } + }, + { + "name": "ipam_prefix_1", + "object_type": "ipam.prefix", + "lookup": {"prefix": "10.100.0.0/16"}, + "create_expect": { + "prefix": "10.100.0.0/16", + "description": "Production network address space" + }, + "create": { + "prefix": { + "prefix": "10.100.0.0/16", + "vrf": { + "name": "PROD-VRF", + "rd": "65000:1" + }, + "scope_site": {"name": "Site 1"}, + "tenant": {"name": "Tenant 1"}, + "vlan": { + "name": "Production VLAN", + "vid": "112" + }, + "status": "active", + "role": { + "name": "Production", + "slug": "production" + }, + "is_pool": true, + "mark_utilized": true, + "description": "Production network address space", + "comments": "Primary address allocation for production services", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "prefix": { + "prefix": "10.100.0.0/16", + "vrf": { + "name": "PROD-VRF", + "rd": "65000:1" + }, + "scope_site": {"name": "Site 1"}, + "tenant": {"name": "Tenant 1"}, + "vlan": { + "name": "Production VLAN", + "vid": "112" + }, + "status": "active", + "role": { + "name": "Production", + "slug": "production" + }, + "is_pool": true, + "mark_utilized": true, + "description": "Production network address space Updated", + "comments": "Primary address allocation for production services", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Production network address space Updated" + } + }, + { + "name": "circuits_provider_1", + "object_type": "circuits.provider", + "lookup": {"name": "Level 3 Communications"}, + "create_expect": { + "name": "Level 3 Communications", + "slug": "level3", + 
"description": "Global Tier 1 Internet Service Provider" + }, + "create": { + "provider": { + "name": "Level 3 Communications", + "slug": "level3", + "description": "Global Tier 1 Internet Service Provider", + "comments": "Primary transit provider for data center connectivity", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}], + "accounts": [ + { + "provider": {"name": "Level 3 Communications"}, + "name": "East Coast Account", + "account": "L3-12345", + "description": "East Coast regional services account", + "comments": "Managed through regional NOC" + }, + { + "provider": {"name": "Level 3 Communications"}, + "name": "West Coast Account", + "account": "L3-67890", + "description": "West Coast regional services account", + "comments": "Managed through regional NOC" + } + ], + "asns": [ + { + "asn": "3356", + "rir": {"name": "ARIN"}, + "tenant": {"name": "Tenant 1"}, + "description": "Level 3 Global ASN", + "comments": "Primary transit ASN" + } + ] + } + }, + "update": { + "provider": { + "name": "Level 3 Communications", + "slug": "level3", + "description": "Global Tier 1 Internet Service Provider Updated", + "comments": "Primary transit provider for data center connectivity", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}], + "accounts": [ + { + "provider": {"name": "Level 3 Communications"}, + "name": "East Coast Account", + "account": "L3-12345", + "description": "East Coast regional services account", + "comments": "Managed through regional NOC" + }, + { + "provider": {"name": "Level 3 Communications"}, + "name": "West Coast Account", + "account": "L3-67890", + "description": "West Coast regional services account", + "comments": "Managed through regional NOC" + } + ], + "asns": [ + { + "asn": "3356", + "rir": {"name": "ARIN"}, + "tenant": {"name": "Tenant 1"}, + "description": "Level 3 Global ASN", + "comments": "Primary transit ASN" + } + ] + } + }, + "update_expect": { + "description": "Global Tier 1 Internet Service Provider Updated" + } + }, + { + "name": 
"circuits_provideraccount_1", + "object_type": "circuits.provideraccount", + "lookup": { + "provider__name": "Level 3 Communications", + "account": "ACCT-12345" + }, + "create_expect": { + "provider.name": "Level 3 Communications", + "account": "ACCT-12345", + "description": "Primary enterprise account" + }, + "create": { + "provider_account": { + "provider": {"name": "Level 3 Communications"}, + "account": "ACCT-12345", + "name": "L3 Enterprise", + "description": "Primary enterprise account", + "comments": "Global services contract", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "provider_account": { + "provider": {"name": "Level 3 Communications"}, + "account": "ACCT-12345", + "name": "L3 Enterprise", + "description": "Primary enterprise account Updated", + "comments": "Global services contract", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Primary enterprise account Updated" + } + }, + { + "name": "circuits_providernetwork_1", + "object_type": "circuits.providernetwork", + "lookup": {"name": "Global MPLS Network"}, + "create_expect": { + "name": "Global MPLS Network", + "provider.name": "Level 3 Communications", + "description": "Global MPLS backbone network" + }, + "create": { + "provider_network": { + "provider": {"name": "Level 3 Communications"}, + "name": "Global MPLS Network", + "service_id": "L3-MPLS-001", + "description": "Global MPLS backbone network", + "comments": "Primary enterprise MPLS network infrastructure", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "provider_network": { + "provider": {"name": "Level 3 Communications"}, + "name": "Global MPLS Network", + "service_id": "L3-MPLS-001", + "description": "Global MPLS backbone network Updated", + "comments": "Primary enterprise MPLS network infrastructure", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Global MPLS backbone network Updated" + 
} + }, + { + "name": "ipam_rir_1", + "object_type": "ipam.rir", + "lookup": {"name": "ARIN"}, + "create_expect": { + "name": "ARIN", + "description": "American Registry for Internet Numbers" + }, + "create": { + "rir": { + "name": "ARIN", + "slug": "arin", + "is_private": false, + "description": "American Registry for Internet Numbers", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "rir": { + "name": "ARIN", + "slug": "arin", + "is_private": false, + "description": "American Registry for Internet Numbers Updated", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "American Registry for Internet Numbers Updated" + } + }, + { + "name": "dcim_rack_1", + "object_type": "dcim.rack", + "lookup": {"asset_tag": "RACK-009"}, + "create_expect": { + "name": "Rack ZZ", + "site.name": "Site 1", + "description": "Standard 42U server rack" + }, + "create": { + "rack": { + "name": "Rack ZZ", + "facility_id": "FAC-001", + "site": {"name": "Site 1"}, + "location": {"name": "Data Center East Wing", "site": {"name": "Site 1"}}, + "tenant": {"name": "Tenant 1"}, + "status": "active", + "role": { + "name": "Server Rack", + "slug": "server-rack", + "color": "0000ff", + "description": "Primary server rack role" + }, + "serial": "RACK123XYZ", + "asset_tag": "RACK-009", + "rack_type": { + "manufacturer": {"name": "Manufacturer 1"}, + "model": "R2000", + "slug": "r2000", + "form_factor": "4-post-cabinet" + }, + "form_factor": "4-post-cabinet", + "width": "19", + "u_height": "42", + "starting_unit": "1", + "desc_units": false, + "airflow": "front-to-rear", + "description": "Standard 42U server rack", + "comments": "Located in primary data center", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "rack": { + "name": "Rack ZZ", + "facility_id": "FAC-001", + "site": {"name": "Site 1"}, + "location": {"name": "Data Center East Wing", "site": {"name": "Site 1"}}, + "tenant": {"name": "Tenant 1"}, + 
"status": "active", + "role": { + "name": "Server Rack", + "slug": "server-rack", + "color": "0000ff", + "description": "Primary server rack role" + }, + "serial": "RACK123XYZ", + "asset_tag": "RACK-009", + "rack_type": { + "manufacturer": {"name": "Manufacturer 1"}, + "model": "R2000", + "slug": "r2000", + "form_factor": "4-post-cabinet" + }, + "form_factor": "4-post-cabinet", + "width": "19", + "u_height": "42", + "starting_unit": "1", + "desc_units": false, + "mounting_depth": "30", + "airflow": "front-to-rear", + "description": "Standard 42U server rack Updated", + "comments": "Located in primary data center", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Standard 42U server rack Updated" + } + }, + { + "name": "dcim_racktype_1", + "object_type": "dcim.racktype", + "lookup": { + "manufacturer__name": "Manufacturer 1", + "model": "R2000" + }, + "create_expect": { + "manufacturer.name": "Manufacturer 1", + "model": "R2000", + "description": "Standard 42U server rack" + }, + "create": { + "rack_type": { + "manufacturer": {"name": "Manufacturer 1"}, + "model": "R2000", + "slug": "r2000", + "description": "Standard 42U server rack", + "form_factor": "4-post-cabinet", + "width": "19", + "u_height": "42", + "starting_unit": "1", + "desc_units": false, + "outer_width": "24", + "outer_depth": "36", + "outer_unit": "in", + "weight": "350.5", + "max_weight": "1000", + "weight_unit": "lb", + "mounting_depth": "30", + "comments": "Standard enterprise rack configuration", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "rack_type": { + "manufacturer": {"name": "Manufacturer 1"}, + "model": "R2000", + "slug": "r2000", + "description": "Standard 42U server rack Updated", + "form_factor": "4-post-cabinet", + "width": "19", + "u_height": "42", + "starting_unit": "1", + "desc_units": false, + "outer_width": "24", + "outer_depth": "36", + "outer_unit": "in", + "weight": "350.5", + "max_weight": "1000", + 
"weight_unit": "lb", + "mounting_depth": "30", + "comments": "Standard enterprise rack configuration", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Standard 42U server rack Updated" + } + }, + { + "name": "dcim_rearport_1", + "object_type": "dcim.rearport", + "lookup": { + "device__name": "Device 1", + "name": "Rear Port 1" + }, + "create_expect": { + "device.name": "Device 1", + "name": "Rear Port 1", + "description": "Rear fiber port" + }, + "create": { + "rear_port": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + }, + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S-MODULE" + } + }, + "name": "Rear Port 1", + "label": "RP1", + "type": "lc-apc", + "color": "0000ff", + "positions": "1", + "description": "Rear fiber port", + "mark_connected": true, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "rear_port": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "module_bay": { + "name": "Module Bay 1", + "device": { + "name": "Device 1", + "role": {"name": "Device 
Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + } + }, + "module_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S-MODULE" + } + }, + "name": "Rear Port 1", + "label": "RP1", + "type": "lc-apc", + "color": "0000ff", + "positions": "1", + "description": "Rear fiber port Updated", + "mark_connected": true, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Rear fiber port Updated" + } + }, + { + "name": "dcim_region_1", + "object_type": "dcim.region", + "lookup": {"name": "North America"}, + "create_expect": { + "name": "North America", + "parent.name": "Global", + "description": "North American Region" + }, + "create": { + "region": { + "name": "North America", + "slug": "north-america", + "parent": { + "name": "Global", + "slug": "global", + "description": "Global Region", + "tags": [{"name": "Tag 1"}] + }, + "description": "North American Region", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "region": { + "name": "North America", + "slug": "north-america", + "parent": { + "name": "Global", + "slug": "global", + "description": "Global Region", + "tags": [{"name": "Tag 1"}] + }, + "description": "North American Region Updated", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "North American Region Updated" + } + }, + { + "name": "ipam_role_1", + "object_type": "ipam.role", + "lookup": {"name": "Network Administrator"}, + "create_expect": { + "name": "Network Administrator", + "weight": 1000, + "description": "Primary network administration role" + }, + "create": { + "role": { + "name": "Network Administrator", + "slug": "network-admin", + "weight": "1000", + "description": "Primary network administration role", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "role": { + "name": "Network Administrator", + "slug": "network-admin", + 
"weight": "1000", + "description": "Primary network administration role Updated", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Primary network administration role Updated" + } + }, + { + "name": "ipam_routetarget_1", + "object_type": "ipam.routetarget", + "lookup": {"name": "65000:1001"}, + "create_expect": { + "name": "65000:1001", + "tenant.name": "Tenant 1", + "description": "Primary route target for MPLS VPN" + }, + "create": { + "route_target": { + "name": "65000:1001", + "tenant": {"name": "Tenant 1"}, + "description": "Primary route target for MPLS VPN", + "comments": "Used for customer VPN service", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "route_target": { + "name": "65000:1001", + "tenant": {"name": "Tenant 1"}, + "description": "Primary route target for MPLS VPN Updated", + "comments": "Used for customer VPN service", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Primary route target for MPLS VPN Updated" + } + }, + { + "name": "ipam_service_1", + "object_type": "ipam.service", + "lookup": {"name": "Web Server"}, + "create_expect": { + "name": "Web Server", + "protocol": "tcp", + "ports": [80, 443], + "description": "Primary web server service" + }, + "create": { + "service": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "Web Server", + "protocol": "tcp", + "ports": ["80", "443"], + "description": "Primary web server service", + "comments": "Handles HTTPS traffic for main website", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}], + "ipaddresses": [ + { + "address": "192.168.1.100/24", + "status": "active", + "dns_name": "web.example.com" + } + ] + } + }, + "update": { + "service": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + 
"device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "Web Server", + "protocol": "tcp", + "ports": ["80", "443"], + "description": "Primary web server service Updated", + "comments": "Handles HTTPS traffic for main website", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}], + "ipaddresses": [ + { + "address": "192.168.1.100/24", + "status": "active", + "dns_name": "web.example.com" + } + ] + } + }, + "update_expect": { + "description": "Primary web server service Updated" + } + }, + { + "name": "dcim_site_1", + "object_type": "dcim.site", + "lookup": {"name": "Data Center West"}, + "create_expect": { + "name": "Data Center West", + "region.name": "North America", + "group.name": "Primary Data Centers", + "description": "Primary West Coast Data Center" + }, + "create": { + "site": { + "name": "Data Center West", + "slug": "dc-west", + "status": "active", + "region": { + "name": "North America", + "slug": "north-america" + }, + "group": { + "name": "Primary Data Centers", + "slug": "primary-dcs" + }, + "tenant": {"name": "Tenant 1"}, + "facility": "Building 7", + "time_zone": "America/Los_Angeles", + "description": "Primary West Coast Data Center", + "physical_address": "123 Tech Drive, San Jose, CA 95134", + "shipping_address": "Receiving Dock 3, 123 Tech Drive, San Jose, CA 95134", + "latitude": 37.3382, + "longitude": -121.8863, + "comments": "24x7 access requires security clearance", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}], + "asns": [ + { + "asn": "555", + "rir": {"name": "RIR 1"}, + "tenant": {"name": "Tenant 1"}, + "description": "ASN 555 Description", + "comments": "ASN 555 Comments", + "tags": [{"name": "Tag 1"}] + } + ] + } + }, + "update": { + "site": { + "name": "Data Center West", + "slug": "dc-west", + "status": "active", + "region": { + "name": "North America", + "slug": "north-america" + }, + "group": { + "name": "Primary Data Centers", + "slug": "primary-dcs" + }, + 
"tenant": {"name": "Tenant 1"}, + "facility": "Building 7", + "time_zone": "America/Los_Angeles", + "description": "Primary West Coast Data Center Updated", + "physical_address": "123 Tech Drive, San Jose, CA 95134", + "shipping_address": "Receiving Dock 3, 123 Tech Drive, San Jose, CA 95134", + "latitude": 37.3382, + "longitude": -121.8863, + "comments": "24x7 access requires security clearance", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}], + "asns": [ + { + "asn": "555", + "rir": {"name": "RIR 1"}, + "tenant": {"name": "Tenant 1"}, + "description": "ASN 555 Description", + "comments": "ASN 555 Comments", + "tags": [{"name": "Tag 1"}] + } + ] + } + }, + "update_expect": { + "description": "Primary West Coast Data Center Updated" + } + }, + { + "name": "dcim_sitegroup_1", + "object_type": "dcim.sitegroup", + "lookup": {"name": "Global Data Centers"}, + "create_expect": { + "name": "Global Data Centers", + "parent.name": "Infrastructure", + "description": "Worldwide data center facilities" + }, + "create": { + "site_group": { + "name": "Global Data Centers", + "slug": "global-dcs", + "parent": { + "name": "Infrastructure", + "slug": "infrastructure" + }, + "description": "Worldwide data center facilities", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "site_group": { + "name": "Global Data Centers", + "slug": "global-dcs", + "parent": { + "name": "Infrastructure", + "slug": "infrastructure" + }, + "description": "Worldwide data center facilities Updated", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Worldwide data center facilities Updated" + } + }, + { + "name": "extras_tag_1", + "object_type": "extras.tag", + "lookup": {"name": "Production"}, + "create_expect": { + "name": "Production", + "slug": "production", + "color": "ff0000" + }, + "create": { + "tag": { + "name": "Production", + "slug": "production", + "color": "ff0000" + } + }, + "update": { + "tag": { + "name": 
"Production", + "slug": "production", + "color": "00ff00" + } + }, + "update_expect": { + "color": "00ff00" + } + }, + { + "name": "tenancy_tenant_1", + "object_type": "tenancy.tenant", + "lookup": {"name": "Acme Corporation"}, + "create_expect": { + "name": "Acme Corporation", + "slug": "acme-corp", + "description": "Global technology solutions provider" + }, + "create": { + "tenant": { + "name": "Acme Corporation", + "slug": "acme-corp", + "group": { + "name": "Enterprise Customers", + "slug": "enterprise-customers" + }, + "description": "Global technology solutions provider", + "comments": "Fortune 500 company with worldwide operations", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + + } + }, + "update": { + "tenant": { + "name": "Acme Corporation", + "slug": "acme-corp", + "group": { + "name": "Enterprise Customers", + "slug": "enterprise-customers" + }, + "description": "Global technology solutions provider Updated", + "comments": "Fortune 500 company with worldwide operations", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Global technology solutions provider Updated" + } + }, + { + "name": "tenancy_tenantgroup_1", + "object_type": "tenancy.tenantgroup", + "lookup": {"name": "Financial Services"}, + "create_expect": { + "name": "Financial Services", + "description": "Banking and financial industry customers" + }, + "create": { + "tenant_group": { + "name": "Financial Services", + "slug": "financial-services", + "parent": { + "name": "Enterprise Sectors", + "slug": "enterprise-sectors" + }, + "description": "Banking and financial industry customers", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "tenant_group": { + "name": "Financial Services", + "slug": "financial-services", + "parent": { + "name": "Enterprise Sectors", + "slug": "enterprise-sectors" + }, + "description": "Banking and financial industry customers Updated", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + 
}, + "update_expect": { + "description": "Banking and financial industry customers Updated" + } + }, + { + "name": "vpn_tunnel_1", + "object_type": "vpn.tunnel", + "lookup": {"name": "DC-West-to-East-Primary"}, + "create_expect": { + "name": "DC-West-to-East-Primary", + "status": "active" + }, + "create": { + "tunnel": { + "name": "DC-West-to-East-Primary", + "status": "active", + "group": { + "name": "Inter-DC Tunnels", + "slug": "inter-dc-tunnels" + }, + "encapsulation": "ipsec-tunnel", + "ipsec_profile": { + "name": "IPSEC-PROFILE-1", + "mode": "esp", + "ike_policy": { + "name": "IKE-POLICY-TUN-1", + "version": "2", + "preshared_key": "1234567890", + "comments": "Using AES-256-GCM encryption with PFS" + }, + "ipsec_policy": { + "name": "IPSEC-POLICY-1", + "pfs_group": "2" + } + }, + "tenant": {"name": "Tenant 1"}, + "tunnel_id": "1001", + "description": "Primary IPSec tunnel between West and East data centers", + "comments": "Using AES-256-GCM encryption with PFS", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "tunnel": { + "name": "DC-West-to-East-Primary", + "status": "active", + "group": { + "name": "Inter-DC Tunnels", + "slug": "inter-dc-tunnels" + }, + "encapsulation": "ipsec-tunnel", + "ipsec_profile": { + "name": "IPSEC-PROFILE-1", + "mode": "esp", + "ike_policy": { + "name": "IKE-POLICY-TUN-1", + "version": "2", + "preshared_key": "1234567890", + "comments": "Using AES-256-GCM encryption with PFS" + }, + "ipsec_policy": { + "name": "IPSEC-POLICY-1", + "pfs_group": "2" + } + }, + "tenant": {"name": "Tenant 1"}, + "tunnel_id": "1001", + "description": "Primary IPSec tunnel between West and East data centers Updated", + "comments": "Using AES-256-GCM encryption with PFS", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Primary IPSec tunnel between West and East data centers Updated" + } + }, + { + "name": "vpn_tunnel_group_1", + "object_type": "vpn.tunnelgroup", + "lookup": 
{"name": "Regional Backbones"}, + "create_expect": { + "name": "Regional Backbones", + "description": "High-capacity encrypted tunnels between regional data centers" + }, + "create": { + "tunnel_group": { + "name": "Regional Backbones", + "slug": "regional-backbones", + "description": "High-capacity encrypted tunnels between regional data centers", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "tunnel_group": { + "name": "Regional Backbones", + "slug": "regional-backbones", + "description": "High-capacity encrypted tunnels between regional data centers Updated", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "High-capacity encrypted tunnels between regional data centers Updated" + } + }, + { + "name": "vpn_tunneltermination_1", + "object_type": "vpn.tunneltermination", + "lookup": {"tunnel__name": "DC-West-to-East-Primary"}, + "create_expect": { + "tunnel.name": "DC-West-to-East-Primary", + "role": "hub" + }, + "create": { + "tunnel_termination": { + "tunnel": { + "name": "DC-West-to-East-Primary", + "status": "active", + "encapsulation": "ipsec-tunnel" + }, + "role": "hub", + "termination_device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "outside_ip": { + "address": "203.0.113.1/24", + "status": "active", + "dns_name": "vpn1.example.com" + }, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "tunnel_termination": { + "tunnel": { + "name": "DC-West-to-East-Primary", + "status": "active", + "encapsulation": "ipsec-tunnel" + }, + "role": "hub", + "termination_device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "outside_ip": { + "address": "203.0.113.1/24", + "status": "active", + "dns_name": 
"vpn1.example.com" + }, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}, {"name": "Tag 3"}] + } + }, + "update_expect": { + "tags.all.__by_name": ["Tag 1", "Tag 2", "Tag 3"] + } + }, + { + "name": "ipam_vlan_1", + "object_type": "ipam.vlan", + "lookup": {"vid": 807}, + "create_expect": { + "vid": 807, + "name": "Production Servers" + }, + "create": { + "vlan": { + "group": { + "name": "Production VLANs", + "slug": "production-vlans" + }, + "vid": "807", + "name": "Production Servers", + "tenant": {"name": "Tenant 1"}, + "status": "active", + "role": { + "name": "Production", + "slug": "production" + }, + "description": "Primary production server network", + "qinq_role": "cvlan", + "qinq_svlan": { + "vid": "1909", + "name": "Service Provider VLAN" + }, + "comments": "Used for customer-facing production workloads", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "vlan": { + "group": { + "name": "Production VLANs", + "slug": "production-vlans" + }, + "vid": "807", + "name": "Production Servers", + "tenant": {"name": "Tenant 1"}, + "status": "active", + "role": { + "name": "Production", + "slug": "production" + }, + "description": "Primary production server network Updated", + "qinq_role": "cvlan", + "qinq_svlan": { + "vid": "1909", + "name": "Service Provider VLAN" + }, + "comments": "Used for customer-facing production workloads", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Primary production server network Updated" + } + }, + { + "name": "ipam_vlan_group_1", + "object_type": "ipam.vlangroup", + "lookup": {"name": "Data Center Core"}, + "create_expect": { + "name": "Data Center Core", + "slug": "dc-core", + "description": "Core network VLANs for data center infrastructure" + }, + "create": { + "vlan_group": { + "name": "Data Center Core", + "slug": "dc-core", + "scope_site": { + "name": "Data Center West", + "slug": "dc-west", + "status": "active" + }, + "description": "Core network VLANs 
for data center infrastructure", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "vlan_group": { + "name": "Data Center Core", + "slug": "dc-core", + "scope_site": { + "name": "Data Center West", + "slug": "dc-west", + "status": "active" + }, + "description": "Core network VLANs for data center infrastructure Updated", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Core network VLANs for data center infrastructure Updated" + } + }, + { + "name": "ipam_vlan_translation_policy_1", + "object_type": "ipam.vlantranslationpolicy", + "lookup": {"name": "Customer Edge Translation"}, + "create_expect": { + "name": "Customer Edge Translation", + "description": "VLAN translation policy for customer edge interfaces" + }, + "create": { + "vlan_translation_policy": { + "name": "Customer Edge Translation", + "description": "VLAN translation policy for customer edge interfaces" + } + }, + "update": { + "vlan_translation_policy": { + "name": "Customer Edge Translation", + "description": "VLAN translation policy for customer edge interfaces Updated" + } + }, + "update_expect": { + "description": "VLAN translation policy for customer edge interfaces Updated" + } + }, + { + "name": "ipam_vlan_translation_rule_1", + "object_type": "ipam.vlantranslationrule", + "lookup": {"policy__name": "Customer Edge Translation", "local_vid": "100"}, + "create_expect": { + "policy.name": "Customer Edge Translation", + "local_vid": 100, + "remote_vid": 1100, + "description": "Map customer VLAN 100 to provider VLAN 1100" + }, + "create": { + "vlan_translation_rule": { + "policy": { + "name": "Customer Edge Translation", + "description": "VLAN translation policy for customer edge interfaces" + }, + "local_vid": "100", + "remote_vid": "1100", + "description": "Map customer VLAN 100 to provider VLAN 1100" + } + }, + "update": { + "vlan_translation_rule": { + "policy": { + "name": "Customer Edge Translation", + "description": "VLAN 
translation policy for customer edge interfaces" + }, + "local_vid": "100", + "remote_vid": "1100", + "description": "Map customer VLAN 100 to provider VLAN 1100 Updated" + } + }, + "update_expect": { + "description": "Map customer VLAN 100 to provider VLAN 1100 Updated" + } + }, + { + "name": "virtualization_vminterface_1", + "object_type": "virtualization.vminterface", + "lookup": {"name": "eth0"}, + "create_expect": { + "name": "eth0", + "description": "Primary network interface" + }, + "create": { + "vm_interface": { + "virtual_machine": { + "name": "web-server-01", + "status": "active", + "role": {"name": "Web Server"}, + "site": {"name": "Site 1"} + }, + "name": "eth0", + "enabled": true, + "parent": { + "virtual_machine": { + "name": "web-server-01", + "status": "active", + "role": {"name": "Web Server"}, + "site": {"name": "Site 1"} + }, + "name": "bond0" + }, + "bridge": { + "virtual_machine": { + "name": "web-server-01", + "status": "active", + "role": {"name": "Web Server"}, + "site": {"name": "Site 1"} + }, + "name": "br0" + }, + "mtu": "9000", + "primary_mac_address": { + "mac_address": "00:1A:2B:3C:4D:5E" + }, + "description": "Primary network interface", + "mode": "q-in-q", + "untagged_vlan": { + "vid": "1101", + "name": "Production Servers" + }, + "qinq_svlan": { + "vid": "1000", + "name": "Service Provider VLAN" + }, + "vlan_translation_policy": { + "name": "Customer Edge Translation" + }, + "vrf": { + "name": "PROD-VRF", + "rd": "65000:1" + }, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "vm_interface": { + "virtual_machine": { + "name": "web-server-01", + "status": "active", + "role": {"name": "Web Server"}, + "site": {"name": "Site 1"} + }, + "name": "eth0", + "enabled": true, + "parent": { + "virtual_machine": { + "name": "web-server-01", + "status": "active", + "role": {"name": "Web Server"}, + "site": {"name": "Site 1"} + }, + "name": "bond0" + }, + "bridge": { + "virtual_machine": { + "name": "web-server-01", + 
"status": "active", + "role": {"name": "Web Server"}, + "site": {"name": "Site 1"} + }, + "name": "br0" + }, + "mtu": "9000", + "primary_mac_address": { + "mac_address": "00:1A:2B:3C:4D:5E" + }, + "description": "Primary network interface Updated", + "mode": "q-in-q", + "untagged_vlan": { + "vid": "1101", + "name": "Production Servers" + }, + "qinq_svlan": { + "vid": "1000", + "name": "Service Provider VLAN" + }, + "vlan_translation_policy": { + "name": "Customer Edge Translation" + }, + "vrf": { + "name": "PROD-VRF", + "rd": "65000:1" + }, + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Primary network interface Updated" + } + }, + { + "name": "ipam_vrf_1", + "object_type": "ipam.vrf", + "lookup": {"name": "Customer-A-VRF"}, + "create_expect": { + "name": "Customer-A-VRF", + "rd": "65000:100", + "tenant.name": "Tenant 1", + "enforce_unique": true, + "description": "Isolated routing domain for Customer A", + "comments": "Used for customer's private network services" + }, + "create": { + "vrf": { + "name": "Customer-A-VRF", + "rd": "65000:100", + "tenant": {"name": "Tenant 1"}, + "enforce_unique": true, + "description": "Isolated routing domain for Customer A", + "comments": "Used for customer's private network services", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}], + "import_targets": [ + { + "name": "65000:100" + }, + { + "name": "65000:101" + } + ], + "export_targets": [ + { + "name": "65000:103" + } + ] + } + }, + "update": { + "vrf": { + "name": "Customer-A-VRF", + "rd": "65000:100", + "tenant": {"name": "Tenant 1"}, + "enforce_unique": true, + "description": "Isolated routing domain for Customer A Updated", + "comments": "Used for customer's private network services", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}], + "import_targets": [ + { + "name": "65000:100" + }, + { + "name": "65000:101" + } + ], + "export_targets": [ + { + "name": "65000:103" + } + ] + } + }, + "update_expect": { + "description": 
"Isolated routing domain for Customer A Updated" + } + }, + { + "name": "dcim_virtualchassis_1", + "object_type": "dcim.virtualchassis", + "lookup": {"name": "Stack-DC1-Core"}, + "create_expect": { + "name": "Stack-DC1-Core", + "domain": "dc1-core.example.com" + }, + "create": { + "virtual_chassis": { + "name": "Stack-DC1-Core", + "domain": "dc1-core.example.com", + "master": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "description": "Core switch stack in DC1", + "comments": "Primary switching infrastructure for data center 1", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "virtual_chassis": { + "name": "Stack-DC1-Core", + "domain": "dc1-core.example.com", + "master": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "description": "Core switch stack in DC1 Updated", + "comments": "Primary switching infrastructure for data center 1", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Core switch stack in DC1 Updated" + } + }, + { + "name": "circuits_virtualcircuit_1", + "object_type": "circuits.virtualcircuit", + "lookup": {"cid": "VC-001-LAX-NYC"}, + "create_expect": { + "cid": "VC-001-LAX-NYC" + }, + "create": { + "virtual_circuit": { + "cid": "VC-001-LAX-NYC", + "provider_network": { + "provider": {"name": "Level 3 Communications"}, + "name": "Global MPLS Network", + "service_id": "L3-MPLS-001" + }, + "provider_account": { + "provider": {"name": "Level 3 Communications"}, + "name": "East Coast Account", + "account": "L3-12345" + }, + "type": { + "name": "MPLS L3VPN", + "slug": "mpls-l3vpn" + }, + "status": "active", + "tenant": {"name": "Tenant 1"}, + "description": "LAX to NYC MPLS circuit", + "comments": "Primary east-west 
connectivity", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "virtual_circuit": { + "cid": "VC-001-LAX-NYC", + "provider_network": { + "provider": {"name": "Level 3 Communications"}, + "name": "Global MPLS Network", + "service_id": "L3-MPLS-001" + }, + "provider_account": { + "provider": {"name": "Level 3 Communications"}, + "name": "East Coast Account", + "account": "L3-12345" + }, + "type": { + "name": "MPLS L3VPN", + "slug": "mpls-l3vpn" + }, + "status": "active", + "tenant": {"name": "Tenant 1"}, + "description": "LAX to NYC MPLS circuit Updated", + "comments": "Primary east-west connectivity", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "LAX to NYC MPLS circuit Updated" + } + }, + { + "name": "circuits_virtualcircuittermination_1", + "object_type": "circuits.virtualcircuittermination", + "lookup": {"virtual_circuit__cid": "VC-001-LAX-NYC"}, + "create_expect": { + "virtual_circuit.cid": "VC-001-LAX-NYC", + "role": "hub", + "interface.device.name": "Device 1" + }, + "create": { + "virtual_circuit_termination": { + "virtual_circuit": { + "cid": "VC-001-LAX-NYC", + "provider_network": { + "provider": {"name": "Level 3 Communications"}, + "name": "Global MPLS Network" + }, + "type": { + "name": "MPLS L3VPN", + "slug": "mpls-l3vpn" + } + }, + "role": "hub", + "interface": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "MegabitEthernet1/0/1", + "type": "virtual", + "enabled": true + }, + "description": "LAX hub termination for east-west MPLS circuit", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "virtual_circuit_termination": { + "virtual_circuit": { + "cid": "VC-001-LAX-NYC", + "provider_network": { + "provider": {"name": "Level 3 Communications"}, + "name": "Global MPLS Network" + }, + "type": { + "name": 
"MPLS L3VPN", + "slug": "mpls-l3vpn" + } + }, + "role": "hub", + "interface": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "MegabitEthernet1/0/1", + "type": "virtual", + "enabled": true + }, + "description": "LAX hub termination for east-west MPLS circuit Updated", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "LAX hub termination for east-west MPLS circuit Updated" + } + }, + { + "name": "circuits_virtualcircuittype_1", + "object_type": "circuits.virtualcircuittype", + "lookup": {"name": "EVPN-VXLAN"}, + "create_expect": { + "name": "EVPN-VXLAN", + "description": "Data center interconnect using EVPN-VXLAN overlay" + }, + "create": { + "virtual_circuit_type": { + "name": "EVPN-VXLAN", + "slug": "evpn-vxlan", + "color": "0000ff", + "description": "Data center interconnect using EVPN-VXLAN overlay", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "virtual_circuit_type": { + "name": "EVPN-VXLAN", + "slug": "evpn-vxlan", + "color": "0000ff", + "description": "Data center interconnect using EVPN-VXLAN overlay Updated", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Data center interconnect using EVPN-VXLAN overlay Updated" + } + }, + { + "name": "dcim_virtualdevicecontext_1", + "object_type": "dcim.virtualdevicecontext", + "lookup": {"name": "VDC-Production"}, + "create_expect": { + "name": "VDC-Production", + "description": "Production virtual device context", + "comments": "Isolated network context for production services", + "identifier": 1, + "device.name": "Device 1", + "primary_ip4.address": "192.168.1.1/32", + "primary_ip6.address": "2001:db8::1/128" + }, + "create": { + "virtual_device_context": { + "name": "VDC-Production", + "device": { + "name": "Device 1", + "role": {"name": "Device 
Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "identifier": "1", + "tenant": {"name": "Tenant 1"}, + "primary_ip4": { + "address": "192.168.1.1", + "assigned_object_interface": { + "type": "1000base-t", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "eth0" + } + }, + "primary_ip6": { + "address": "2001:db8::1", + "assigned_object_interface": { + "type": "1000base-t", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "eth0" + } + }, + "status": "active", + "description": "Production virtual device context", + "comments": "Isolated network context for production services", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + + }, + "update": { + "virtual_device_context": { + "name": "VDC-Production", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "identifier": "1", + "tenant": {"name": "Tenant 1"}, + "primary_ip4": { + "address": "192.168.1.1", + "assigned_object_interface": { + "type": "1000base-t", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "eth0" + } + }, + "primary_ip6": { + "address": "2001:db8::1", + "assigned_object_interface": { + "type": "1000base-t", + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "eth0" + } + }, + "status": "active", + 
"description": "Production virtual device context Updated", + "comments": "Isolated network context for production services", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Production virtual device context Updated" + } + }, + { + "name": "virtualization_virtualdisk_1", + "object_type": "virtualization.virtualdisk", + "lookup": {"name": "root-volume"}, + "create_expect": { + "name": "root-volume", + "description": "Primary system disk" + }, + "create": { + "virtual_disk": { + "virtual_machine": { + "name": "web-server-01", + "status": "active", + "role": {"name": "Web Server"}, + "site": {"name": "Site 1"} + }, + "name": "root-volume", + "description": "Primary system disk", + "size": "182400", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "virtual_disk": { + "virtual_machine": { + "name": "web-server-01", + "status": "active", + "role": {"name": "Web Server"}, + "site": {"name": "Site 1"} + }, + "name": "root-volume", + "description": "Primary system disk Updated", + "size": "182400", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Primary system disk Updated" + } + }, + { + "name": "virtualization_virtualmachine_1", + "object_type": "virtualization.virtualmachine", + "lookup": {"name": "app-server-01"}, + "create_expect": { + "name": "app-server-01", + "description": "Primary application server instance", + "comments": "Hosts critical business applications" + }, + "create": { + "virtual_machine": { + "name": "app-server-01", + "status": "active", + "site": {"name": "Site 1"}, + "cluster": { + "name": "Cluster 1", + "type": {"name": "Cluster Type 1"}, + "scope_site": {"name": "Site 1"} + }, + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"}, + "cluster": { + "name": "Cluster 1", + "type": {"name": "Cluster 
Type 1"}, + "scope_site": {"name": "Site 1"} + } + }, + "serial": "VM-2023-001", + "role": {"name": "Application Server"}, + "tenant": {"name": "Tenant 1"}, + "platform": {"name": "Ubuntu 22.04"}, + "primary_ip4": { + "address": "192.168.2.99", + "assigned_object_vm_interface": { + "virtual_machine": { + "name": "app-server-01", + "cluster": { + "name": "Cluster 1", + "type": {"name": "Cluster Type 1"}, + "scope_site": {"name": "Site 1"} + }, + "tenant": {"name": "Tenant 1"} + }, + "name": "eth0", + "enabled": true, + "mtu": "1500" + } + }, + "primary_ip6": { + "address": "2001:db8::99", + "assigned_object_vm_interface": { + "virtual_machine": { + "name": "app-server-01", + "cluster": { + "name": "Cluster 1", + "type": {"name": "Cluster Type 1"}, + "scope_site": {"name": "Site 1"} + }, + "tenant": {"name": "Tenant 1"} + }, + "name": "eth0", + "enabled": true, + "mtu": "1500" + } + }, + "vcpus": 4.0, + "memory": "214748364", + "disk": "147483647", + "description": "Primary application server instance", + "comments": "Hosts critical business applications", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "virtual_machine": { + "name": "app-server-01", + "status": "active", + "site": {"name": "Site 1"}, + "cluster": { + "name": "Cluster 1", + "type": {"name": "Cluster Type 1"}, + "scope_site": {"name": "Site 1"} + }, + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"}, + "cluster": { + "name": "Cluster 1", + "type": {"name": "Cluster Type 1"}, + "scope_site": {"name": "Site 1"} + } + }, + "serial": "VM-2023-001", + "role": {"name": "Application Server"}, + "tenant": {"name": "Tenant 1"}, + "platform": {"name": "Ubuntu 22.04"}, + "primary_ip4": { + "address": "192.168.2.99", + "assigned_object_vm_interface": { + "virtual_machine": { + "name": "app-server-01", + "cluster": { + "name": "Cluster 1", + "type": {"name": 
"Cluster Type 1"}, + "scope_site": {"name": "Site 1"} + }, + "tenant": {"name": "Tenant 1"} + }, + "name": "eth0", + "enabled": true, + "mtu": "1500" + } + }, + "primary_ip6": { + "address": "2001:db8::99", + "assigned_object_vm_interface": { + "virtual_machine": { + "name": "app-server-01", + "cluster": { + "name": "Cluster 1", + "type": {"name": "Cluster Type 1"}, + "scope_site": {"name": "Site 1"} + }, + "tenant": {"name": "Tenant 1"} + }, + "name": "eth0", + "enabled": true, + "mtu": "1500" + } + }, + "vcpus": 4.0, + "memory": "214748364", + "disk": "147483647", + "description": "Primary application server instance Updated", + "comments": "Hosts critical business applications", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Primary application server instance Updated" + } + }, + { + "name": "wireless_wirelesslan_1", + "object_type": "wireless.wirelesslan", + "lookup": {"ssid": "Corp-Secure"}, + "create_expect": { + "ssid": "Corp-Secure", + "group.name": "Corporate Networks", + "description": "Corporate secure wireless network" + }, + "create": { + "wireless_lan": { + "ssid": "Corp-Secure", + "description": "Corporate secure wireless network", + "group": { + "name": "Corporate Networks", + "slug": "corporate-networks" + }, + "status": "active", + "vlan": { + "vid": 100, + "name": "Production Servers" + }, + "scope_site": {"name": "Site 1"}, + "tenant": {"name": "Tenant 1"}, + "auth_type": "wpa-enterprise", + "auth_cipher": "aes", + "auth_psk": "SecureWiFiKey123!", + "comments": "Primary corporate wireless network with 802.1X authentication", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "wireless_lan": { + "ssid": "Corp-Secure", + "description": "Corporate secure wireless network Updated", + "group": { + "name": "Corporate Networks", + "slug": "corporate-networks" + }, + "status": "active", + "vlan": { + "vid": 100, + "name": "Production Servers" + }, + "scope_site": {"name": "Site 
1"}, + "tenant": {"name": "Tenant 1"}, + "auth_type": "wpa-enterprise", + "auth_cipher": "aes", + "auth_psk": "SecureWiFiKey123!", + "comments": "Primary corporate wireless network with 802.1X authentication", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Corporate secure wireless network Updated" + } + }, + { + "name": "wireless_wirelesslangroup_1", + "object_type": "wireless.wirelesslangroup", + "lookup": {"name": "Corporate Networks"}, + "create_expect": { + "name": "Corporate Networks", + "parent.name": "All Networks", + "description": "Enterprise corporate wireless networks" + }, + "create": { + "wireless_lan_group": { + "name": "Corporate Networks", + "slug": "corporate-networks", + "parent": { + "name": "All Networks", + "slug": "all-networks" + }, + "description": "Enterprise corporate wireless networks", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "wireless_lan_group": { + "name": "Corporate Networks", + "slug": "corporate-networks", + "parent": { + "name": "All Networks", + "slug": "all-networks" + }, + "description": "Enterprise corporate wireless networks Updated", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Enterprise corporate wireless networks Updated" + } + }, + { + "name": "wireless_wirelesslink_1", + "object_type": "wireless.wirelesslink", + "lookup": {"ssid": "P2P-Link-1"}, + "create_expect": { + "interface_a.device.name": "Device 1", + "interface_b.device.name": "Device 2", + "description": "Point-to-point wireless backhaul link" + }, + "create": { + "wireless_link": { + "interface_a": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "Radio0/1", + "type": "ieee802.11ac", + "enabled": true + }, + "interface_b": { + "device": { + "name": "Device 2", + "role": {"name": 
"Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "Radio0/1", + "type": "ieee802.11ac", + "enabled": true + }, + "ssid": "P2P-Link-1", + "status": "connected", + "tenant": {"name": "Tenant 1"}, + "auth_type": "wpa-personal", + "auth_cipher": "aes", + "auth_psk": "P2PLinkKey123!", + "distance": 1.5, + "distance_unit": "km", + "description": "Point-to-point wireless backhaul link", + "comments": "Building A to Building B wireless bridge", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "wireless_link": { + "interface_a": { + "device": { + "name": "Device 1", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "Radio0/1", + "type": "ieee802.11ac", + "enabled": true + }, + "interface_b": { + "device": { + "name": "Device 2", + "role": {"name": "Device Role 1"}, + "device_type": { + "manufacturer": {"name": "Cisco"}, + "model": "C2960S" + }, + "site": {"name": "Site 1"} + }, + "name": "Radio0/1", + "type": "ieee802.11ac", + "enabled": true + }, + "ssid": "P2P-Link-1", + "status": "connected", + "tenant": {"name": "Tenant 1"}, + "auth_type": "wpa-personal", + "auth_cipher": "aes", + "auth_psk": "P2PLinkKey123!", + "distance": 1.5, + "distance_unit": "km", + "description": "Point-to-point wireless backhaul link Updated", + "comments": "Building A to Building B wireless bridge", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Point-to-point wireless backhaul link Updated" + } + } +] \ No newline at end of file From ce2bf8f166e326e1772c5d97df44491277c64577 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Tue, 22 Apr 2025 18:26:34 +0200 Subject: [PATCH 31/52] feat: diode auth authentication - authentication with OAuth2 instead of API keys - squashed and removed old redundant migrations - removed 
redundant setup step Signed-off-by: Michal Fiedorowicz --- README.md | 41 +---- docker/netbox/env/netbox.env | 5 +- docker/netbox/plugins_dev.py | 4 +- netbox_diode_plugin/__init__.py | 11 +- netbox_diode_plugin/api/authentication.py | 71 +++++---- netbox_diode_plugin/forms.py | 53 ------- .../migrations/0001_initial.py | 123 --------------- ...{0002_setting.py => 0001_squashed_0005.py} | 19 ++- .../migrations/0003_clear_permissions.py | 27 ---- .../migrations/0004_rename_legacy_users.py | 43 ------ .../0005_revoke_superuser_status.py | 36 ----- netbox_diode_plugin/models.py | 16 -- netbox_diode_plugin/plugin_config.py | 68 ++++++--- netbox_diode_plugin/search.py | 18 --- .../templates/diode/settings.html | 71 --------- .../templates/diode/setup.html | 38 ----- netbox_diode_plugin/urls.py | 1 - netbox_diode_plugin/views.py | 141 +----------------- 18 files changed, 111 insertions(+), 675 deletions(-) delete mode 100644 netbox_diode_plugin/migrations/0001_initial.py rename netbox_diode_plugin/migrations/{0002_setting.py => 0001_squashed_0005.py} (78%) delete mode 100644 netbox_diode_plugin/migrations/0003_clear_permissions.py delete mode 100644 netbox_diode_plugin/migrations/0004_rename_legacy_users.py delete mode 100644 netbox_diode_plugin/migrations/0005_revoke_superuser_status.py delete mode 100644 netbox_diode_plugin/search.py delete mode 100644 netbox_diode_plugin/templates/diode/setup.html diff --git a/README.md b/README.md index 374cd6b..6ae6b25 100644 --- a/README.md +++ b/README.md @@ -16,6 +16,7 @@ at [https://netboxlabs.com/blog/introducing-diode-streamlining-data-ingestion-in |:--------------:|:--------------:| | >= 3.7.2 | 0.1.0 | | >= 4.1.0 | 0.4.0 | +| >= 4.2.3 | 1.0.0 | ## Installation @@ -46,20 +47,11 @@ Also in your `configuration.py` file, in order to customise the plugin settings, ```python PLUGINS_CONFIG = { "netbox_diode_plugin": { - # Auto-provision users for Diode plugin - "auto_provision_users": False, - # Diode gRPC target for communication 
with Diode server "diode_target_override": "grpc://localhost:8080/diode", - # User allowed for Diode to NetBox communication - "diode_to_netbox_username": "diode-to-netbox", - - # User allowed for NetBox to Diode communication - "netbox_to_diode_username": "netbox-to-diode", - - # User allowed for data ingestion - "diode_username": "diode-ingestion", + # Username associated with changes applied via plugin + "diode_username": "diode", }, } ``` @@ -67,11 +59,6 @@ PLUGINS_CONFIG = { Note: Once you customise usernames with PLUGINS_CONFIG during first installation, you should not change or remove them later on. Doing so will cause the plugin to stop working properly. -`auto_provision_users` is a boolean flag (default: `False`) that determines whether the plugin should automatically -create the users during -migration. If set to `False`, you will need to provision Diode users with their API keys manually via the plugin's setup -page in the NetBox UI. - Restart NetBox services to load the plugin: ``` @@ -89,28 +76,6 @@ cd /opt/netbox source venv/bin/activate ``` -Three API keys will be needed (these are random 40 character long alphanumeric strings). 
They can be generated and set -to the appropriate environment variables with the following commands: - -```shell -# API key for the Diode service to interact with NetBox -export DIODE_TO_NETBOX_API_KEY=$(head -c20 Map to environment variable " - f'{user_properties["api_key_env_var_name"]} in Diode service' - f'{" and Diode SDK" if user_type == "diode" else ""}' - ) - - initial_value = user_properties.get("api_key") or user_properties.get( - "predefined_api_key" - ) - - if ( - user_properties.get("predefined_api_key") is None - and user_properties.get("api_key") is None - ): - initial_value = NetBoxToken.generate_key() - - self.fields[field_name] = forms.CharField( - required=True, - max_length=40, - validators=[MinLengthValidator(40)], - label=label, - disabled=disabled, - initial=initial_value, - help_text=help_text, - widget=forms.TextInput( - attrs={ - "data-clipboard": "true", - "placeholder": _( - f"Enter a valid API key for {username_or_type} user" - ), - } - ), - ) diff --git a/netbox_diode_plugin/migrations/0001_initial.py b/netbox_diode_plugin/migrations/0001_initial.py deleted file mode 100644 index 2f7983b..0000000 --- a/netbox_diode_plugin/migrations/0001_initial.py +++ /dev/null @@ -1,123 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode Netbox Plugin - Database migrations.""" - -import os - -from django.apps import apps as django_apps -from django.conf import settings as netbox_settings -from django.contrib.contenttypes.management import create_contenttypes -from django.db import migrations, models -from netbox.plugins import get_plugin_config -from users.models import Token as NetBoxToken - -from netbox_diode_plugin.plugin_config import get_diode_usernames - - -# Read secret from file -def _read_secret(secret_name, default=None): - try: - f = open("/run/secrets/" + secret_name, encoding="utf-8") - except OSError: - return default - else: - with f: - return f.readline().strip() - - -def _create_user_with_token(apps, user_type, 
username, group): - User = apps.get_model(netbox_settings.AUTH_USER_MODEL) - """Create a user with the given username and API key if it does not exist.""" - try: - user = User.objects.get(username=username) - except User.DoesNotExist: - user = User.objects.create(username=username, is_active=True) - - user.groups.add(*[group.id]) - - Token = apps.get_model("users", "Token") - - if not Token.objects.filter(user=user).exists(): - key = f"{user_type.upper()}_API_KEY" - api_key = _read_secret(key.lower(), os.getenv(key)) - if api_key is None: - api_key = NetBoxToken.generate_key() - Token.objects.create(user=user, key=api_key) - - return user - - -def configure_plugin(apps, schema_editor): - """Configure the plugin.""" - Group = apps.get_model("users", "Group") - group, _ = Group.objects.get_or_create(name="diode") - - app_config = django_apps.get_app_config("netbox_diode_plugin") - create_contenttypes(app_config, verbosity=0) - - ContentType = apps.get_model("contenttypes", "ContentType") - diode_plugin_object_type = ContentType.objects.get( - app_label="netbox_diode_plugin", model="diode" - ) - - ObjectPermission = apps.get_model("users", "ObjectPermission") - permission, _ = ObjectPermission.objects.get_or_create( - name="Diode", - actions=["add", "view"], - ) - permission.object_types.set([diode_plugin_object_type.id]) - - auto_provision_users = get_plugin_config( - "netbox_diode_plugin", "auto_provision_users" - ) - - if not auto_provision_users: - return - - diode_to_netbox_user_id = None - - for user_type, username in get_diode_usernames().items(): - user = _create_user_with_token(apps, user_type, username, group) - if user_type == "diode_to_netbox": - diode_to_netbox_user_id = user.id - - permission.users.set([diode_to_netbox_user_id]) - - -class Migration(migrations.Migration): - """Initial migration.""" - - initial = True - - dependencies = [ - ("contenttypes", "0001_initial"), - ("users", "0006_custom_group_model"), - ] - - operations = [ - 
migrations.CreateModel( - # Does not create any table / fields in the database - # Registers the Diode model as migrated - # This model is used to generate permissions for the Diode NetBox Plugin - name="Diode", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, primary_key=True, serialize=False - ), - ), - ], - options={ - "permissions": ( - ("view_diode", "Can view Diode"), - ("add_diode", "Can apply change sets from Diode"), - ), - "managed": False, - "default_permissions": (), - }, - ), - migrations.RunPython( - code=configure_plugin, reverse_code=migrations.RunPython.noop - ), - ] diff --git a/netbox_diode_plugin/migrations/0002_setting.py b/netbox_diode_plugin/migrations/0001_squashed_0005.py similarity index 78% rename from netbox_diode_plugin/migrations/0002_setting.py rename to netbox_diode_plugin/migrations/0001_squashed_0005.py index 5fa238a..f038a1f 100644 --- a/netbox_diode_plugin/migrations/0002_setting.py +++ b/netbox_diode_plugin/migrations/0001_squashed_0005.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode Netbox Plugin - Database migrations.""" +# Copyright 2025 NetBox Labs, Inc. 
+"""Diode NetBox Plugin - Database migrations.""" import utilities.json from django.db import migrations, models @@ -20,10 +20,21 @@ def create_settings_entity(apps, schema_editor): class Migration(migrations.Migration): - """0002_setting migration.""" + """Initial migration.""" - dependencies = [ + replaces = [ ("netbox_diode_plugin", "0001_initial"), + ("netbox_diode_plugin", "0002_setting"), + ("netbox_diode_plugin", "0003_clear_permissions"), + ("netbox_diode_plugin", "0004_rename_legacy_users"), + ("netbox_diode_plugin", "0005_revoke_superuser_status"), + ] + + initial = True + + dependencies = [ + ("contenttypes", "0001_initial"), + ("users", "0006_custom_group_model"), ] operations = [ diff --git a/netbox_diode_plugin/migrations/0003_clear_permissions.py b/netbox_diode_plugin/migrations/0003_clear_permissions.py deleted file mode 100644 index fa218b7..0000000 --- a/netbox_diode_plugin/migrations/0003_clear_permissions.py +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode Netbox Plugin - Database migrations.""" - -from django.db import migrations - - -def clear_diode_group_permissions(apps, schema_editor): - """Clear Diode group permissions.""" - ObjectPermission = apps.get_model("users", "ObjectPermission") - permission = ObjectPermission.objects.get(name="Diode") - permission.groups.clear() - - -class Migration(migrations.Migration): - """0003_clear_permissions migration.""" - - dependencies = [ - ("netbox_diode_plugin", "0001_initial"), - ("netbox_diode_plugin", "0002_setting"), - ] - - operations = [ - migrations.RunPython( - code=clear_diode_group_permissions, reverse_code=migrations.RunPython.noop - ), - ] diff --git a/netbox_diode_plugin/migrations/0004_rename_legacy_users.py b/netbox_diode_plugin/migrations/0004_rename_legacy_users.py deleted file mode 100644 index b179448..0000000 --- a/netbox_diode_plugin/migrations/0004_rename_legacy_users.py +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env python -# 
Copyright 2024 NetBox Labs Inc -"""Diode Netbox Plugin - Database migrations.""" - -from django.db import migrations - -from netbox_diode_plugin.plugin_config import get_diode_usernames - - -def rename_legacy_users(apps, schema_editor): - """Rename legacy users.""" - legacy_usernames_to_user_type_map = { - "DIODE_TO_NETBOX": "diode_to_netbox", - "NETBOX_TO_DIODE": "netbox_to_diode", - "DIODE": "diode", - } - - User = apps.get_model("users", "User") - users = User.objects.filter( - username__in=legacy_usernames_to_user_type_map.keys(), - groups__name="diode", - ) - - for user in users: - user_type = legacy_usernames_to_user_type_map.get(user.username) - user.username = get_diode_usernames().get(user_type) - user.save() - - -class Migration(migrations.Migration): - """0004_rename_legacy_users migration.""" - - dependencies = [ - ("netbox_diode_plugin", "0001_initial"), - ("netbox_diode_plugin", "0002_setting"), - ("netbox_diode_plugin", "0003_clear_permissions"), - ] - - operations = [ - migrations.RunPython( - code=rename_legacy_users, reverse_code=migrations.RunPython.noop - ), - ] diff --git a/netbox_diode_plugin/migrations/0005_revoke_superuser_status.py b/netbox_diode_plugin/migrations/0005_revoke_superuser_status.py deleted file mode 100644 index 056f9d9..0000000 --- a/netbox_diode_plugin/migrations/0005_revoke_superuser_status.py +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode Netbox Plugin - Database migrations.""" - -from django.db import migrations - -from netbox_diode_plugin.plugin_config import get_diode_usernames - - -def revoke_superuser_status(apps, schema_editor): - """Revoke superuser status.""" - diode_usernames = get_diode_usernames().values() - User = apps.get_model("users", "User") - users = User.objects.filter(username__in=diode_usernames, groups__name="diode") - - for user in users: - user.is_staff = False - user.is_superuser = False - user.save() - - -class Migration(migrations.Migration): - 
"""0005_revoke_superuser_status migration.""" - - dependencies = [ - ("netbox_diode_plugin", "0001_initial"), - ("netbox_diode_plugin", "0002_setting"), - ("netbox_diode_plugin", "0003_clear_permissions"), - ("netbox_diode_plugin", "0004_rename_legacy_users"), - ] - - operations = [ - migrations.RunPython( - code=revoke_superuser_status, reverse_code=migrations.RunPython.noop - ), - ] diff --git a/netbox_diode_plugin/models.py b/netbox_diode_plugin/models.py index a928ed1..86e0271 100644 --- a/netbox_diode_plugin/models.py +++ b/netbox_diode_plugin/models.py @@ -20,22 +20,6 @@ def diode_target_validator(target): raise ValidationError(exc) -class Diode(models.Model): - """Dummy model used to generate permissions for Diode NetBox Plugin. Does not exist in the database.""" - - class Meta: - """Meta class.""" - - managed = False - - default_permissions = () - - permissions = ( - ("view_diode", "Can view Diode"), - ("add_diode", "Can apply change sets from Diode"), - ) - - class Setting(NetBoxModel): """Setting model.""" diff --git a/netbox_diode_plugin/plugin_config.py b/netbox_diode_plugin/plugin_config.py index 12dfbd1..3005dc4 100644 --- a/netbox_diode_plugin/plugin_config.py +++ b/netbox_diode_plugin/plugin_config.py @@ -1,38 +1,62 @@ # !/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. 
"""Diode NetBox Plugin - Plugin Settings.""" +from urllib.parse import urlparse + +from django.contrib.auth import get_user_model from netbox.plugins import get_plugin_config __all__ = ( - "get_diode_user_types", - "get_diode_usernames", - "get_diode_username_for_user_type", + "get_diode_auth_introspect_url", + "get_diode_user", ) +User = get_user_model() + + +def _parse_diode_target(target: str) -> tuple[str, str, bool]: + """Parse the target into authority, path and tls_verify.""" + parsed_target = urlparse(target) + + if parsed_target.scheme not in ["grpc", "grpcs"]: + raise ValueError("target should start with grpc:// or grpcs://") + + tls_verify = parsed_target.scheme == "grpcs" + + authority = parsed_target.netloc -def get_diode_user_types(): - """Returns a list of diode user types.""" - return "diode_to_netbox", "netbox_to_diode", "diode" + return authority, parsed_target.path, tls_verify -def get_diode_user_types_with_labels(): - """Returns a list of diode user types with labels.""" - return ( - ("diode_to_netbox", "Diode to NetBox"), - ("netbox_to_diode", "NetBox to Diode"), - ("diode", "Diode"), +def get_diode_auth_introspect_url(): + """Returns the Diode Auth introspect URL.""" + diode_target = get_plugin_config("netbox_diode_plugin", "diode_target") + diode_target_override = get_plugin_config( + "netbox_diode_plugin", "diode_target_override" ) + authority, path, tls_verify = _parse_diode_target( + diode_target_override or diode_target + ) + scheme = "https" if tls_verify else "http" + path = path.rstrip("/") + + return f"{scheme}://{authority}{path}/auth/introspect" + + +def get_diode_user(): + """Returns the Diode user.""" + diode_username = get_plugin_config("netbox_diode_plugin", "diode_username") + diode_username_override = get_plugin_config( + "netbox_diode_plugin", "diode_username_override" + ) -def get_diode_usernames(): - """Returns a dictionary of diode user types and their configured usernames.""" - return { - user_type: 
get_plugin_config("netbox_diode_plugin", f"{user_type}_username") - for user_type in get_diode_user_types() - } + diode_username = diode_username_override or diode_username + try: + diode_user = User.objects.get(username=diode_username) + except User.DoesNotExist: + diode_user = User.objects.create(username=diode_username, is_active=True) -def get_diode_username_for_user_type(user_type): - """Returns a diode username for a given user type.""" - return get_plugin_config("netbox_diode_plugin", f"{user_type}_username") + return diode_user diff --git a/netbox_diode_plugin/search.py b/netbox_diode_plugin/search.py deleted file mode 100644 index d0bfead..0000000 --- a/netbox_diode_plugin/search.py +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode NetBox Plugin - Search Indices.""" - -from extras.models import Tag -from netbox.search import SearchIndex, register_search - - -@register_search -class TagIndex(SearchIndex): - """Search index for tags.""" - - model = Tag - fields = ( - ("name", 100), - ("slug", 110), - ) - display_attrs = ("color",) diff --git a/netbox_diode_plugin/templates/diode/settings.html b/netbox_diode_plugin/templates/diode/settings.html index a90ed44..3572029 100644 --- a/netbox_diode_plugin/templates/diode/settings.html +++ b/netbox_diode_plugin/templates/diode/settings.html @@ -29,75 +29,4 @@ - -{% if diode_users_errors %} - -{% endif %} - -
-
-
-

{% trans "Diode users" %}

- - - - - - - - - - - {% for username, user_info in diode_users_info.items %} - - - - - - - {% endfor %} - -
{% trans "Username" %}{% trans "API key" %}{% trans "Environment variable" %}
{{ username }} - {{ user_info.api_key|placeholder }} - - Map to environment variable {{ user_info.env_var_name}} in Diode service{% if user_info.env_var_name == "DIODE_API_KEY" %} and Diode SDK{% endif %} - -
- - -
-
-
-
-
- {% endblock content %} diff --git a/netbox_diode_plugin/templates/diode/setup.html b/netbox_diode_plugin/templates/diode/setup.html deleted file mode 100644 index 7da6638..0000000 --- a/netbox_diode_plugin/templates/diode/setup.html +++ /dev/null @@ -1,38 +0,0 @@ -{% extends 'generic/_base.html' %} -{% load i18n %} - -{% block title %}{% trans "Setup" %}{% endblock %} - -{% block content %} - - - -
-
- {% csrf_token %} - -
-

{% trans "Diode users and API Keys" %}

-
- -
- {% block form %} - {% include 'htmx/form.html' %} - {% endblock form %} -
- -
- {% block buttons %} - - {% endblock buttons %} -
-
-
-{% endblock content %} - diff --git a/netbox_diode_plugin/urls.py b/netbox_diode_plugin/urls.py index 613c231..bd12a97 100644 --- a/netbox_diode_plugin/urls.py +++ b/netbox_diode_plugin/urls.py @@ -7,7 +7,6 @@ from . import views urlpatterns = ( - path("setup/", views.SetupView.as_view(), name="setup"), path("settings/", views.SettingsView.as_view(), name="settings"), path("settings/edit/", views.SettingsEditView.as_view(), name="settings_edit"), ) diff --git a/netbox_diode_plugin/views.py b/netbox_diode_plugin/views.py index 99403d6..8623f5a 100644 --- a/netbox_diode_plugin/views.py +++ b/netbox_diode_plugin/views.py @@ -1,8 +1,6 @@ #!/usr/bin/env python # Copyright 2024 NetBox Labs Inc """Diode NetBox Plugin - Views.""" -import os - from django.conf import settings as netbox_settings from django.contrib import messages from django.contrib.auth import get_user_model @@ -12,16 +10,10 @@ from django.views.generic import View from netbox.plugins import get_plugin_config from netbox.views import generic -from users.models import Group, ObjectPermission, Token from utilities.views import register_model_view -from netbox_diode_plugin.forms import SettingsForm, SetupForm +from netbox_diode_plugin.forms import SettingsForm from netbox_diode_plugin.models import Setting -from netbox_diode_plugin.plugin_config import ( - get_diode_user_types_with_labels, - get_diode_username_for_user_type, - get_diode_usernames, -) User = get_user_model() @@ -59,42 +51,11 @@ def get(self, request): diode_target=diode_target_override or default_diode_target ) - diode_users_info = {} - - diode_users_errors = [] - - for user_type, username in get_diode_usernames().items(): - try: - user = get_user_model().objects.get(username=username) - except User.DoesNotExist: - diode_users_errors.append( - f"User '{username}' does not exist, please check plugin configuration." 
- ) - continue - - if not Token.objects.filter(user=user).exists(): - diode_users_errors.append( - f"API key for '{username}' does not exist, please check plugin configuration." - ) - continue - - token = Token.objects.get(user=user) - - diode_users_info[username] = { - "api_key": token.key, - "env_var_name": f"{user_type.upper()}_API_KEY", - } - - if diode_users_errors: - return redirect("plugins:netbox_diode_plugin:setup") - diode_target = diode_target_override or settings.diode_target context = { - "diode_users_errors": diode_users_errors, "diode_target": diode_target, "is_diode_target_overridden": diode_target_override is not None, - "diode_users_info": diode_users_info, } return render(request, "diode/settings.html", context) @@ -148,103 +109,3 @@ def post(self, request, *args, **kwargs): kwargs["pk"] = settings.pk return super().post(request, *args, **kwargs) - - -class SetupView(View): - """Setup view.""" - - form = SetupForm - - @staticmethod - def _retrieve_predefined_api_key(api_key_env_var): - """Retrieve predefined API key from a secret or environment variable.""" - try: - f = open("/run/secrets/" + api_key_env_var, encoding="utf-8") - except OSError: - return os.getenv(api_key_env_var) - else: - with f: - return f.readline().strip() - - def _retrieve_users(self): - """Retrieve users for the setup form.""" - user_types_with_labels = get_diode_user_types_with_labels() - users = { - user_type: { - "username": None, - "user": None, - "api_key": None, - "api_key_env_var_name": f"{user_type.upper()}_API_KEY", - "predefined_api_key": self._retrieve_predefined_api_key( - f"{user_type.upper()}_API_KEY" - ), - } - for user_type, _ in user_types_with_labels - } - for user_type, _ in user_types_with_labels: - username = get_diode_username_for_user_type(user_type) - users[user_type]["username"] = username - - try: - user = get_user_model().objects.get(username=username) - users[user_type]["user"] = user - if Token.objects.filter(user=user).exists(): - 
users[user_type]["api_key"] = Token.objects.get(user=user).key - except User.DoesNotExist: - continue - return users - - def get(self, request): - """GET request handler.""" - if not request.user.is_authenticated or not request.user.is_staff: - return redirect_to_login(request) - - users = self._retrieve_users() - - context = { - "form": self.form(users), - } - - return render(request, "diode/setup.html", context) - - def post(self, request): - """POST request handler.""" - if not request.user.is_authenticated or not request.user.is_staff: - return redirect_to_login(request) - - users = self._retrieve_users() - - form = self.form(users, request.POST) - - group = Group.objects.get(name="diode") - permission = ObjectPermission.objects.get(name="Diode") - - if form.is_valid(): - for field in form.fields: - user_type = field.rsplit("_api_key", 1)[0] - username = users[user_type].get("username") - if username is None: - raise ValueError( - f"Username for user type '{user_type}' is not defined" - ) - - user = users[user_type].get("user") - if user is None: - user = get_user_model().objects.create_user( - username=username, is_active=True - ) - user.groups.add(*[group.id]) - - if user_type == "diode_to_netbox": - permission.users.set([user.id]) - - if not Token.objects.filter(user=user).exists(): - Token.objects.create(user=user, key=form.cleaned_data[field]) - - return redirect("plugins:netbox_diode_plugin:settings") - - context = { - "form": form, - } - - return render(request, "diode/setup.html", context) From 305ddf3ebfec098865f22780ec99d19f7ab9c740 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Tue, 22 Apr 2025 19:31:22 +0200 Subject: [PATCH 32/52] chore: update compatibility version in netbox-plugin.yaml Signed-off-by: Michal Fiedorowicz --- netbox-plugin.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/netbox-plugin.yaml b/netbox-plugin.yaml index 916c77d..3f7daf8 100644 --- a/netbox-plugin.yaml +++ b/netbox-plugin.yaml @@ -1,7 +1,7 
@@ version: 0.1 package_name: netboxlabs-diode-netbox-plugin compatibility: - - release: 0.7.0 + - release: 1.0.0 netbox_min: 4.2.3 netbox_max: 4.2.3 - release: 0.6.0 From 8ea3b2bd10adf5736ea83e33efda120d2f078430 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Tue, 22 Apr 2025 19:31:56 +0200 Subject: [PATCH 33/52] feat: adjust unit test to diode auth with oauth2 Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/authentication.py | 1 - .../tests/test_api_apply_change_set.py | 28 ++- .../tests/test_api_diff_and_apply.py | 44 ++-- .../tests/test_api_generate_diff.py | 26 ++- .../tests/test_authentication.py | 153 ++++++++++++ netbox_diode_plugin/tests/test_forms.py | 42 +--- .../tests/test_plugin_config.py | 62 ++--- netbox_diode_plugin/tests/test_views.py | 218 +----------------- 8 files changed, 234 insertions(+), 340 deletions(-) create mode 100644 netbox_diode_plugin/tests/test_authentication.py diff --git a/netbox_diode_plugin/api/authentication.py b/netbox_diode_plugin/api/authentication.py index e663c7c..52af41c 100644 --- a/netbox_diode_plugin/api/authentication.py +++ b/netbox_diode_plugin/api/authentication.py @@ -4,7 +4,6 @@ import hashlib import logging -import os import requests from django.core.cache import cache diff --git a/netbox_diode_plugin/tests/test_api_apply_change_set.py b/netbox_diode_plugin/tests/test_api_apply_change_set.py index d35e1bb..4315def 100644 --- a/netbox_diode_plugin/tests/test_api_apply_change_set.py +++ b/netbox_diode_plugin/tests/test_api_apply_change_set.py @@ -3,6 +3,7 @@ """Diode NetBox Plugin - Tests.""" import uuid +from unittest import mock from dcim.models import ( Device, @@ -27,6 +28,9 @@ VMInterface, ) +from netbox_diode_plugin.api.authentication import DiodeOAuth2Authentication +from netbox_diode_plugin.plugin_config import get_diode_user + User = get_user_model() def _get_error(response, object_name, field): @@ -37,11 +41,14 @@ class BaseApplyChangeSet(APITestCase): def setUp(self): """Set up 
test.""" - self.user = User.objects.create_user(username="testcommonuser") - self.add_permissions("netbox_diode_plugin.add_diode") - self.user_token = Token.objects.create(user=self.user) - - self.user_header = {"HTTP_AUTHORIZATION": f"Token {self.user_token.key}"} + self.authorization_header = {"Authorization": "Bearer mocked_oauth_token"} + self.diode_user = get_diode_user() + self.auth_patcher = mock.patch.object( + DiodeOAuth2Authentication, + 'authenticate', + return_value=(self.diode_user, None) + ) + self.auth_patcher.start() rir = RIR.objects.create(name="RFC 6996", is_private=True) self.asns = [ASN(asn=65000 + i, rir=rir) for i in range(8)] @@ -165,10 +172,15 @@ def setUp(self): self.url = "/netbox/api/plugins/diode/apply-change-set/" + def tearDown(self): + """Clean up after tests.""" + self.auth_patcher.stop() + super().tearDown() + def send_request(self, payload, status_code=status.HTTP_200_OK): """Post the payload to the url and return the response.""" response = self.client.post( - self.url, data=payload, format="json", **self.user_header + self.url, data=payload, format="json", **self.authorization_header ) self.assertEqual(response.status_code, status_code) return response @@ -262,7 +274,7 @@ def test_change_type_update_return_200(self): } _ = self.client.post( - self.url, payload, format="json", **self.user_header + self.url, payload, format="json", **self.authorization_header ) site_updated = Site.objects.get(id=20) @@ -580,7 +592,7 @@ def test_change_type_update_with_object_id_not_exist_return_400(self): } response = self.client.post( - self.url, payload, format="json", **self.user_header + self.url, payload, format="json", **self.authorization_header ) site_updated = Site.objects.get(id=20) diff --git a/netbox_diode_plugin/tests/test_api_diff_and_apply.py b/netbox_diode_plugin/tests/test_api_diff_and_apply.py index f53b1b1..12762be 100644 --- a/netbox_diode_plugin/tests/test_api_diff_and_apply.py +++ 
b/netbox_diode_plugin/tests/test_api_diff_and_apply.py @@ -7,23 +7,22 @@ import decimal import logging from uuid import uuid4 - +from unittest import mock import netaddr from circuits.models import Circuit from core.models import ObjectType from dcim.models import Device, Interface, Site -from django.contrib.auth import get_user_model from extras.models import CustomField from extras.models.customfields import CustomFieldTypeChoices from ipam.models import IPAddress, VLANGroup from rest_framework import status -from users.models import Token from utilities.testing import APITestCase from virtualization.models import VMInterface -logger = logging.getLogger(__name__) +from netbox_diode_plugin.api.authentication import DiodeOAuth2Authentication +from netbox_diode_plugin.plugin_config import get_diode_user -User = get_user_model() +logger = logging.getLogger(__name__) class GenerateDiffAndApplyTestCase(APITestCase): @@ -33,11 +32,15 @@ def setUp(self): """Set up the test case.""" self.diff_url = "/netbox/api/plugins/diode/generate-diff/" self.apply_url = "/netbox/api/plugins/diode/apply-change-set/" - self.user = User.objects.create_user(username="testcommonuser") - self.user_token = Token.objects.create(user=self.user) - self.user_header = {"HTTP_AUTHORIZATION": f"Token {self.user_token.key}"} - - self.add_permissions("netbox_diode_plugin.add_diode") + + self.authorization_header = {"Authorization": "Bearer mocked_oauth_token"} + self.diode_user = get_diode_user() + self.auth_patcher = mock.patch.object( + DiodeOAuth2Authentication, + 'authenticate', + return_value=(self.diode_user, None) + ) + self.auth_patcher.start() self.object_type = ObjectType.objects.get_for_model(Site) @@ -86,6 +89,11 @@ def setUp(self): self.decimal_field.object_types.set([self.object_type]) self.decimal_field.save() + def tearDown(self): + """Clean up after tests.""" + self.auth_patcher.stop() + super().tearDown() + def test_generate_diff_and_apply_create_interface_with_tags(self): """Test 
generate diff and apply create interface with tags.""" interface_uuid = str(uuid4()) @@ -525,7 +533,7 @@ def test_generate_diff_and_apply_create_and_update_site_with_custom_field(self): } } response1 = self.client.post( - self.diff_url, data=payload, format="json", **self.user_header + self.diff_url, data=payload, format="json", **self.authorization_header ) self.assertEqual(response1.status_code, status.HTTP_200_OK) diff = response1.json().get("change_set", {}) @@ -587,7 +595,7 @@ def test_generate_diff_and_apply_site_with_lat_lon(self): }, } response1 = self.client.post( - self.diff_url, data=payload, format="json", **self.user_header + self.diff_url, data=payload, format="json", **self.authorization_header ) self.assertEqual(response1.status_code, status.HTTP_200_OK) diff = response1.json().get("change_set", {}) @@ -611,14 +619,14 @@ def test_generate_diff_and_apply_wrong_type_date(self): } } response1 = self.client.post( - self.diff_url, data=payload, format="json", **self.user_header + self.diff_url, data=payload, format="json", **self.authorization_header ) self.assertEqual(response1.status_code, status.HTTP_200_OK) diff = response1.json().get("change_set", {}) response2 = self.client.post( - self.apply_url, data=diff, format="json", **self.user_header + self.apply_url, data=diff, format="json", **self.authorization_header ) self.assertEqual(response2.status_code, status.HTTP_400_BAD_REQUEST) @@ -739,7 +747,7 @@ def test_generate_diff_update_ip_address(self): } response1 = self.client.post( - self.diff_url, data=payload, format="json", **self.user_header + self.diff_url, data=payload, format="json", **self.authorization_header ) self.assertEqual(response1.status_code, status.HTTP_200_OK) diff = response1.json().get("change_set", {}) @@ -758,7 +766,7 @@ def test_generate_diff_update_ip_address(self): } response1 = self.client.post( - self.diff_url, data=payload, format="json", **self.user_header + self.diff_url, data=payload, format="json", 
**self.authorization_header ) self.assertEqual(response1.status_code, status.HTTP_200_OK) diff = response1.json().get("change_set", {}) @@ -916,13 +924,13 @@ def test_generate_diff_and_apply_complex_vminterface(self): def diff_and_apply(self, payload): """Diff and apply the payload.""" response1 = self.client.post( - self.diff_url, data=payload, format="json", **self.user_header + self.diff_url, data=payload, format="json", **self.authorization_header ) self.assertEqual(response1.status_code, status.HTTP_200_OK) diff = response1.json().get("change_set", {}) response2 = self.client.post( - self.apply_url, data=diff, format="json", **self.user_header + self.apply_url, data=diff, format="json", **self.authorization_header ) self.assertEqual(response2.status_code, status.HTTP_200_OK) return (response1, response2) diff --git a/netbox_diode_plugin/tests/test_api_generate_diff.py b/netbox_diode_plugin/tests/test_api_generate_diff.py index 0c84fd9..19c4177 100644 --- a/netbox_diode_plugin/tests/test_api_generate_diff.py +++ b/netbox_diode_plugin/tests/test_api_generate_diff.py @@ -2,6 +2,7 @@ # Copyright 2024 NetBox Labs Inc """Diode NetBox Plugin - Tests.""" +from unittest import mock from uuid import uuid4 from core.models import ObjectType @@ -10,10 +11,10 @@ from extras.models import CustomField from extras.models.customfields import CustomFieldTypeChoices from rest_framework import status -from users.models import Token from utilities.testing import APITestCase -User = get_user_model() +from netbox_diode_plugin.api.authentication import DiodeOAuth2Authentication +from netbox_diode_plugin.plugin_config import get_diode_user class GenerateDiffTestCase(APITestCase): """GenerateDiff test cases.""" @@ -22,13 +23,17 @@ def setUp(self): """Set up the test case.""" self.url = "/netbox/api/plugins/diode/generate-diff/" - self.user = User.objects.create_user(username="testcommonuser") - self.add_permissions("netbox_diode_plugin.add_diode") - self.user_token = 
Token.objects.create(user=self.user) + self.authorization_header = {"Authorization": "Bearer mocked_oauth_token"} + self.diode_user = get_diode_user() + self.auth_patcher = mock.patch.object( + DiodeOAuth2Authentication, + 'authenticate', + return_value=(self.diode_user, None) + ) + self.auth_patcher.start() - self.user_header = {"HTTP_AUTHORIZATION": f"Token {self.user_token.key}"} self.object_type = ObjectType.objects.get_for_model(Site) - + self.uuid_field = CustomField.objects.create( name='myuuid', type=CustomFieldTypeChoices.TYPE_TEXT, @@ -74,6 +79,11 @@ def setUp(self): ) self.rack_type.save() + def tearDown(self): + """Clean up after tests.""" + self.auth_patcher.stop() + super().tearDown() + def test_generate_diff_create_site(self): """Test generate diff create site.""" payload = { @@ -282,7 +292,7 @@ def test_generate_diff_update_rack_type_camel_case(self): def send_request(self, payload, status_code=status.HTTP_200_OK): """Post the payload to the url and return the response.""" response = self.client.post( - self.url, data=payload, format="json", **self.user_header + self.url, data=payload, format="json", **self.authorization_header ) self.assertEqual(response.status_code, status_code) return response diff --git a/netbox_diode_plugin/tests/test_authentication.py b/netbox_diode_plugin/tests/test_authentication.py new file mode 100644 index 0000000..8042d37 --- /dev/null +++ b/netbox_diode_plugin/tests/test_authentication.py @@ -0,0 +1,153 @@ +#!/usr/bin/env python +# Copyright 2024 NetBox Labs Inc +"""Diode NetBox Plugin - Authentication Tests.""" + +from unittest import mock + +from django.core.cache import cache +from django.test import TestCase +from rest_framework.exceptions import AuthenticationFailed +from rest_framework.request import Request +from rest_framework.test import APIRequestFactory + +from netbox_diode_plugin.api.authentication import DiodeOAuth2Authentication +from netbox_diode_plugin.plugin_config import get_diode_user, 
get_diode_auth_introspect_url + + +class DiodeOAuth2AuthenticationTestCase(TestCase): + """Test cases for DiodeOAuth2Authentication.""" + + def setUp(self): + """Set up test case.""" + self.auth = DiodeOAuth2Authentication() + self.factory = APIRequestFactory() + self.diode_user = get_diode_user() + self.valid_token = "valid_oauth_token" + self.invalid_token = "invalid_oauth_token" + self.token_without_scope = "token_without_scope" + self.token_with_scope = "token_with_scope" + + # Mock the cache + self.cache_patcher = mock.patch.object(cache, 'get') + self.cache_get_mock = self.cache_patcher.start() + self.cache_set_patcher = mock.patch.object(cache, 'set') + self.cache_set_mock = self.cache_set_patcher.start() + + # Mock requests.post for token introspection + self.requests_patcher = mock.patch('requests.post') + self.requests_mock = self.requests_patcher.start() + self.requests_mock.return_value.raise_for_status = mock.Mock() + + # Mock get_diode_auth_introspect_url + self.introspect_url_patcher = mock.patch( + 'netbox_diode_plugin.plugin_config.get_diode_auth_introspect_url', + return_value='http://test-introspect-url' + ) + self.introspect_url_patcher.start() + + def tearDown(self): + """Clean up after tests.""" + self.cache_patcher.stop() + self.cache_set_patcher.stop() + self.requests_patcher.stop() + self.introspect_url_patcher.stop() + + def test_authenticate_no_auth_header(self): + """Test authentication with no Authorization header.""" + request = self.factory.get('/') + result = self.auth.authenticate(request) + self.assertIsNone(result) + + def test_authenticate_invalid_auth_header_format(self): + """Test authentication with invalid Authorization header format.""" + request = self.factory.get('/', HTTP_AUTHORIZATION='InvalidFormat') + result = self.auth.authenticate(request) + self.assertIsNone(result) + + def test_authenticate_cached_token(self): + """Test authentication with cached token.""" + self.cache_get_mock.return_value = self.diode_user + 
request = self.factory.get('/', HTTP_AUTHORIZATION=f'Bearer {self.valid_token}') + + user, _ = self.auth.authenticate(request) + self.assertEqual(user, self.diode_user) + self.cache_get_mock.assert_called_once() + + def test_authenticate_invalid_token(self): + """Test authentication with invalid token.""" + self.cache_get_mock.return_value = None + self.requests_mock.return_value.json.return_value = {'active': False} + + request = self.factory.get('/', HTTP_AUTHORIZATION=f'Bearer {self.invalid_token}') + + with self.assertRaises(AuthenticationFailed): + self.auth.authenticate(request) + + def test_authenticate_token_without_required_scope(self): + """Test authentication with token missing required scope.""" + self.cache_get_mock.return_value = None + self.requests_mock.return_value.json.return_value = { + 'active': True, + 'scope': 'other:scope' + } + + request = self.factory.get('/', HTTP_AUTHORIZATION=f'Bearer {self.token_without_scope}') + + with self.assertRaises(AuthenticationFailed): + self.auth.authenticate(request) + + def test_authenticate_token_with_required_scope(self): + """Test authentication with token having required scope.""" + self.cache_get_mock.return_value = None + self.requests_mock.return_value.json.return_value = { + 'active': True, + 'scope': 'default:diode:netbox', + 'exp': 1000, + 'iat': 500 + } + + request = self.factory.get('/', HTTP_AUTHORIZATION=f'Bearer {self.token_with_scope}') + + user, _ = self.auth.authenticate(request) + self.assertEqual(user, self.diode_user) + self.cache_set_mock.assert_called_once() + + def test_authenticate_token_introspection_failure(self): + """Test authentication when token introspection fails.""" + self.cache_get_mock.return_value = None + self.requests_mock.side_effect = Exception("Introspection failed") + + request = self.factory.get('/', HTTP_AUTHORIZATION=f'Bearer {self.valid_token}') + + with self.assertRaises(AuthenticationFailed): + self.auth.authenticate(request) + + def 
test_authenticate_token_with_default_expiry(self): + """Test authentication with token having no expiry information.""" + self.cache_get_mock.return_value = None + self.requests_mock.return_value.json.return_value = { + 'active': True, + 'scope': 'default:diode:netbox' + } + + request = self.factory.get('/', HTTP_AUTHORIZATION=f'Bearer {self.token_with_scope}') + + user, _ = self.auth.authenticate(request) + self.assertEqual(user, self.diode_user) + + self.cache_set_mock.assert_called_once() + + # Get the actual call arguments + call_args = self.cache_set_mock.call_args + if not call_args: + self.fail("Cache set was not called with any arguments") + + # The cache key should start with 'diode:oauth2:introspect:' + cache_key = call_args.args[0] + self.assertTrue(cache_key.startswith('diode:oauth2:introspect:')) + + # The cached value should be the diode user + self.assertEqual(call_args.args[1], self.diode_user) + + # The timeout should be 300 (default) + self.assertEqual(call_args.kwargs['timeout'], 300) \ No newline at end of file diff --git a/netbox_diode_plugin/tests/test_forms.py b/netbox_diode_plugin/tests/test_forms.py index bc794f0..63d45de 100644 --- a/netbox_diode_plugin/tests/test_forms.py +++ b/netbox_diode_plugin/tests/test_forms.py @@ -3,14 +3,11 @@ """Diode NetBox Plugin - Tests.""" from unittest import mock -from django.contrib.auth import get_user_model from django.test import TestCase -from netbox_diode_plugin.forms import SettingsForm, SetupForm +from netbox_diode_plugin.forms import SettingsForm from netbox_diode_plugin.models import Setting -User = get_user_model() - class SettingsFormTestCase(TestCase): """Test case for the SettingsForm.""" @@ -37,7 +34,7 @@ def test_form_initialization_with_override_allowed(self): form.fields["diode_target"].help_text, ) - def test_form_initialization_with_diode_targer_override(self): + def test_form_initialization_with_diode_target_override(self): """Test form initialization when override is disallowed.""" 
with mock.patch( "netbox_diode_plugin.forms.get_plugin_config" @@ -52,38 +49,3 @@ def test_form_initialization_with_diode_targer_override(self): "This field is not allowed to be modified.", form.fields["diode_target"].help_text, ) - - -class SetupFormTestCase(TestCase): - """Test case for the SetupForm.""" - - def setUp(self): - """Set up the test case.""" - self.users = { - "diode_to_netbox": { - "username": "diode-to-netbox", - "api_key_env_var_name": "DIODE_TO_NETBOX_API_KEY", - "predefined_api_key": None, - "api_key": None, - "user": None, - }, - "diode": { - "username": "diode-ingestion", - "api_key_env_var_name": "DIODE_API_KEY", - "predefined_api_key": "5a52c45ee8231156cb620d193b0291912dd15433", - "api_key": None, - "user": User.objects.get(username="diode-ingestion"), - }, - } - - def test_form_initialization(self): - """Test form initialization with given users.""" - form = SetupForm(users=self.users) - self.assertIn("diode_to_netbox_api_key", form.fields) - self.assertFalse(form.fields["diode_to_netbox_api_key"].disabled) - self.assertIn("diode_api_key", form.fields) - self.assertTrue(form.fields["diode_api_key"].disabled) - self.assertEqual( - form.fields["diode_api_key"].initial, - self.users["diode"]["predefined_api_key"], - ) diff --git a/netbox_diode_plugin/tests/test_plugin_config.py b/netbox_diode_plugin/tests/test_plugin_config.py index 9cca798..662771c 100644 --- a/netbox_diode_plugin/tests/test_plugin_config.py +++ b/netbox_diode_plugin/tests/test_plugin_config.py @@ -3,62 +3,28 @@ """Diode NetBox Plugin - Tests.""" from unittest.mock import patch +from django.contrib.auth import get_user_model from django.test import TestCase from netbox_diode_plugin.plugin_config import ( - get_diode_user_types, - get_diode_user_types_with_labels, - get_diode_username_for_user_type, - get_diode_usernames, + get_diode_auth_introspect_url, + get_diode_user, ) +User = get_user_model() + class PluginConfigTestCase(TestCase): """Test case for plugin config 
helpers.""" - def test_get_diode_user_types(self): - """Test get_diode_user_types function.""" - expected = ("diode_to_netbox", "netbox_to_diode", "diode") - self.assertEqual(get_diode_user_types(), expected) - - def test_get_diode_user_types_with_labels(self): - """Test get_diode_user_types_with_labels function.""" - expected = ( - ("diode_to_netbox", "Diode to NetBox"), - ("netbox_to_diode", "NetBox to Diode"), - ("diode", "Diode"), - ) - self.assertEqual(get_diode_user_types_with_labels(), expected) + def test_get_diode_auth_introspect_url(self): + """Test get_diode_auth_introspect_url function.""" + expected = "http://localhost:8080/diode/auth/introspect" + self.assertEqual(get_diode_auth_introspect_url(), expected) - @patch("netbox_diode_plugin.plugin_config.get_plugin_config") - def test_get_diode_usernames(self, mock_get_plugin_config): - """Test get_diode_usernames function.""" - mock_usernames = { - "diode_to_netbox_username": "diode-to-netbox", - "netbox_to_diode_username": "netbox-to-diode", - "diode_username": "diode-ingestion", - } - mock_get_plugin_config.side_effect = lambda plugin, key: mock_usernames[key] - expected = { - "diode_to_netbox": "diode-to-netbox", - "netbox_to_diode": "netbox-to-diode", - "diode": "diode-ingestion", - } - self.assertEqual(get_diode_usernames(), expected) + def test_get_diode_user(self): + """Test get_diode_user function.""" + diode_user = get_diode_user() + expected_diode_user = User.objects.get(username="diode") + self.assertEqual(diode_user, expected_diode_user) - @patch("netbox_diode_plugin.plugin_config.get_plugin_config") - def test_get_diode_username_for_user_type(self, mock_get_plugin_config): - """Test get_diode_username_for_user_type function.""" - mock_usernames = { - "diode_to_netbox_username": "diode-to-netbox", - "netbox_to_diode_username": "netbox-to-diode", - "diode_username": "diode-ingestion", - } - mock_get_plugin_config.side_effect = lambda plugin, key: mock_usernames[key] - self.assertEqual( - 
get_diode_username_for_user_type("netbox_to_diode"), "netbox-to-diode" - ) - self.assertEqual( - get_diode_username_for_user_type("diode_to_netbox"), "diode-to-netbox" - ) - self.assertEqual(get_diode_username_for_user_type("diode"), "diode-ingestion") diff --git a/netbox_diode_plugin/tests/test_views.py b/netbox_diode_plugin/tests/test_views.py index aaf54ab..bfeb39b 100644 --- a/netbox_diode_plugin/tests/test_views.py +++ b/netbox_diode_plugin/tests/test_views.py @@ -11,10 +11,9 @@ from django.test import RequestFactory, TestCase from django.urls import reverse from rest_framework import status -from users.models import Token from netbox_diode_plugin.models import Setting -from netbox_diode_plugin.views import SettingsEditView, SettingsView, SetupView +from netbox_diode_plugin.views import SettingsEditView, SettingsView User = get_user_model() @@ -59,60 +58,6 @@ def test_settings_created_if_not_found(self): self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertIn("grpc://localhost:8080/diode", str(response.content)) - def test_redirects_to_setup_view_on_missing_diode_user(self): - """Test that we redirect to setup view when the Diode user is missing.""" - self.request.user = User.objects.create_user("foo", password="pass") - self.request.user.is_staff = True - - with ( - mock.patch( - "netbox_diode_plugin.views.get_diode_usernames" - ) as mock_get_diode_usernames, - mock.patch( - "netbox_diode_plugin.views.get_user_model" - ) as mock_get_user_model, - ): - mock_get_diode_usernames.return_value = { - "diode_to_netbox": "diode-to-netbox", - "netbox_to_diode": "fake-netbox-to-diode", - "diode": "diode-ingestion", - } - mock_get_user_model.return_value.objects.get.side_effect = [ - User.objects.get(username="diode-to-netbox"), - User.DoesNotExist, - User.objects.get(username="diode-ingestion"), - ] - - response = self.view.get(self.request) - - self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(response.url, 
reverse("plugins:netbox_diode_plugin:setup")) - - def test_redirects_to_setup_view_on_missing_diode_user_token(self): - """Test that we redirect to setup view when the Diode user token is missing.""" - self.request.user = User.objects.create_user("foo", password="pass") - self.request.user.is_staff = True - - with ( - mock.patch( - "netbox_diode_plugin.views.get_diode_usernames" - ) as mock_get_diode_usernames, - mock.patch( - "netbox_diode_plugin.views.Token.objects.filter" - ) as mock_token_objects_filter, - ): - mock_get_diode_usernames.return_value = { - "diode_to_netbox": "diode-to-netbox", - "netbox_to_diode": "fake-netbox-to-diode", - "diode": "diode-ingestion", - } - mock_token_objects_filter.return_value.exists.return_value = False - - response = self.view.get(self.request) - - self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(response.url, reverse("plugins:netbox_diode_plugin:setup")) - class SettingsEditViewTestCase(TestCase): """Test case for the SettingsEditView.""" @@ -275,164 +220,3 @@ def test_settings_update_disallowed_on_post_method(self): str(request._messages._queued_messages[0]), "The Diode target is not allowed to be modified.", ) - - -class SetupViewTestCase(TestCase): - """Test case for the SetupView.""" - - def setUp(self): - """Setup the test case.""" - self.path = reverse("plugins:netbox_diode_plugin:setup") - self.request_factory = RequestFactory() - self.view = SetupView() - - def test_get_method_redirects_to_login_page_for_unauthenticated_user(self): - """Test that the get method redirects an authenticated user to login page.""" - request = self.request_factory.get(self.path) - request.user = AnonymousUser() - self.view.setup(request) - - response = self.view.get(request) - self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(response.url, f"/netbox/login/?next={self.path}") - - def test_all_users_with_tokens_found(self): - """Test that the setup with all users and tokens 
displays correct data.""" - user = User.objects.create_user("foo", password="pass") - user.is_staff = True - - request = self.request_factory.get(self.path) - request.user = user - request.htmx = None - self.view.setup(request) - - users = { - "diode-to-netbox": User.objects.get(username="diode-to-netbox"), - "netbox-to-diode": User.objects.get(username="netbox-to-diode"), - "diode-ingestion": User.objects.get(username="diode-ingestion"), - } - - response = self.view.get(request) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertIn("Diode users and API Keys", str(response.content)) - self.assertIn("diode-to-netbox", str(response.content)) - self.assertIn("netbox-to-diode", str(response.content)) - self.assertIn("diode-ingestion", str(response.content)) - self.assertIn(Token.objects.get(user=users.get("diode-to-netbox")).key, str(response.content)) - self.assertIn(Token.objects.get(user=users.get("netbox-to-diode")).key, str(response.content)) - self.assertIn(Token.objects.get(user=users.get("diode-ingestion")).key, str(response.content)) - - def test_not_all_users_with_tokens_found(self): - """Test that the setup with all users and tokens displays correct data.""" - user = User.objects.create_user("foo", password="pass") - user.is_staff = True - - request = self.request_factory.get(self.path) - request.user = user - request.htmx = None - self.view.setup(request) - - with mock.patch( - "netbox_diode_plugin.views.get_user_model" - ) as mock_get_user_model: - mock_get_user_model.return_value.objects.get.side_effect = [ - User.objects.get(username="diode-to-netbox"), - User.DoesNotExist, - User.objects.get(username="diode-ingestion"), - ] - - response = self.view.get(request) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertIn("Diode users and API Keys", str(response.content)) - self.assertIn("diode-to-netbox", str(response.content)) - self.assertIn("netbox-to-diode", str(response.content)) - 
self.assertIn("diode-ingestion", str(response.content)) - - def test_post_method_redirects_to_login_page_for_unauthenticated_user(self): - """Test that the post method redirects an authenticated user to login page.""" - request = self.request_factory.get(self.path) - request.user = AnonymousUser() - self.view.setup(request) - - response = self.view.post(request) - self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(response.url, f"/netbox/login/?next={self.path}") - - def test_post_method_creates_users_and_tokens(self): - """Test that the post method creates users and tokens.""" - user = User.objects.create_user("foo", password="pass") - user.is_staff = True - - request = self.request_factory.post(self.path) - request.user = user - request.htmx = None - - with mock.patch( - "netbox_diode_plugin.views.SetupView._retrieve_users" - ) as mock_retrieve_users: - mock_retrieve_users.return_value = { - "diode_to_netbox": { - "username": "diode-to-netbox-1", - "user": None, - "api_key": None, - "api_key_env_var_name": "DIODE_TO_NETBOX_API_KEY", - "predefined_api_key": "be9b2530d690f07066fa8c37a4e054ff36cbb7d3", - }, - "netbox_to_diode": { - "username": "netbox-to-diode-1", - "user": None, - "api_key": None, - "api_key_env_var_name": "NETBOX_TO_DIODE_API_KEY", - "predefined_api_key": "61f693dc5ac62d150a13d462beb29f6d7e82b365", - }, - "diode": { - "username": "diode-ingestion-1", - "user": None, - "api_key": None, - "api_key_env_var_name": "DIODE_API_KEY", - "predefined_api_key": "20590746f3c5ab8ccccb6adcb1d5e101ebd254e8", - }, - } - self.view.setup(request) - - response = self.view.post(request) - self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual( - response.url, reverse("plugins:netbox_diode_plugin:settings") - ) - - for user_type, user_info in mock_retrieve_users.return_value.items(): - user = User.objects.get(username=user_info.get("username")) - self.assertTrue(user) - 
self.assertEqual(Token.objects.get(user=user).key, user_info.get("predefined_api_key")) - - def test_post_method_displays_form_on_invalid_data(self): - """Test that the post method displays the form on invalid data.""" - user = User.objects.create_user("foo", password="pass") - user.is_staff = True - - request = self.request_factory.post(self.path) - request.user = user - request.htmx = None - - with mock.patch( - "netbox_diode_plugin.views.SetupView._retrieve_users" - ) as mock_retrieve_users: - mock_retrieve_users.return_value = { - "diode_to_netbox": { - "username": "diode-to-netbox-1", - "user": None, - "api_key": None, - "api_key_env_var_name": "DIODE_TO_NETBOX_API_KEY", - "predefined_api_key": None, - }, - } - request.POST = { - "diode_to_netbox_api_key": "foobar", - } - self.view.setup(request) - - response = self.view.post(request) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertIn("Ensure this value has at least 40 characters (it has 6).", str(response.content)) - From 14149acbb708d1e1f2e06074d88597c63b23776c Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Tue, 22 Apr 2025 19:38:39 +0200 Subject: [PATCH 34/52] chore: update copyright year in all plugin files to 2025 - remove unused imports Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/__init__.py | 4 +- netbox_diode_plugin/api/__init__.py | 2 +- netbox_diode_plugin/api/applier.py | 4 +- netbox_diode_plugin/api/authentication.py | 5 +-- netbox_diode_plugin/api/matcher.py | 7 +--- netbox_diode_plugin/api/permissions.py | 3 +- netbox_diode_plugin/api/plugin_utils.py | 6 +-- netbox_diode_plugin/api/serializers.py | 2 +- netbox_diode_plugin/api/transformer.py | 2 +- netbox_diode_plugin/api/urls.py | 2 +- netbox_diode_plugin/api/views.py | 4 +- netbox_diode_plugin/forms.py | 2 +- netbox_diode_plugin/migrations/__init__.py | 2 +- netbox_diode_plugin/models.py | 2 +- netbox_diode_plugin/navigation.py | 2 +- netbox_diode_plugin/tests/__init__.py | 2 +- 
.../tests/test_api_apply_change_set.py | 22 ++--------- .../tests/test_api_diff_and_apply.py | 9 +++-- .../tests/test_api_generate_diff.py | 6 +-- .../tests/test_authentication.py | 39 +++++++++---------- netbox_diode_plugin/tests/test_forms.py | 2 +- netbox_diode_plugin/tests/test_models.py | 2 +- .../tests/test_plugin_config.py | 8 +--- netbox_diode_plugin/tests/test_version.py | 2 +- netbox_diode_plugin/tests/test_views.py | 2 +- netbox_diode_plugin/urls.py | 2 +- netbox_diode_plugin/version.py | 2 +- netbox_diode_plugin/views.py | 2 +- 28 files changed, 61 insertions(+), 88 deletions(-) diff --git a/netbox_diode_plugin/__init__.py b/netbox_diode_plugin/__init__.py index 25aa18f..16f718d 100644 --- a/netbox_diode_plugin/__init__.py +++ b/netbox_diode_plugin/__init__.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. """Diode NetBox Plugin.""" from netbox.plugins import PluginConfig @@ -19,7 +19,7 @@ class NetBoxDiodePluginConfig(PluginConfig): default_settings = { # Default Diode gRPC target for communication with Diode server "diode_target": "grpc://localhost:8080/diode", - + # Default username associated with changes applied via plugin "diode_username": "diode", } diff --git a/netbox_diode_plugin/api/__init__.py b/netbox_diode_plugin/api/__init__.py index 97ca126..f6e265b 100644 --- a/netbox_diode_plugin/api/__init__.py +++ b/netbox_diode_plugin/api/__init__.py @@ -1,3 +1,3 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. """Diode NetBox Plugin - API.""" diff --git a/netbox_diode_plugin/api/applier.py b/netbox_diode_plugin/api/applier.py index 0267302..17dde07 100644 --- a/netbox_diode_plugin/api/applier.py +++ b/netbox_diode_plugin/api/applier.py @@ -1,12 +1,10 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. 
"""Diode NetBox Plugin - API - Applier.""" import logging -from django.apps import apps -from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ObjectDoesNotExist from django.db import models from rest_framework.exceptions import ValidationError as ValidationError diff --git a/netbox_diode_plugin/api/authentication.py b/netbox_diode_plugin/api/authentication.py index 52af41c..379c92f 100644 --- a/netbox_diode_plugin/api/authentication.py +++ b/netbox_diode_plugin/api/authentication.py @@ -10,10 +10,7 @@ from rest_framework.authentication import BaseAuthentication from rest_framework.exceptions import AuthenticationFailed -from netbox_diode_plugin.plugin_config import ( - get_diode_auth_introspect_url, - get_diode_user, -) +from netbox_diode_plugin.plugin_config import get_diode_auth_introspect_url, get_diode_user logger = logging.getLogger("netbox.diode_data") diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py index c0ee706..9bfebcb 100644 --- a/netbox_diode_plugin/api/matcher.py +++ b/netbox_diode_plugin/api/matcher.py @@ -1,16 +1,13 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. 
"""Diode NetBox Plugin - API - Object matching utilities.""" -import copy import logging from dataclasses import dataclass from functools import cache, lru_cache from typing import Type import netaddr -from core.models import ObjectType as NetBoxType -from django.conf import settings from django.contrib.contenttypes.fields import ContentType from django.core.exceptions import FieldDoesNotExist from django.db import models @@ -20,7 +17,7 @@ from django.db.models.query_utils import Q from extras.models.customfields import CustomField -from .common import AutoSlug, UnresolvedReference +from .common import UnresolvedReference from .plugin_utils import content_type_id, get_object_type, get_object_type_model logger = logging.getLogger(__name__) diff --git a/netbox_diode_plugin/api/permissions.py b/netbox_diode_plugin/api/permissions.py index f0a9a89..89a4582 100644 --- a/netbox_diode_plugin/api/permissions.py +++ b/netbox_diode_plugin/api/permissions.py @@ -2,11 +2,12 @@ # Copyright 2025 NetBox Labs, Inc. """Diode NetBox Plugin - API Permissions.""" -from rest_framework.permissions import SAFE_METHODS, BasePermission +from rest_framework.permissions import BasePermission class IsDiodeOAuth2Authenticated(BasePermission): """Check if the request is authenticated via OAuth2.""" def has_permission(self, request, view): + """Check if the request is authenticated.""" return bool(getattr(request.user, "is_authenticated", False)) diff --git a/netbox_diode_plugin/api/plugin_utils.py b/netbox_diode_plugin/api/plugin_utils.py index a5d9cef..9a8a752 100644 --- a/netbox_diode_plugin/api/plugin_utils.py +++ b/netbox_diode_plugin/api/plugin_utils.py @@ -3,17 +3,17 @@ # Generated code. DO NOT EDIT. 
# Timestamp: 2025-04-13 16:50:25Z -from dataclasses import dataclass import datetime import decimal -from functools import lru_cache import logging +from dataclasses import dataclass +from functools import lru_cache from typing import Type +import netaddr from core.models import ObjectType as NetBoxType from django.contrib.contenttypes.models import ContentType from django.db import models -import netaddr from rest_framework.exceptions import ValidationError logger = logging.getLogger(__name__) diff --git a/netbox_diode_plugin/api/serializers.py b/netbox_diode_plugin/api/serializers.py index 60e2860..fae1130 100644 --- a/netbox_diode_plugin/api/serializers.py +++ b/netbox_diode_plugin/api/serializers.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. """Diode NetBox Plugin - Serializers.""" from netbox.api.serializers import NetBoxModelSerializer diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index f95ec52..1c3723d 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. """Diode NetBox Plugin - API - Object resolution for diffing.""" import copy diff --git a/netbox_diode_plugin/api/urls.py b/netbox_diode_plugin/api/urls.py index cb6b3d4..c41963a 100644 --- a/netbox_diode_plugin/api/urls.py +++ b/netbox_diode_plugin/api/urls.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. """Diode NetBox Plugin - API URLs.""" from django.urls import include, path diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index 6049d3f..dc0f463 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -1,7 +1,6 @@ #!/usr/bin/env python # Copyright 2025 NetBox Labs, Inc. 
"""Diode NetBox Plugin - API Views.""" -import json import logging import re @@ -9,14 +8,13 @@ from django.db import transaction from rest_framework import views from rest_framework.exceptions import ValidationError -from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response from netbox_diode_plugin.api.applier import apply_changeset +from netbox_diode_plugin.api.authentication import DiodeOAuth2Authentication from netbox_diode_plugin.api.common import Change, ChangeSet, ChangeSetException, ChangeSetResult from netbox_diode_plugin.api.differ import generate_changeset from netbox_diode_plugin.api.permissions import IsDiodeOAuth2Authenticated -from netbox_diode_plugin.api.authentication import DiodeOAuth2Authentication logger = logging.getLogger("netbox.diode_data") diff --git a/netbox_diode_plugin/forms.py b/netbox_diode_plugin/forms.py index 512b6b2..5bec310 100644 --- a/netbox_diode_plugin/forms.py +++ b/netbox_diode_plugin/forms.py @@ -1,5 +1,5 @@ # !/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. """Diode NetBox Plugin - Forms.""" from netbox.forms import NetBoxModelForm from netbox.plugins import get_plugin_config diff --git a/netbox_diode_plugin/migrations/__init__.py b/netbox_diode_plugin/migrations/__init__.py index 7cd3268..86788e5 100644 --- a/netbox_diode_plugin/migrations/__init__.py +++ b/netbox_diode_plugin/migrations/__init__.py @@ -1,3 +1,3 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. """Diode Netbox Plugin - Database migrations.""" diff --git a/netbox_diode_plugin/models.py b/netbox_diode_plugin/models.py index 86e0271..9079f9e 100644 --- a/netbox_diode_plugin/models.py +++ b/netbox_diode_plugin/models.py @@ -1,5 +1,5 @@ # !/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. 
"""Diode NetBox Plugin - Models.""" from urllib.parse import urlparse diff --git a/netbox_diode_plugin/navigation.py b/netbox_diode_plugin/navigation.py index dc70888..4fb18ef 100644 --- a/netbox_diode_plugin/navigation.py +++ b/netbox_diode_plugin/navigation.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. """Diode NetBox Plugin - Navigation.""" from netbox.plugins import PluginMenu, PluginMenuItem diff --git a/netbox_diode_plugin/tests/__init__.py b/netbox_diode_plugin/tests/__init__.py index 51d6d6b..fa2c4b7 100644 --- a/netbox_diode_plugin/tests/__init__.py +++ b/netbox_diode_plugin/tests/__init__.py @@ -1,3 +1,3 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. """Diode NetBox Plugin.""" diff --git a/netbox_diode_plugin/tests/test_api_apply_change_set.py b/netbox_diode_plugin/tests/test_api_apply_change_set.py index 4315def..8f968b0 100644 --- a/netbox_diode_plugin/tests/test_api_apply_change_set.py +++ b/netbox_diode_plugin/tests/test_api_apply_change_set.py @@ -1,35 +1,21 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. 
"""Diode NetBox Plugin - Tests.""" import uuid from unittest import mock -from dcim.models import ( - Device, - DeviceRole, - DeviceType, - Interface, - Manufacturer, - Rack, - Site, -) +from dcim.models import Device, DeviceRole, DeviceType, Interface, Manufacturer, Rack, Site from django.contrib.auth import get_user_model from django.contrib.contenttypes.models import ContentType from ipam.models import ASN, RIR, IPAddress, Prefix from netaddr import IPNetwork from rest_framework import status -from users.models import Token from utilities.testing import APITestCase -from virtualization.models import ( - Cluster, - ClusterType, - VirtualMachine, - VMInterface, -) +from virtualization.models import Cluster, ClusterType, VirtualMachine from netbox_diode_plugin.api.authentication import DiodeOAuth2Authentication -from netbox_diode_plugin.plugin_config import get_diode_user +from netbox_diode_plugin.plugin_config import get_diode_user User = get_user_model() diff --git a/netbox_diode_plugin/tests/test_api_diff_and_apply.py b/netbox_diode_plugin/tests/test_api_diff_and_apply.py index 12762be..d148c84 100644 --- a/netbox_diode_plugin/tests/test_api_diff_and_apply.py +++ b/netbox_diode_plugin/tests/test_api_diff_and_apply.py @@ -1,13 +1,14 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. 
"""Diode NetBox Plugin - Tests.""" import copy import datetime import decimal import logging -from uuid import uuid4 from unittest import mock +from uuid import uuid4 + import netaddr from circuits.models import Circuit from core.models import ObjectType @@ -32,7 +33,7 @@ def setUp(self): """Set up the test case.""" self.diff_url = "/netbox/api/plugins/diode/generate-diff/" self.apply_url = "/netbox/api/plugins/diode/apply-change-set/" - + self.authorization_header = {"Authorization": "Bearer mocked_oauth_token"} self.diode_user = get_diode_user() self.auth_patcher = mock.patch.object( @@ -93,7 +94,7 @@ def tearDown(self): """Clean up after tests.""" self.auth_patcher.stop() super().tearDown() - + def test_generate_diff_and_apply_create_interface_with_tags(self): """Test generate diff and apply create interface with tags.""" interface_uuid = str(uuid4()) diff --git a/netbox_diode_plugin/tests/test_api_generate_diff.py b/netbox_diode_plugin/tests/test_api_generate_diff.py index 19c4177..f9043ef 100644 --- a/netbox_diode_plugin/tests/test_api_generate_diff.py +++ b/netbox_diode_plugin/tests/test_api_generate_diff.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. 
"""Diode NetBox Plugin - Tests.""" from unittest import mock @@ -7,7 +7,6 @@ from core.models import ObjectType from dcim.models import Manufacturer, RackType, Site -from django.contrib.auth import get_user_model from extras.models import CustomField from extras.models.customfields import CustomFieldTypeChoices from rest_framework import status @@ -16,6 +15,7 @@ from netbox_diode_plugin.api.authentication import DiodeOAuth2Authentication from netbox_diode_plugin.plugin_config import get_diode_user + class GenerateDiffTestCase(APITestCase): """GenerateDiff test cases.""" @@ -33,7 +33,7 @@ def setUp(self): self.auth_patcher.start() self.object_type = ObjectType.objects.get_for_model(Site) - + self.uuid_field = CustomField.objects.create( name='myuuid', type=CustomFieldTypeChoices.TYPE_TEXT, diff --git a/netbox_diode_plugin/tests/test_authentication.py b/netbox_diode_plugin/tests/test_authentication.py index 8042d37..b6c43e0 100644 --- a/netbox_diode_plugin/tests/test_authentication.py +++ b/netbox_diode_plugin/tests/test_authentication.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. 
"""Diode NetBox Plugin - Authentication Tests.""" from unittest import mock @@ -7,11 +7,10 @@ from django.core.cache import cache from django.test import TestCase from rest_framework.exceptions import AuthenticationFailed -from rest_framework.request import Request from rest_framework.test import APIRequestFactory from netbox_diode_plugin.api.authentication import DiodeOAuth2Authentication -from netbox_diode_plugin.plugin_config import get_diode_user, get_diode_auth_introspect_url +from netbox_diode_plugin.plugin_config import get_diode_user class DiodeOAuth2AuthenticationTestCase(TestCase): @@ -68,7 +67,7 @@ def test_authenticate_cached_token(self): """Test authentication with cached token.""" self.cache_get_mock.return_value = self.diode_user request = self.factory.get('/', HTTP_AUTHORIZATION=f'Bearer {self.valid_token}') - + user, _ = self.auth.authenticate(request) self.assertEqual(user, self.diode_user) self.cache_get_mock.assert_called_once() @@ -77,9 +76,9 @@ def test_authenticate_invalid_token(self): """Test authentication with invalid token.""" self.cache_get_mock.return_value = None self.requests_mock.return_value.json.return_value = {'active': False} - + request = self.factory.get('/', HTTP_AUTHORIZATION=f'Bearer {self.invalid_token}') - + with self.assertRaises(AuthenticationFailed): self.auth.authenticate(request) @@ -90,9 +89,9 @@ def test_authenticate_token_without_required_scope(self): 'active': True, 'scope': 'other:scope' } - + request = self.factory.get('/', HTTP_AUTHORIZATION=f'Bearer {self.token_without_scope}') - + with self.assertRaises(AuthenticationFailed): self.auth.authenticate(request) @@ -105,9 +104,9 @@ def test_authenticate_token_with_required_scope(self): 'exp': 1000, 'iat': 500 } - + request = self.factory.get('/', HTTP_AUTHORIZATION=f'Bearer {self.token_with_scope}') - + user, _ = self.auth.authenticate(request) self.assertEqual(user, self.diode_user) self.cache_set_mock.assert_called_once() @@ -116,9 +115,9 @@ def 
test_authenticate_token_introspection_failure(self): """Test authentication when token introspection fails.""" self.cache_get_mock.return_value = None self.requests_mock.side_effect = Exception("Introspection failed") - + request = self.factory.get('/', HTTP_AUTHORIZATION=f'Bearer {self.valid_token}') - + with self.assertRaises(AuthenticationFailed): self.auth.authenticate(request) @@ -129,25 +128,25 @@ def test_authenticate_token_with_default_expiry(self): 'active': True, 'scope': 'default:diode:netbox' } - + request = self.factory.get('/', HTTP_AUTHORIZATION=f'Bearer {self.token_with_scope}') - + user, _ = self.auth.authenticate(request) self.assertEqual(user, self.diode_user) - + self.cache_set_mock.assert_called_once() - + # Get the actual call arguments call_args = self.cache_set_mock.call_args if not call_args: self.fail("Cache set was not called with any arguments") - + # The cache key should start with 'diode:oauth2:introspect:' cache_key = call_args.args[0] self.assertTrue(cache_key.startswith('diode:oauth2:introspect:')) - + # The cached value should be the diode user self.assertEqual(call_args.args[1], self.diode_user) - + # The timeout should be 300 (default) - self.assertEqual(call_args.kwargs['timeout'], 300) \ No newline at end of file + self.assertEqual(call_args.kwargs['timeout'], 300) diff --git a/netbox_diode_plugin/tests/test_forms.py b/netbox_diode_plugin/tests/test_forms.py index 63d45de..4afd4f1 100644 --- a/netbox_diode_plugin/tests/test_forms.py +++ b/netbox_diode_plugin/tests/test_forms.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. 
"""Diode NetBox Plugin - Tests.""" from unittest import mock diff --git a/netbox_diode_plugin/tests/test_models.py b/netbox_diode_plugin/tests/test_models.py index 80ab215..fd9571d 100644 --- a/netbox_diode_plugin/tests/test_models.py +++ b/netbox_diode_plugin/tests/test_models.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. """Diode NetBox Plugin - Tests.""" from django.core.exceptions import ValidationError from django.test import TestCase diff --git a/netbox_diode_plugin/tests/test_plugin_config.py b/netbox_diode_plugin/tests/test_plugin_config.py index 662771c..ade00c7 100644 --- a/netbox_diode_plugin/tests/test_plugin_config.py +++ b/netbox_diode_plugin/tests/test_plugin_config.py @@ -1,15 +1,11 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. """Diode NetBox Plugin - Tests.""" -from unittest.mock import patch from django.contrib.auth import get_user_model from django.test import TestCase -from netbox_diode_plugin.plugin_config import ( - get_diode_auth_introspect_url, - get_diode_user, -) +from netbox_diode_plugin.plugin_config import get_diode_auth_introspect_url, get_diode_user User = get_user_model() diff --git a/netbox_diode_plugin/tests/test_version.py b/netbox_diode_plugin/tests/test_version.py index ad87bce..4d65341 100644 --- a/netbox_diode_plugin/tests/test_version.py +++ b/netbox_diode_plugin/tests/test_version.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. """Diode NetBox Plugin - Tests.""" from django.test import TestCase diff --git a/netbox_diode_plugin/tests/test_views.py b/netbox_diode_plugin/tests/test_views.py index bfeb39b..cf5d93a 100644 --- a/netbox_diode_plugin/tests/test_views.py +++ b/netbox_diode_plugin/tests/test_views.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. 
"""Diode NetBox Plugin - Tests.""" from unittest import mock diff --git a/netbox_diode_plugin/urls.py b/netbox_diode_plugin/urls.py index bd12a97..abe6a0b 100644 --- a/netbox_diode_plugin/urls.py +++ b/netbox_diode_plugin/urls.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. """Diode Netbox Plugin - URLs.""" from django.urls import path diff --git a/netbox_diode_plugin/version.py b/netbox_diode_plugin/version.py index 2de2087..e6a7e72 100644 --- a/netbox_diode_plugin/version.py +++ b/netbox_diode_plugin/version.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. """Version stamp.""" # These properties are injected at build time by the build process. diff --git a/netbox_diode_plugin/views.py b/netbox_diode_plugin/views.py index 8623f5a..b0e4a7a 100644 --- a/netbox_diode_plugin/views.py +++ b/netbox_diode_plugin/views.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc +# Copyright 2025 NetBox Labs, Inc. 
"""Diode NetBox Plugin - Views.""" from django.conf import settings as netbox_settings from django.contrib import messages From 35286bc70b902eeedb042c45f01aa6e3983bfc4c Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Tue, 22 Apr 2025 20:40:16 +0200 Subject: [PATCH 35/52] fix tests Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/differ.py | 4 -- netbox_diode_plugin/api/matcher.py | 2 +- .../tests/test_api_diff_and_apply.py | 4 +- .../tests/test_api_generate_diff.py | 1 - netbox_diode_plugin/tests/test_updates.py | 37 +++++++++---------- 5 files changed, 21 insertions(+), 27 deletions(-) diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index 07ee4d2..0873d54 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -4,13 +4,9 @@ import copy import datetime -import decimal import logging -import netaddr from django.contrib.contenttypes.models import ContentType -from django.db.backends.postgresql.psycopg_any import NumericRange -from netaddr.eui import EUI from rest_framework import serializers from utilities.data import shallow_compare_dict diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py index 909f841..ef4c1c8 100644 --- a/netbox_diode_plugin/api/matcher.py +++ b/netbox_diode_plugin/api/matcher.py @@ -17,7 +17,7 @@ from django.db.models.query_utils import Q from extras.models.customfields import CustomField -from .common import _TRACE, AutoSlug, UnresolvedReference +from .common import _TRACE, UnresolvedReference from .plugin_utils import content_type_id, get_object_type, get_object_type_model logger = logging.getLogger(__name__) diff --git a/netbox_diode_plugin/tests/test_api_diff_and_apply.py b/netbox_diode_plugin/tests/test_api_diff_and_apply.py index eb4308b..fadb0b9 100644 --- a/netbox_diode_plugin/tests/test_api_diff_and_apply.py +++ b/netbox_diode_plugin/tests/test_api_diff_and_apply.py @@ -1230,13 +1230,13 @@ def 
test_generate_diff_and_apply_module_bay_circular_ref_fails(self): } } response1 = self.client.post( - self.diff_url, data=payload, format="json", **self.user_header + self.diff_url, data=payload, format="json", **self.authorization_header ) self.assertEqual(response1.status_code, status.HTTP_200_OK) diff = response1.json().get("change_set", {}) response2 = self.client.post( - self.apply_url, data=diff, format="json", **self.user_header + self.apply_url, data=diff, format="json", **self.authorization_header ) self.assertEqual(response2.status_code, status.HTTP_400_BAD_REQUEST) diff --git a/netbox_diode_plugin/tests/test_api_generate_diff.py b/netbox_diode_plugin/tests/test_api_generate_diff.py index c68b1a4..a05e5ef 100644 --- a/netbox_diode_plugin/tests/test_api_generate_diff.py +++ b/netbox_diode_plugin/tests/test_api_generate_diff.py @@ -16,7 +16,6 @@ from netbox_diode_plugin.api.authentication import DiodeOAuth2Authentication from netbox_diode_plugin.plugin_config import get_diode_user - logger = logging.getLogger(__name__) def _get_error(response, object_name, field): diff --git a/netbox_diode_plugin/tests/test_updates.py b/netbox_diode_plugin/tests/test_updates.py index d2c9fc5..9c2a081 100644 --- a/netbox_diode_plugin/tests/test_updates.py +++ b/netbox_diode_plugin/tests/test_updates.py @@ -2,31 +2,22 @@ # Copyright 2024 NetBox Labs Inc """Diode NetBox Plugin - Tests.""" -import copy -import datetime -import decimal import inspect import json import logging import os -from functools import wraps +from unittest import mock -from django.contrib.auth import get_user_model -from django.db import models -from django.db.models import QuerySet from rest_framework import status -from users.models import Token from utilities.testing import APITestCase +from netbox_diode_plugin.api.authentication import DiodeOAuth2Authentication from netbox_diode_plugin.api.common import harmonize_formats from netbox_diode_plugin.api.plugin_utils import get_object_type_model +from 
netbox_diode_plugin.plugin_config import get_diode_user logger = logging.getLogger(__name__) -User = get_user_model() - -def _get_error(response, object_name, field): - return response.json().get("errors", {}).get(object_name, {}).get(field, []) def load_test_cases(cls): """Class decorator to load test cases and create test methods.""" @@ -54,7 +45,7 @@ def test_func(self): self.assertEqual(res.status_code, status.HTTP_200_OK) diff = res.json().get("change_set", {}) res = self.client.post( - self.apply_url, data=diff, format="json", **self.user_header + self.apply_url, data=diff, format="json", **self.authorization_header ) self.assertEqual(res.status_code, status.HTTP_200_OK) # lookup the object and check fields @@ -87,7 +78,7 @@ def test_func(self): diff = res.json().get("change_set", {}) res = self.client.post( - self.apply_url, data=diff, format="json", **self.user_header + self.apply_url, data=diff, format="json", **self.authorization_header ) self.assertEqual(res.status_code, status.HTTP_200_OK) obj = model.objects.get(**case["lookup"]) @@ -118,11 +109,19 @@ def setUp(self): """Set up the test case.""" self.diff_url = "/netbox/api/plugins/diode/generate-diff/" self.apply_url = "/netbox/api/plugins/diode/apply-change-set/" - self.user = User.objects.create_user(username="testcommonuser") - self.user_token = Token.objects.create(user=self.user) - self.user_header = {"HTTP_AUTHORIZATION": f"Token {self.user_token.key}"} + self.authorization_header = {"Authorization": "Bearer mocked_oauth_token"} + self.diode_user = get_diode_user() + self.auth_patcher = mock.patch.object( + DiodeOAuth2Authentication, + 'authenticate', + return_value=(self.diode_user, None) + ) + self.auth_patcher.start() - self.add_permissions("netbox_diode_plugin.add_diode") + def tearDown(self): + """Clean up after tests.""" + self.auth_patcher.stop() + super().tearDown() def _follow_path(self, obj, path): cur = obj @@ -171,7 +170,7 @@ def _check_expect(self, obj, expect): def 
send_request(self, url, payload, status_code=status.HTTP_200_OK): """Post the payload to the url and return the response.""" response = self.client.post( - url, data=payload, format="json", **self.user_header + url, data=payload, format="json", **self.authorization_header ) self.assertEqual(response.status_code, status_code) return response From b66cbefd7983e9e1c1143680bab43feda45b361b Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Wed, 23 Apr 2025 11:39:07 +0200 Subject: [PATCH 36/52] feat: enhance OAuth2 authentication and permissions in Diode NetBox Plugin - Updated DiodeOAuth2Authentication to set user, token scopes, and token data on the request. - Introduced HasScope permission class to enforce required OAuth2 token scopes. - Refactored views to utilize IsAuthenticated and require_scopes for permission checks. - Adjusted unit tests to mock the updated authentication behavior. Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/authentication.py | 28 +++++----- netbox_diode_plugin/api/permissions.py | 43 ++++++++++++++- netbox_diode_plugin/api/views.py | 52 ++++++++++++------- .../tests/test_api_apply_change_set.py | 25 ++++++--- .../tests/test_api_diff_and_apply.py | 20 ++++--- .../tests/test_api_generate_diff.py | 20 ++++--- .../tests/test_authentication.py | 33 +++++------- netbox_diode_plugin/tests/test_updates.py | 20 ++++--- 8 files changed, 157 insertions(+), 84 deletions(-) diff --git a/netbox_diode_plugin/api/authentication.py b/netbox_diode_plugin/api/authentication.py index 379c92f..a3dc325 100644 --- a/netbox_diode_plugin/api/authentication.py +++ b/netbox_diode_plugin/api/authentication.py @@ -4,13 +4,17 @@ import hashlib import logging +from types import SimpleNamespace import requests from django.core.cache import cache from rest_framework.authentication import BaseAuthentication from rest_framework.exceptions import AuthenticationFailed -from netbox_diode_plugin.plugin_config import get_diode_auth_introspect_url, 
get_diode_user +from netbox_diode_plugin.plugin_config import ( + get_diode_auth_introspect_url, + get_diode_user, +) logger = logging.getLogger("netbox.diode_data") @@ -30,7 +34,11 @@ def authenticate(self, request): if not diode_user: raise AuthenticationFailed("Invalid token") - return (diode_user, None) + request.user = diode_user.user + request.token_scopes = diode_user.token_scopes + request.token_data = diode_user.token_data + + return (diode_user.user, None) def _introspect_token(self, token: str): """Introspect the token and return the client info.""" @@ -57,20 +65,12 @@ def _introspect_token(self, token: str): return None if data.get("active"): - # Check if token has the required scope - scopes = data.get("scope", "").split() - has_diode_to_netbox_scope = any( - scope.endswith(":diode:netbox") for scope in scopes + diode_user = SimpleNamespace( + user=get_diode_user(), + token_scopes=data.get("scope", "").split(), + token_data=data, ) - if not has_diode_to_netbox_scope: - logger.warning( - f"Diode Auth token with insufficient scopes: {scopes}" - ) - return None - - diode_user = get_diode_user() - expires_in = ( data.get("exp") - data.get("iat") if "exp" in data and "iat" in data diff --git a/netbox_diode_plugin/api/permissions.py b/netbox_diode_plugin/api/permissions.py index 89a4582..bc4ebd7 100644 --- a/netbox_diode_plugin/api/permissions.py +++ b/netbox_diode_plugin/api/permissions.py @@ -4,10 +4,49 @@ from rest_framework.permissions import BasePermission +NETBOX_READ_SCOPE = "netbox:read" +NETBOX_WRITE_SCOPE = "netbox:write" -class IsDiodeOAuth2Authenticated(BasePermission): - """Check if the request is authenticated via OAuth2.""" + +class IsAuthenticated(BasePermission): + """Check if the request is authenticated.""" def has_permission(self, request, view): """Check if the request is authenticated.""" return bool(getattr(request.user, "is_authenticated", False)) + + +class HasScope(BasePermission): + """ + Require one or more OAuth2 token scopes to 
access a view. + + Example usage: + permission_classes = [IsAuthenticated, HasScope("netbox:write")] + """ + + def __init__(self, *required_scopes): + """Initialize the permission.""" + self.required_scopes = required_scopes + + def has_permission(self, request, view): + """Check if the request has the required scopes.""" + token_scopes = getattr(request, "token_scopes", []) + if not token_scopes: + return False + + return all(scope in token_scopes for scope in self.required_scopes) + + +def require_scopes(*required_scopes): + """Require one or more OAuth2 token scopes to access a view.""" + + class ScopedPermission(BasePermission): + """Check if the request has the required scopes.""" + + def has_permission(self, request, view): + """Check if the request has the required scopes.""" + scopes = getattr(request, "token_scopes", []) + return all(scope in scopes for scope in required_scopes) + + ScopedPermission.__name__ = f"RequireScopes_{'_'.join(required_scopes)}" + return ScopedPermission diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index dc0f463..e704993 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -12,9 +12,19 @@ from netbox_diode_plugin.api.applier import apply_changeset from netbox_diode_plugin.api.authentication import DiodeOAuth2Authentication -from netbox_diode_plugin.api.common import Change, ChangeSet, ChangeSetException, ChangeSetResult +from netbox_diode_plugin.api.common import ( + Change, + ChangeSet, + ChangeSetException, + ChangeSetResult, +) from netbox_diode_plugin.api.differ import generate_changeset -from netbox_diode_plugin.api.permissions import IsDiodeOAuth2Authenticated +from netbox_diode_plugin.api.permissions import ( + NETBOX_READ_SCOPE, + NETBOX_WRITE_SCOPE, + IsAuthenticated, + require_scopes, +) logger = logging.getLogger("netbox.diode_data") @@ -35,11 +45,13 @@ def get_valid_entity_keys(model_name): This can be snake or lowerCamel case (both are valid for 
protoJSON) """ - s = re.sub(r'([A-Z0-9]{2,})([A-Z])([a-z])', r'\1_\2\3', model_name) - s = re.sub(r'([a-z])([A-Z])', r'\1_\2', s) - snake = re.sub(r'_+', '_', s.lower()) # snake - upperCamel = ''.join([word.capitalize() for word in snake.split("_")]) # upperCamelCase - lowerCamel = upperCamel[0].lower() + upperCamel[1:] # lowerCamelCase + s = re.sub(r"([A-Z0-9]{2,})([A-Z])([a-z])", r"\1_\2\3", model_name) + s = re.sub(r"([a-z])([A-Z])", r"\1_\2", s) + snake = re.sub(r"_+", "_", s.lower()) # snake + upperCamel = "".join( + [word.capitalize() for word in snake.split("_")] + ) # upperCamelCase + lowerCamel = upperCamel[0].lower() + upperCamel[1:] # lowerCamelCase return (snake, lowerCamel) @@ -48,7 +60,7 @@ class GenerateDiffView(views.APIView): """GenerateDiff view.""" authentication_classes = [DiodeOAuth2Authentication] - permission_classes = [IsDiodeOAuth2Authenticated] + permission_classes = [IsAuthenticated, require_scopes(NETBOX_READ_SCOPE)] def post(self, request, *args, **kwargs): """Generate diff for entity.""" @@ -56,6 +68,7 @@ def post(self, request, *args, **kwargs): return self._post(request, *args, **kwargs) except Exception: import traceback + traceback.print_exc() raise @@ -107,7 +120,7 @@ class ApplyChangeSetView(views.APIView): """ApplyChangeSet view.""" authentication_classes = [DiodeOAuth2Authentication] - permission_classes = [IsDiodeOAuth2Authenticated] + permission_classes = [IsAuthenticated, require_scopes(NETBOX_WRITE_SCOPE)] def post(self, request, *args, **kwargs): """Apply change set for entity.""" @@ -123,20 +136,21 @@ def _post(self, request, *args, **kwargs): data = request.data.copy() changes = [] - if 'changes' in data: + if "changes" in data: changes = [ Change( - change_type=change.get('change_type'), - object_type=change.get('object_type'), - object_id=change.get('object_id'), - ref_id=change.get('ref_id'), - data=change.get('data'), - before=change.get('before'), - new_refs=change.get('new_refs', []), - ) for change in 
data['changes'] + change_type=change.get("change_type"), + object_type=change.get("object_type"), + object_id=change.get("object_id"), + ref_id=change.get("ref_id"), + data=change.get("data"), + before=change.get("before"), + new_refs=change.get("new_refs", []), + ) + for change in data["changes"] ] change_set = ChangeSet( - id=data.get('id'), + id=data.get("id"), changes=changes, ) try: diff --git a/netbox_diode_plugin/tests/test_api_apply_change_set.py b/netbox_diode_plugin/tests/test_api_apply_change_set.py index 8f968b0..1a87984 100644 --- a/netbox_diode_plugin/tests/test_api_apply_change_set.py +++ b/netbox_diode_plugin/tests/test_api_apply_change_set.py @@ -3,6 +3,7 @@ """Diode NetBox Plugin - Tests.""" import uuid +from types import SimpleNamespace from unittest import mock from dcim.models import Device, DeviceRole, DeviceType, Interface, Manufacturer, Rack, Site @@ -27,14 +28,19 @@ class BaseApplyChangeSet(APITestCase): def setUp(self): """Set up test.""" - self.authorization_header = {"Authorization": "Bearer mocked_oauth_token"} - self.diode_user = get_diode_user() - self.auth_patcher = mock.patch.object( + self.authorization_header = {"HTTP_AUTHORIZATION": "Bearer mocked_oauth_token"} + self.diode_user = SimpleNamespace( + user = get_diode_user(), + token_scopes=["netbox:read", "netbox:write"], + token_data={"scope": "netbox:read netbox:write"} + ) + + self.introspect_patcher = mock.patch.object( DiodeOAuth2Authentication, - 'authenticate', - return_value=(self.diode_user, None) + '_introspect_token', + return_value=self.diode_user ) - self.auth_patcher.start() + self.introspect_patcher.start() rir = RIR.objects.create(name="RFC 6996", is_private=True) self.asns = [ASN(asn=65000 + i, rir=rir) for i in range(8)] @@ -160,13 +166,16 @@ def setUp(self): def tearDown(self): """Clean up after tests.""" - self.auth_patcher.stop() + self.introspect_patcher.stop() super().tearDown() def send_request(self, payload, status_code=status.HTTP_200_OK): """Post the 
payload to the url and return the response.""" response = self.client.post( - self.url, data=payload, format="json", **self.authorization_header + self.url, + data=payload, + format="json", + **self.authorization_header ) self.assertEqual(response.status_code, status_code) return response diff --git a/netbox_diode_plugin/tests/test_api_diff_and_apply.py b/netbox_diode_plugin/tests/test_api_diff_and_apply.py index fadb0b9..3847429 100644 --- a/netbox_diode_plugin/tests/test_api_diff_and_apply.py +++ b/netbox_diode_plugin/tests/test_api_diff_and_apply.py @@ -6,6 +6,7 @@ import datetime import decimal import logging +from types import SimpleNamespace from unittest import mock from uuid import uuid4 @@ -36,14 +37,19 @@ def setUp(self): self.diff_url = "/netbox/api/plugins/diode/generate-diff/" self.apply_url = "/netbox/api/plugins/diode/apply-change-set/" - self.authorization_header = {"Authorization": "Bearer mocked_oauth_token"} - self.diode_user = get_diode_user() - self.auth_patcher = mock.patch.object( + self.authorization_header = {"HTTP_AUTHORIZATION": "Bearer mocked_oauth_token"} + self.diode_user = SimpleNamespace( + user = get_diode_user(), + token_scopes=["netbox:read", "netbox:write"], + token_data={"scope": "netbox:read netbox:write"} + ) + + self.introspect_patcher = mock.patch.object( DiodeOAuth2Authentication, - 'authenticate', - return_value=(self.diode_user, None) + '_introspect_token', + return_value=self.diode_user ) - self.auth_patcher.start() + self.introspect_patcher.start() self.object_type = ObjectType.objects.get_for_model(Site) @@ -94,7 +100,7 @@ def setUp(self): def tearDown(self): """Clean up after tests.""" - self.auth_patcher.stop() + self.introspect_patcher.stop() super().tearDown() def test_generate_diff_and_apply_create_interface_with_tags(self): diff --git a/netbox_diode_plugin/tests/test_api_generate_diff.py b/netbox_diode_plugin/tests/test_api_generate_diff.py index a05e5ef..a1d1220 100644 --- 
a/netbox_diode_plugin/tests/test_api_generate_diff.py +++ b/netbox_diode_plugin/tests/test_api_generate_diff.py @@ -3,6 +3,7 @@ """Diode NetBox Plugin - Tests.""" import logging +from types import SimpleNamespace from unittest import mock from uuid import uuid4 @@ -29,14 +30,19 @@ def setUp(self): """Set up the test case.""" self.url = "/netbox/api/plugins/diode/generate-diff/" - self.authorization_header = {"Authorization": "Bearer mocked_oauth_token"} - self.diode_user = get_diode_user() - self.auth_patcher = mock.patch.object( + self.authorization_header = {"HTTP_AUTHORIZATION": "Bearer mocked_oauth_token"} + self.diode_user = SimpleNamespace( + user = get_diode_user(), + token_scopes=["netbox:read", "netbox:write"], + token_data={"scope": "netbox:read netbox:write"} + ) + + self.introspect_patcher = mock.patch.object( DiodeOAuth2Authentication, - 'authenticate', - return_value=(self.diode_user, None) + '_introspect_token', + return_value=self.diode_user ) - self.auth_patcher.start() + self.introspect_patcher.start() self.object_type = ObjectType.objects.get_for_model(Site) @@ -87,7 +93,7 @@ def setUp(self): def tearDown(self): """Clean up after tests.""" - self.auth_patcher.stop() + self.introspect_patcher.stop() super().tearDown() def test_generate_diff_create_site(self): diff --git a/netbox_diode_plugin/tests/test_authentication.py b/netbox_diode_plugin/tests/test_authentication.py index b6c43e0..528d107 100644 --- a/netbox_diode_plugin/tests/test_authentication.py +++ b/netbox_diode_plugin/tests/test_authentication.py @@ -2,6 +2,7 @@ # Copyright 2025 NetBox Labs, Inc. 
"""Diode NetBox Plugin - Authentication Tests.""" +from types import SimpleNamespace from unittest import mock from django.core.cache import cache @@ -20,7 +21,11 @@ def setUp(self): """Set up test case.""" self.auth = DiodeOAuth2Authentication() self.factory = APIRequestFactory() - self.diode_user = get_diode_user() + self.diode_user = SimpleNamespace( + user = get_diode_user(), + token_scopes=["netbox:read", "netbox:write"], + token_data={"scope": "netbox:read netbox:write"} + ) self.valid_token = "valid_oauth_token" self.invalid_token = "invalid_oauth_token" self.token_without_scope = "token_without_scope" @@ -69,7 +74,7 @@ def test_authenticate_cached_token(self): request = self.factory.get('/', HTTP_AUTHORIZATION=f'Bearer {self.valid_token}') user, _ = self.auth.authenticate(request) - self.assertEqual(user, self.diode_user) + self.assertEqual(user, self.diode_user.user) self.cache_get_mock.assert_called_once() def test_authenticate_invalid_token(self): @@ -82,25 +87,12 @@ def test_authenticate_invalid_token(self): with self.assertRaises(AuthenticationFailed): self.auth.authenticate(request) - def test_authenticate_token_without_required_scope(self): - """Test authentication with token missing required scope.""" - self.cache_get_mock.return_value = None - self.requests_mock.return_value.json.return_value = { - 'active': True, - 'scope': 'other:scope' - } - - request = self.factory.get('/', HTTP_AUTHORIZATION=f'Bearer {self.token_without_scope}') - - with self.assertRaises(AuthenticationFailed): - self.auth.authenticate(request) - def test_authenticate_token_with_required_scope(self): """Test authentication with token having required scope.""" self.cache_get_mock.return_value = None self.requests_mock.return_value.json.return_value = { 'active': True, - 'scope': 'default:diode:netbox', + 'scope': 'netbox:read netbox:write', 'exp': 1000, 'iat': 500 } @@ -108,7 +100,7 @@ def test_authenticate_token_with_required_scope(self): request = self.factory.get('/', 
HTTP_AUTHORIZATION=f'Bearer {self.token_with_scope}') user, _ = self.auth.authenticate(request) - self.assertEqual(user, self.diode_user) + self.assertEqual(user, self.diode_user.user) self.cache_set_mock.assert_called_once() def test_authenticate_token_introspection_failure(self): @@ -126,13 +118,13 @@ def test_authenticate_token_with_default_expiry(self): self.cache_get_mock.return_value = None self.requests_mock.return_value.json.return_value = { 'active': True, - 'scope': 'default:diode:netbox' + 'scope': 'netbox:read netbox:write' } request = self.factory.get('/', HTTP_AUTHORIZATION=f'Bearer {self.token_with_scope}') user, _ = self.auth.authenticate(request) - self.assertEqual(user, self.diode_user) + self.assertEqual(user, self.diode_user.user) self.cache_set_mock.assert_called_once() @@ -146,7 +138,8 @@ def test_authenticate_token_with_default_expiry(self): self.assertTrue(cache_key.startswith('diode:oauth2:introspect:')) # The cached value should be the diode user - self.assertEqual(call_args.args[1], self.diode_user) + self.assertEqual(call_args.args[1].user, self.diode_user.user) + self.assertEqual(call_args.args[1].token_scopes, self.diode_user.token_scopes) # The timeout should be 300 (default) self.assertEqual(call_args.kwargs['timeout'], 300) diff --git a/netbox_diode_plugin/tests/test_updates.py b/netbox_diode_plugin/tests/test_updates.py index 9c2a081..5481da9 100644 --- a/netbox_diode_plugin/tests/test_updates.py +++ b/netbox_diode_plugin/tests/test_updates.py @@ -6,6 +6,7 @@ import json import logging import os +from types import SimpleNamespace from unittest import mock from rest_framework import status @@ -109,18 +110,23 @@ def setUp(self): """Set up the test case.""" self.diff_url = "/netbox/api/plugins/diode/generate-diff/" self.apply_url = "/netbox/api/plugins/diode/apply-change-set/" - self.authorization_header = {"Authorization": "Bearer mocked_oauth_token"} - self.diode_user = get_diode_user() - self.auth_patcher = mock.patch.object( + 
self.authorization_header = {"HTTP_AUTHORIZATION": "Bearer mocked_oauth_token"} + self.diode_user = SimpleNamespace( + user = get_diode_user(), + token_scopes=["netbox:read", "netbox:write"], + token_data={"scope": "netbox:read netbox:write"} + ) + + self.introspect_patcher = mock.patch.object( DiodeOAuth2Authentication, - 'authenticate', - return_value=(self.diode_user, None) + '_introspect_token', + return_value=self.diode_user ) - self.auth_patcher.start() + self.introspect_patcher.start() def tearDown(self): """Clean up after tests.""" - self.auth_patcher.stop() + self.introspect_patcher.stop() super().tearDown() def _follow_path(self, obj, path): From c2f3a85f29e1b2ed1146f7fdb631ed9246147d22 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Wed, 23 Apr 2025 15:02:12 +0200 Subject: [PATCH 37/52] chore: remove unspecified diode_username_override Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/plugin_config.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/netbox_diode_plugin/plugin_config.py b/netbox_diode_plugin/plugin_config.py index 3005dc4..60dbcad 100644 --- a/netbox_diode_plugin/plugin_config.py +++ b/netbox_diode_plugin/plugin_config.py @@ -48,11 +48,6 @@ def get_diode_auth_introspect_url(): def get_diode_user(): """Returns the Diode user.""" diode_username = get_plugin_config("netbox_diode_plugin", "diode_username") - diode_username_override = get_plugin_config( - "netbox_diode_plugin", "diode_username_override" - ) - - diode_username = diode_username_override or diode_username try: diode_user = User.objects.get(username=diode_username) From 612c991a7915c1e4337ab306ff3f25d6ea263177 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Wed, 23 Apr 2025 15:45:50 +0200 Subject: [PATCH 38/52] refactor: rename permission scope constants for consistency Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/permissions.py | 6 +++--- netbox_diode_plugin/api/views.py | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) 
diff --git a/netbox_diode_plugin/api/permissions.py b/netbox_diode_plugin/api/permissions.py index bc4ebd7..344793b 100644 --- a/netbox_diode_plugin/api/permissions.py +++ b/netbox_diode_plugin/api/permissions.py @@ -4,8 +4,8 @@ from rest_framework.permissions import BasePermission -NETBOX_READ_SCOPE = "netbox:read" -NETBOX_WRITE_SCOPE = "netbox:write" +SCOPE_NETBOX_READ = "netbox:read" +SCOPE_NETBOX_WRITE = "netbox:write" class IsAuthenticated(BasePermission): @@ -21,7 +21,7 @@ class HasScope(BasePermission): Require one or more OAuth2 token scopes to access a view. Example usage: - permission_classes = [IsAuthenticated, HasScope("netbox:write")] + permission_classes = [IsAuthenticated, require_scopes(SCOPE_NETBOX_WRITE)] """ def __init__(self, *required_scopes): diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index e704993..99655e1 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -20,8 +20,8 @@ ) from netbox_diode_plugin.api.differ import generate_changeset from netbox_diode_plugin.api.permissions import ( - NETBOX_READ_SCOPE, - NETBOX_WRITE_SCOPE, + SCOPE_NETBOX_READ, + SCOPE_NETBOX_WRITE, IsAuthenticated, require_scopes, ) @@ -60,7 +60,7 @@ class GenerateDiffView(views.APIView): """GenerateDiff view.""" authentication_classes = [DiodeOAuth2Authentication] - permission_classes = [IsAuthenticated, require_scopes(NETBOX_READ_SCOPE)] + permission_classes = [IsAuthenticated, require_scopes(SCOPE_NETBOX_READ)] def post(self, request, *args, **kwargs): """Generate diff for entity.""" @@ -120,7 +120,7 @@ class ApplyChangeSetView(views.APIView): """ApplyChangeSet view.""" authentication_classes = [DiodeOAuth2Authentication] - permission_classes = [IsAuthenticated, require_scopes(NETBOX_WRITE_SCOPE)] + permission_classes = [IsAuthenticated, require_scopes(SCOPE_NETBOX_WRITE)] def post(self, request, *args, **kwargs): """Apply change set for entity.""" From 6bd12155da0cac2adbe69dc8426f4e3952437ba6 Mon 
Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Wed, 23 Apr 2025 17:55:42 +0200 Subject: [PATCH 39/52] refactor: remove redundant HasScope permission class Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/permissions.py | 21 --------------------- 1 file changed, 21 deletions(-) diff --git a/netbox_diode_plugin/api/permissions.py b/netbox_diode_plugin/api/permissions.py index 344793b..dfb2634 100644 --- a/netbox_diode_plugin/api/permissions.py +++ b/netbox_diode_plugin/api/permissions.py @@ -16,27 +16,6 @@ def has_permission(self, request, view): return bool(getattr(request.user, "is_authenticated", False)) -class HasScope(BasePermission): - """ - Require one or more OAuth2 token scopes to access a view. - - Example usage: - permission_classes = [IsAuthenticated, require_scopes(SCOPE_NETBOX_WRITE)] - """ - - def __init__(self, *required_scopes): - """Initialize the permission.""" - self.required_scopes = required_scopes - - def has_permission(self, request, view): - """Check if the request has the required scopes.""" - token_scopes = getattr(request, "token_scopes", []) - if not token_scopes: - return False - - return all(scope in token_scopes for scope in self.required_scopes) - - def require_scopes(*required_scopes): """Require one or more OAuth2 token scopes to access a view.""" From 0bfd34924d64fb1287ff88a2f1b981e4b8a095c9 Mon Sep 17 00:00:00 2001 From: Luke Tucker Date: Wed, 23 Apr 2025 16:48:43 -0400 Subject: [PATCH 40/52] fix: add rack role test --- .../tests/test_updates_cases.json | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/netbox_diode_plugin/tests/test_updates_cases.json b/netbox_diode_plugin/tests/test_updates_cases.json index 8d94c65..a9e6f21 100644 --- a/netbox_diode_plugin/tests/test_updates_cases.json +++ b/netbox_diode_plugin/tests/test_updates_cases.json @@ -4111,6 +4111,36 @@ "description": "Standard 42U server rack Updated" } }, + { + "name": "dcim_rackrole_1", + "object_type": "dcim.rackrole", + 
"lookup": {"name": "Network Equipment"}, + "create_expect": { + "name": "Network Equipment", + "description": "Dedicated racks for network infrastructure" + }, + "create": { + "rack_role": { + "name": "Network Equipment", + "slug": "network-equipment", + "color": "0000ff", + "description": "Dedicated racks for network infrastructure", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update": { + "rack_role": { + "name": "Network Equipment", + "slug": "network-equipment", + "color": "0000ff", + "description": "Dedicated racks for network infrastructure Updated", + "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] + } + }, + "update_expect": { + "description": "Dedicated racks for network infrastructure Updated" + } + }, { "name": "dcim_racktype_1", "object_type": "dcim.racktype", From 39a80a398c6c38fd08f8cf9ecb94496bdfd16a3c Mon Sep 17 00:00:00 2001 From: Luke Tucker Date: Wed, 23 Apr 2025 16:57:27 -0400 Subject: [PATCH 41/52] chore: remove trace logging --- netbox_diode_plugin/api/common.py | 1 - netbox_diode_plugin/api/matcher.py | 31 +------------------------ netbox_diode_plugin/api/plugin_utils.py | 5 ++-- netbox_diode_plugin/api/transformer.py | 17 +------------- 4 files changed, 5 insertions(+), 49 deletions(-) diff --git a/netbox_diode_plugin/api/common.py b/netbox_diode_plugin/api/common.py index 8a7d754..9872968 100644 --- a/netbox_diode_plugin/api/common.py +++ b/netbox_diode_plugin/api/common.py @@ -25,7 +25,6 @@ logger = logging.getLogger("netbox.diode_data") NON_FIELD_ERRORS = "__all__" -_TRACE = False @dataclass class UnresolvedReference: diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py index ef4c1c8..2cfb88f 100644 --- a/netbox_diode_plugin/api/matcher.py +++ b/netbox_diode_plugin/api/matcher.py @@ -17,7 +17,7 @@ from django.db.models.query_utils import Q from extras.models.customfields import CustomField -from .common import _TRACE, UnresolvedReference +from .common import UnresolvedReference from 
.plugin_utils import content_type_id, get_object_type, get_object_type_model logger = logging.getLogger(__name__) @@ -299,7 +299,6 @@ def _check_condition(self, data) -> bool: def _check_condition_1(self, data, condition) -> bool: if condition is None: return True - if _TRACE: logger.debug(f"checking condition {condition}") # noqa: E701 if isinstance(condition, tuple): return self._check_simple_condition(data, condition) @@ -310,7 +309,6 @@ def _check_condition_1(self, data, condition) -> bool: result = False break if condition.negated: - if _TRACE: logger.debug(f"negated condition {condition} => {not result}") # noqa: E701 return not result return result # TODO handle OR ? @@ -322,15 +320,12 @@ def _check_simple_condition(self, data, condition) -> bool: return True k, v = condition - if _TRACE: logger.debug(f"checking simple condition {k} => {v}") # noqa: E701 result = False if k.endswith("__isnull"): k = k[:-8] is_null = k not in data or data[k] is None - if _TRACE: logger.debug(f"checking isnull {k}? 
({is_null}) want {v}") # noqa: E701 result = is_null == v else: - if _TRACE: logger.debug(f"checking equality {k} => {data.get(k)} == {v}") # noqa: E701 result = k in data and data[k] == v return result @@ -346,7 +341,6 @@ def build_queryset(self, data) -> models.QuerySet: def _build_fields_queryset(self, data) -> models.QuerySet: # noqa: C901 """Builds a queryset for a simple set-of-fields constraint.""" if not self._check_condition(data): - if _TRACE: logger.debug(f" * cannot build fields queryset for {self.name} (condition not met)") # noqa: E701 return None data = self._prepare_data(data) @@ -354,14 +348,11 @@ def _build_fields_queryset(self, data) -> models.QuerySet: # noqa: C901 for field_name in self.fields: field = self.model_class._meta.get_field(field_name) if field_name not in data: - if _TRACE: logger.debug(f" * cannot build fields queryset for {self.name} (missing field {field_name})") # noqa: E701 return None # cannot match, missing field data lookup_value = data.get(field_name) if isinstance(lookup_value, UnresolvedReference): - if _TRACE: logger.debug(f" * cannot build fields queryset for {self.name} ({field_name} is unresolved reference)") # noqa: E701 return None # cannot match, missing field data if isinstance(lookup_value, dict): - if _TRACE: logger.debug(f" * cannot build fields queryset for {self.name} ({field_name} is dict)") # noqa: E701 return None # cannot match, missing field data lookup_kwargs[field.name] = lookup_value @@ -386,10 +377,8 @@ def _build_expressions_queryset(self, data) -> models.QuerySet: refs = [F(ref) for ref in _get_refs(expr)] for ref in refs: if ref not in replacements: - if _TRACE: logger.debug(f" * cannot build expr queryset for {self.name} (missing field {ref})") # noqa: E701 return None # cannot match, missing field data if isinstance(replacements[ref], UnresolvedReference): - if _TRACE: logger.debug(f" * cannot build expr queryset for {self.name} ({ref} is unresolved reference)") # noqa: E701 return None # cannot 
match, missing field data rhs = expr.replace_expressions(replacements) @@ -497,11 +486,9 @@ def ip_value(self, data: dict, field: str) -> str|None: def build_queryset(self, data: dict) -> models.QuerySet: """Build a queryset for the custom field.""" if not self.has_required_fields(data): - if _TRACE: logger.debug(f" * cannot build expr queryset for {self.name} (missing field {self.ip_field})") # noqa: E701 return None if not self._check_condition(data): - if _TRACE: logger.debug(f" * cannot build expr queryset for {self.name} (condition not met)") # noqa: E701 return None filter = { @@ -510,7 +497,6 @@ def build_queryset(self, data: dict) -> models.QuerySet: for field in self.ip_fields: value = self.ip_value(data, field) if value is None: - if _TRACE: logger.debug(f" * cannot build expr queryset for {self.name} (ip value is None)") # noqa: E701 return None filter[f'{field}__net_host'] = value @@ -562,24 +548,20 @@ def ip_value(self, data: dict, field: str) -> str|None: def build_queryset(self, data: dict) -> models.QuerySet: """Build a queryset for the custom field.""" if not self.has_required_fields(data): - if _TRACE: logger.debug(f" * cannot build expr queryset for {self.name} (missing field {self.ip_field})") # noqa: E701 return None if not self._check_condition(data): - if _TRACE: logger.debug(f" * cannot build expr queryset for {self.name} (condition not met)") # noqa: E701 return None filter = {} for field in self.ip_fields: value = self.ip_value(data, field) if value is None: - if _TRACE: logger.debug(f" * cannot build expr queryset for {self.name} (ip value is None)") # noqa: E701 return None filter[f'{field}__net_host'] = value vrf_id = data[self.vrf_field] if isinstance(vrf_id, UnresolvedReference): - if _TRACE: logger.debug(f" * cannot build expr queryset for {self.name} ({self.vrf_field} is unresolved reference)") # noqa: E701 return None filter[f'{self.vrf_field}'] = vrf_id @@ -794,11 +776,7 @@ def fingerprints(data: dict, object_type: str) -> 
list[str]: fp = matcher.fingerprint(data) if fp is not None: fps.append(fp) - if _TRACE: logger.debug(f" ** matcher {matcher.name} gave fingerprint {fp}") # noqa: E701 - else: - if _TRACE: logger.debug(f" ** skipped matcher {matcher.name}") # noqa: E701 fp = _fingerprint_all(data) - if _TRACE: logger.debug(f" ** matcher _fingerprint_all gave fingerprint {fp}") # noqa: E701 fps.append(fp) return fps @@ -811,21 +789,14 @@ def find_existing_object(data: dict, object_type: str): # noqa: C901 Returns the object if found, otherwise None. """ - if _TRACE: logger.debug(f"resolving {data}") # noqa: E701 model_class = get_object_type_model(object_type) for matcher in get_model_matchers(model_class): if not matcher.has_required_fields(data): - if _TRACE: logger.debug(f" * skipped matcher {matcher.name} (missing fields)") # noqa: E701 continue q = matcher.build_queryset(data) if q is None: - if _TRACE: logger.debug(f" * skipped matcher {matcher.name} (no queryset)") # noqa: E701 continue - if _TRACE: logger.debug(f" * trying query {q.query}") # noqa: E701 existing = q.order_by('pk').first() if existing is not None: - if _TRACE: logger.debug(f" -> Found object {existing} via {matcher.name}") # noqa: E701 return existing - if _TRACE: logger.debug(f" -> No object found for matcher {matcher.name}") # noqa: E701 - if _TRACE: logger.debug(" * No matchers found an existing object") # noqa: E701 return None diff --git a/netbox_diode_plugin/api/plugin_utils.py b/netbox_diode_plugin/api/plugin_utils.py index 9a8a752..2a1f3e8 100644 --- a/netbox_diode_plugin/api/plugin_utils.py +++ b/netbox_diode_plugin/api/plugin_utils.py @@ -1209,6 +1209,7 @@ def apply_format_transformations(data: dict, object_type: str): except ValidationError: raise except ValueError as e: - raise ValidationError(f'Invalid value {val} for field {key} in {object_type}: {e}') + logger.error(f"Error processing field {key} in {object_type} with value {val}: {e}") + raise ValidationError(f"Invalid value for field {key} in 
{object_type}.") except Exception as e: - raise ValidationError(f'Invalid value {val} for field {key} in {object_type}') \ No newline at end of file + raise ValidationError(f'Invalid value for field {key} in {object_type}') \ No newline at end of file diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index 5ff1a7d..6a567b2 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -16,7 +16,7 @@ from extras.models.customfields import CustomField from rest_framework import serializers -from .common import _TRACE, NON_FIELD_ERRORS, AutoSlug, ChangeSetException, UnresolvedReference, harmonize_formats +from .common import NON_FIELD_ERRORS, AutoSlug, ChangeSetException, UnresolvedReference, harmonize_formats from .matcher import find_existing_object, fingerprints from .plugin_utils import ( CUSTOM_FIELD_OBJECT_REFERENCE_TYPE, @@ -95,28 +95,18 @@ def transform_proto_json(proto_json: dict, object_type: str, supported_models: d a certain form of deduplication and resolution of existing objects. 
""" entities = _transform_proto_json_1(proto_json, object_type) - if _TRACE: logger.debug(f"_transform_proto_json_1 entities: {json.dumps(entities, default=lambda o: str(o), indent=4)}") # noqa: E701 entities = _topo_sort(entities) - if _TRACE: logger.debug(f"_topo_sort: {json.dumps(entities, default=lambda o: str(o), indent=4)}") # noqa: E701 deduplicated = _fingerprint_dedupe(entities) - if _TRACE: logger.debug(f"_fingerprint_dedupe: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") # noqa: E701 deduplicated = _topo_sort(deduplicated) - if _TRACE: logger.debug(f"_topo_sort: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") # noqa: E701 _set_auto_slugs(deduplicated, supported_models) - if _TRACE: logger.debug(f"_set_auto_slugs: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") # noqa: E701 _handle_cached_scope(deduplicated, supported_models) - if _TRACE: logger.debug(f"_handle_cached_scope: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") # noqa: E701 resolved = _resolve_existing_references(deduplicated) - if _TRACE: logger.debug(f"_resolve_references: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") # noqa: E701 _strip_cached_scope(resolved) - if _TRACE: logger.debug(f"_strip_cached_scope: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") # noqa: E701 defaulted = _set_defaults(resolved, supported_models) - if _TRACE: logger.debug(f"_set_defaults: {json.dumps(defaulted, default=lambda o: str(o), indent=4)}") # noqa: E701 # handle post-create steps output = _handle_post_creates(defaulted) - if _TRACE: logger.debug(f"_handle_post_creates: {json.dumps(output, default=lambda o: str(o), indent=4)}") # noqa: E701 _check_unresolved_refs(output) for entity in output: @@ -409,21 +399,18 @@ def _fingerprint_dedupe(entities: list[dict]) -> list[dict]: # noqa: C901 new_refs = {} # uuid -> uuid for entity in entities: - if _TRACE: logger.debug(f"fingerprint_dedupe: {entity}") # noqa: E701 if 
entity.get('_is_post_create'): fp = entity['_uuid'] existing_uuid = None else: _update_unresolved_refs(entity, new_refs) fps = fingerprints(entity, entity['_object_type']) - if _TRACE: logger.debug(f" ==> {fps}") # noqa: E701 for fp in fps: existing_uuid = by_fp.get(fp) if existing_uuid is not None: break if existing_uuid is None: - if _TRACE: logger.debug(" * entity is new.") # noqa: E701 new_entity = copy.deepcopy(entity) _update_unresolved_refs(new_entity, new_refs) primary_uuid = new_entity['_uuid'] @@ -432,7 +419,6 @@ def _fingerprint_dedupe(entities: list[dict]) -> list[dict]: # noqa: C901 by_uuid[primary_uuid] = new_entity deduplicated.append(primary_uuid) else: - if _TRACE: logger.debug(" * entity already exists.") # noqa: E701 existing = by_uuid[existing_uuid] new_refs[entity['_uuid']] = existing['_uuid'] merged = _merge_nodes(existing, entity) @@ -509,7 +495,6 @@ def _resolve_existing_references(entities: list[dict]) -> list[dict]: existing = find_existing_object(data, object_type) if existing is not None: - if _TRACE: logger.debug(f"existing {data} -> {existing}") # noqa: E701 fp = (object_type, existing.id) if fp in seen: logger.warning(f"objects resolved to the same existing id after deduplication: {seen[fp]} and {data}") From ef1bee6f8540c27451d71b207728d22eaf02eef4 Mon Sep 17 00:00:00 2001 From: Luke Tucker Date: Wed, 23 Apr 2025 16:59:03 -0400 Subject: [PATCH 42/52] fix: sanitize branch name --- netbox_diode_plugin/api/views.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index 99655e1..8f7ac09 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -111,7 +111,8 @@ def _post(self, request, *args, **kwargs): branch = Branch.objects.get(id=branch_id) result.branch = {"id": branch.id, "name": branch.name} except Branch.DoesNotExist: - logger.warning(f"Branch with ID {branch_id} does not exist") + sanitized_branch_id = 
branch_id.replace('\n', '').replace('\r', '') + logger.warning(f"Branch with ID {sanitized_branch_id} does not exist") return Response(result.to_dict(), status=result.get_status_code()) From 8c46fd2a7aa42d46aa95d43c9cb496745bad9343 Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Wed, 23 Apr 2025 17:03:19 -0400 Subject: [PATCH 43/52] Potential fix for code scanning alert no. 21: Log Injection Co-authored-by: Copilot Autofix powered by AI <62310815+github-advanced-security[bot]@users.noreply.github.com> --- netbox_diode_plugin/api/plugin_utils.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/netbox_diode_plugin/api/plugin_utils.py b/netbox_diode_plugin/api/plugin_utils.py index 2a1f3e8..2db2f87 100644 --- a/netbox_diode_plugin/api/plugin_utils.py +++ b/netbox_diode_plugin/api/plugin_utils.py @@ -1209,7 +1209,9 @@ def apply_format_transformations(data: dict, object_type: str): except ValidationError: raise except ValueError as e: - logger.error(f"Error processing field {key} in {object_type} with value {val}: {e}") - raise ValidationError(f"Invalid value for field {key} in {object_type}.") + sanitized_object_type = object_type.replace('\n', '').replace('\r', '') + sanitized_val = str(val).replace('\n', '').replace('\r', '') + logger.error(f"Error processing field {key} in {sanitized_object_type} with value {sanitized_val}: {e}") + raise ValidationError(f"Invalid value for field {key} in {sanitized_object_type}.") except Exception as e: raise ValidationError(f'Invalid value for field {key} in {object_type}') \ No newline at end of file From 3a70229df0247372fefee61aa5906a9d548fed76 Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Wed, 23 Apr 2025 17:04:34 -0400 Subject: [PATCH 44/52] Potential fix for code scanning alert no. 
16: Log Injection Co-authored-by: Copilot Autofix powered by AI <62310815+github-advanced-security[bot]@users.noreply.github.com> --- netbox_diode_plugin/api/transformer.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index 6a567b2..bc427f4 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -216,7 +216,10 @@ def _ensure_snake_case(proto_json: dict, object_type: str) -> dict: out[snake_key] = v else: # error? - logger.warning(f"Unknown field {k}/{snake_key} is not legal for {object_type}, skipping...") + sanitized_k = k.replace('\n', '').replace('\r', '') + sanitized_snake_key = snake_key.replace('\n', '').replace('\r', '') + sanitized_object_type = object_type.replace('\n', '').replace('\r', '') + logger.warning(f"Unknown field {sanitized_k}/{sanitized_snake_key} is not legal for {sanitized_object_type}, skipping...") return out From ece4fe85ac4fb517d1bc931f9404cec20e7239f8 Mon Sep 17 00:00:00 2001 From: Luke Tucker Date: Wed, 23 Apr 2025 19:45:16 -0400 Subject: [PATCH 45/52] fix: fix flakey tag sorting, test cleanup --- netbox_diode_plugin/api/applier.py | 3 ++- netbox_diode_plugin/api/common.py | 4 +++ netbox_diode_plugin/api/differ.py | 5 ++-- netbox_diode_plugin/api/plugin_utils.py | 2 +- netbox_diode_plugin/api/transformer.py | 9 ++++--- netbox_diode_plugin/tests/test_updates.py | 25 ++++++++++++++----- .../tests/test_updates_cases.json | 14 ++++++----- 7 files changed, 42 insertions(+), 20 deletions(-) diff --git a/netbox_diode_plugin/api/applier.py b/netbox_diode_plugin/api/applier.py index 52c9355..dd94a72 100644 --- a/netbox_diode_plugin/api/applier.py +++ b/netbox_diode_plugin/api/applier.py @@ -110,7 +110,8 @@ def _pre_apply(model_class: models.Model, change: Change, created: dict): allowed_fields = legal_fields(model_class) for key in list(data.keys()): if key not in allowed_fields: - 
logger.warning(f"Field {key} is not in the diode data model, ignoring.") + if key != "id": + logger.warning(f"Field {key} is not in the diode data model, ignoring.") data.pop(key) return data diff --git a/netbox_diode_plugin/api/common.py b/netbox_diode_plugin/api/common.py index 9872968..a00a504 100644 --- a/netbox_diode_plugin/api/common.py +++ b/netbox_diode_plugin/api/common.py @@ -283,3 +283,7 @@ def harmonize_formats(data): case _: logger.warning(f"Unknown type in harmonize_formats: {type(data)}") return data + +def sort_ints_first(data): + """Sort a mixed list of ints and other types, putting ints first.""" + return sorted(data, key=lambda x: (not isinstance(x, int), x)) diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index 0873d54..e213784 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -19,6 +19,7 @@ ChangeType, error_from_validation_error, harmonize_formats, + sort_ints_first, ) from .plugin_utils import get_primary_value, legal_fields from .supported_models import extract_supported_models @@ -251,6 +252,4 @@ def _merge_reference_list(prechange_list: list, postchange_list: list) -> list: """Merge reference lists rather than replacing the full value.""" result = set(prechange_list) result.update(postchange_list) - return sorted(result, key=str) - - + return sort_ints_first(result) diff --git a/netbox_diode_plugin/api/plugin_utils.py b/netbox_diode_plugin/api/plugin_utils.py index 2db2f87..15247ca 100644 --- a/netbox_diode_plugin/api/plugin_utils.py +++ b/netbox_diode_plugin/api/plugin_utils.py @@ -1025,7 +1025,7 @@ def ip_network_defaulting(value: str) -> str: def collect_integer_pairs(value: list[int]) -> list[tuple[int, int]]: if len(value) % 2 != 0: raise ValueError('Array must have an even number of elements') - return [(value[i], value[i+1]) for i in range(0, len(value), 2)] + return sorted([(value[i], value[i+1]) for i in range(0, len(value), 2)]) def for_all(transform): def 
wrapper(value): diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index bc427f4..5876425 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -16,7 +16,7 @@ from extras.models.customfields import CustomField from rest_framework import serializers -from .common import NON_FIELD_ERRORS, AutoSlug, ChangeSetException, UnresolvedReference, harmonize_formats +from .common import NON_FIELD_ERRORS, AutoSlug, ChangeSetException, UnresolvedReference, harmonize_formats, sort_ints_first from .matcher import find_existing_object, fingerprints from .plugin_utils import ( CUSTOM_FIELD_OBJECT_REFERENCE_TYPE, @@ -514,17 +514,20 @@ def _resolve_existing_references(entities: list[dict]) -> list[dict]: return resolved def _update_resolved_refs(data, new_refs): - for k, v in data.items(): + for k, v in list(data.items()): if isinstance(v, UnresolvedReference) and v.uuid in new_refs: data[k] = new_refs[v.uuid] elif isinstance(v, (list, tuple)): new_items = [] + has_refs = False for item in v: if isinstance(item, UnresolvedReference) and item.uuid in new_refs: new_items.append(new_refs[item.uuid]) + has_refs = True else: new_items.append(item) - data[k] = sorted(new_items) + if has_refs: + data[k] = sort_ints_first(new_items) elif isinstance(v, dict): _update_resolved_refs(v, new_refs) diff --git a/netbox_diode_plugin/tests/test_updates.py b/netbox_diode_plugin/tests/test_updates.py index 5481da9..d0f085c 100644 --- a/netbox_diode_plugin/tests/test_updates.py +++ b/netbox_diode_plugin/tests/test_updates.py @@ -9,6 +9,7 @@ from types import SimpleNamespace from unittest import mock +from django.db.models import QuerySet from rest_framework import status from utilities.testing import APITestCase @@ -20,15 +21,24 @@ logger = logging.getLogger(__name__) +def _harmonize_formats(data): + data = harmonize_formats(data) + return _tuples_to_lists(data) + +def _tuples_to_lists(data): + if isinstance(data, 
(tuple, list)): + return [_tuples_to_lists(d) for d in data] + if isinstance(data, dict): + return {k: _tuples_to_lists(v) for k, v in data.items()} + return data + def load_test_cases(cls): """Class decorator to load test cases and create test methods.""" - logger.error("**** Loading test cases") + logger.debug("Loading apply updates test cases") current_dir = os.path.dirname(os.path.abspath(__file__)) test_data_path = os.path.join(current_dir, "test_updates_cases.json") - logger.error(f"**** Looking for test data at {test_data_path}") if not os.path.exists(test_data_path): - logger.error(f"**** Test data file not found at {test_data_path}") raise FileNotFoundError(f"Test data file not found at {test_data_path}") def _create_and_update_test_case(case): @@ -92,7 +102,7 @@ def test_func(self): test_cases = json.load(f) for case in test_cases: t = _create_and_update_test_case(case) - logger.error(f"**** Creating test case {t.__name__}") + logger.debug(f"Creating test case {t.__name__}") setattr(cls, t.__name__, t) return cls @@ -146,7 +156,9 @@ def _follow_path(self, obj, path): cur = cur() except ValueError: pass - return harmonize_formats(cur) + if isinstance(cur, QuerySet): + cur = list(cur) + return cur def _check_set_by(self, obj, path, value): key = path[-1][len("__by_"):] @@ -158,11 +170,12 @@ def _check_set_by(self, obj, path, value): else: vals = {value} - cvals = {harmonize_formats(getattr(c, key)) for c in cur} + cvals = {_harmonize_formats(getattr(c, key)) for c in cur} self.assertEqual(cvals, vals) def _check_equals(self, obj, path, value): cur = self._follow_path(obj, path) + cur = _harmonize_formats(cur) self.assertEqual(cur, value) def _check_expect(self, obj, expect): diff --git a/netbox_diode_plugin/tests/test_updates_cases.json b/netbox_diode_plugin/tests/test_updates_cases.json index a9e6f21..5ff0a5a 100644 --- a/netbox_diode_plugin/tests/test_updates_cases.json +++ b/netbox_diode_plugin/tests/test_updates_cases.json @@ -1911,8 +1911,7 @@ 
"create_expect": { "name": "GigabitEthernet1/0/1", "label": "Core Link 1", - "tagged_vlans.all.0.vid": 101, - "tagged_vlans.all.1.vid": 102 + "tagged_vlans.all.__by_vid": [101, 102] }, "create": { "interface": { @@ -2168,7 +2167,7 @@ }, "update_expect": { "tags.all.__by_name": ["Tag 1", "Tag 2", "Tag 3"] - } + } }, { "name": "dcim_interface_2", @@ -2442,7 +2441,7 @@ }, "update_expect": { "tags.all.__by_name": ["Tag 1", "Tag 2", "Tag 3"] - } + } }, { "name": "dcim_interface_3", @@ -2688,7 +2687,7 @@ }, "update_expect": { "tags.all.__by_name": ["Tag 1", "Tag 2", "Tag 3"] - } + } }, { "name": "vpn_l2vpn_1", @@ -4946,6 +4945,7 @@ "slug": "dc-west", "status": "active" }, + "vid_ranges": [101, 102, 99, 100], "description": "Core network VLANs for data center infrastructure", "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] } @@ -4959,12 +4959,14 @@ "slug": "dc-west", "status": "active" }, + "vid_ranges": [101, 102, 99, 100], "description": "Core network VLANs for data center infrastructure Updated", "tags": [{"name": "Tag 1"}, {"name": "Tag 2"}] } }, "update_expect": { - "description": "Core network VLANs for data center infrastructure Updated" + "description": "Core network VLANs for data center infrastructure Updated", + "vid_ranges": [[99, 100], [101, 102]] } }, { From d86ce162b85b1151cf00eadfa7d66f30f3cfe768 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Thu, 24 Apr 2025 09:58:54 +0200 Subject: [PATCH 46/52] perf: license update to NetBox Limited Use License 1.0 Signed-off-by: Michal Fiedorowicz --- LICENSE.md | 181 ++++++++++++++----------------------------------- README.md | 2 +- pyproject.toml | 2 +- 3 files changed, 53 insertions(+), 132 deletions(-) diff --git a/LICENSE.md b/LICENSE.md index fcaed8a..cd0c365 100644 --- a/LICENSE.md +++ b/LICENSE.md @@ -1,164 +1,85 @@ -# PolyForm Shield License 1.0.0 - - +# NetBox Limited Use License 1.0 ## Acceptance -In order to get any license under these terms, you must agree -to them as both strict obligations and 
conditions to all -your licenses. +In order to get any license under these terms, you must agree to them as +both strict obligations and conditions to all your licenses. ## Copyright License -The licensor grants you a copyright license for the -software to do everything you might do with the software -that would otherwise infringe the licensor's copyright -in it for any permitted purpose. However, you may -only distribute the software according to [Distribution -License](#distribution-license) and make changes or new works -based on the software according to [Changes and New Works -License](#changes-and-new-works-license). - -## Distribution License - -The licensor grants you an additional copyright license -to distribute copies of the software. Your license -to distribute covers distributing the software with -changes and new works permitted by [Changes and New Works -License](#changes-and-new-works-license). - -## Notices - -You must ensure that anyone who gets a copy of any part of -the software from you also gets a copy of these terms or the -URL for them above, as well as copies of any plain-text lines -beginning with `Required Notice:` that the licensor provided -with the software. For example: +NetBox Labs grants you a copyright license to use and modify the software +only as part of a NetBox installation obtained from NetBox Labs or a NetBox +distributor authorized by NetBox Labs, and only for your own internal use. -> Required Notice: Copyright Yoyodyne, Inc. (http://example.com) - -## Changes and New Works License - -The licensor grants you an additional copyright license to -make changes and new works based on the software for any -permitted purpose. +For clarity, this license grants no rights to distribute the software or +make it available to others as part of a commercial offering. 
## Patent License -The licensor grants you a patent license for the software that -covers patent claims the licensor can license, or becomes able -to license, that you would infringe by using the software. - -## Noncompete - -Any purpose is a permitted purpose, except for providing any -product that competes with the software or any product the -licensor or any of its affiliates provides using the software. - -## Competition +NetBox Labs grants you a patent license for the software that covers patent +claims the licensor can license, or becomes able to license, that you would +infringe by using the software, as allowed in the copyright license above. -Goods and services compete even when they provide functionality -through different kinds of interfaces or for different technical -platforms. Applications can compete with services, libraries -with plugins, frameworks with development tools, and so on, -even if they're written in different programming languages -or for different computer architectures. Goods and services -compete even when provided free of charge. If you market a -product as a practical substitute for the software or another -product, it definitely competes. - -## New Products - -If you are using the software to provide a product that does -not compete, but the licensor or any of its affiliates brings -your product into competition by providing a new version of -the software or another product using the software, you may -continue using versions of the software available under these -terms beforehand to provide your competing product, but not -any later versions. - -## Discontinued Products - -You may begin using the software to compete with a product -or service that the licensor or any of its affiliates has -stopped providing, unless the licensor includes a plain-text -line beginning with `Licensor Line of Business:` with the -software that mentions that line of business. 
For example: - -> Licensor Line of Business: YoyodyneCMS Content Management -System (http://example.com/cms) +## Patent Defense -## Sales of Business +If you make any written claim that the software infringes or contributes to +infringement of any patent, your patent license for the software granted +under these terms ends immediately. If your company makes such a claim, +your patent license ends immediately for work on behalf of your company. +Competitive Restrictions -If the licensor or any of its affiliates sells a line of -business developing the software or using the software -to provide a product, the buyer can also enforce -[Noncompete](#noncompete) for that product. +This license does not grant you the right to use the software: -## Fair Use +- To provide a managed service or software products that includes, integrates + with, or extends NetBox in a way that competes with any product or service + of NetBox Labs. -You may have "fair use" rights for the software under the -law. These terms do not limit them. +- To assist or enable a third party in offering a service or product that + competes with any product or service of NetBox Labs. ## No Other Rights -These terms do not allow you to sublicense or transfer any of -your licenses to anyone else, or prevent the licensor from -granting licenses to anyone else. These terms do not imply -any other licenses. - -## Patent Defense - -If you make any written claim that the software infringes or -contributes to infringement of any patent, your patent license -for the software granted under these terms ends immediately. If -your company makes such a claim, your patent license ends -immediately for work on behalf of your company. +These terms do not allow you to sublicense or transfer any of your licenses +to anyone else, or prevent NetBox Labs from granting licenses to anyone else. +These terms do not imply any other licenses. 
## Violations -The first time you are notified in writing that you have -violated any of these terms, or done anything with the software -not covered by your licenses, your licenses can nonetheless -continue if you come into full compliance with these terms, -and take practical steps to correct past violations, within -32 days of receiving notice. Otherwise, all your licenses -end immediately. +The first time you are notified in writing that you have violated any of +these terms, or done anything with the software not covered by your licenses, +your licenses can nonetheless continue if you come into full compliance with +these terms, and take practical steps to correct past violations, within 30 +days of receiving notice. Otherwise, all your licenses end immediately. ## No Liability -***As far as the law allows, the software comes as is, without -any warranty or condition, and the licensor will not be liable -to you for any damages arising out of these terms or the use -or nature of the software, under any kind of legal claim.*** +As far as the law allows, the software comes as is, without any warranty or +condition, and NetBox Labs will not be liable to you for any damages arising +out of these terms or the use or nature of the software, under any kind of +legal claim. + +If this disclaimer is unenforceable under applicable law, this license is void. ## Definitions -The **licensor** is the individual or entity offering these -terms, and the **software** is the software the licensor makes -available under these terms. +**NetBox Labs** is NetBox Labs, Inc. -A **product** can be a good or service, or a combination -of them. +**NetBox** is the community edition of NetBox found at + or any derivative thereof. -**You** refers to the individual or entity agreeing to these -terms. +The **software** is the software NetBox Labs makes available under these terms. 
-**Your company** is any legal entity, sole proprietorship, -or other kind of organization that you work for, plus all -its affiliates. +**You** refers to the individual or entity agreeing to these terms. -**Affiliates** means the other organizations than an -organization has control over, is under the control of, or is -under common control with. +**Your company** is any legal entity, sole proprietorship, or other kind of +organization that you work for, plus all organizations that have control over, +are under the control of, or are under common control with that organization. -**Control** means ownership of substantially all the assets of -an entity, or the power to direct its management and policies -by vote, contract, or otherwise. Control can be direct or -indirect. +**Control** means ownership of substantially all the assets of an entity, +or the power to direct its management and policies by vote, contract, or +otherwise. Control can be direct or indirect. -**Your licenses** are all the licenses granted to you for the -software under these terms. +**Your licenses** are all the licenses granted to you for the software under +these terms. -**Use** means anything you do with the software requiring one -of your licenses. \ No newline at end of file +**Use** means anything you do with the software requiring one of your licenses. diff --git a/README.md b/README.md index 6ae6b25..96f93b5 100644 --- a/README.md +++ b/README.md @@ -91,7 +91,7 @@ make docker-compose-netbox-plugin-test ## License -Distributed under the PolyForm Shield License 1.0.0 License. See [LICENSE.md](./LICENSE.md) for more information. +Distributed under the NetBox Limited Use License 1.0. See [LICENSE.md](./LICENSE.md) for more information. 
## Required Notice diff --git a/pyproject.toml b/pyproject.toml index dccad0a..54c7eed 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ version = "0.0.1" # Overwritten during the build process description = "NetBox Labs, Diode NetBox plugin" readme = "README.md" requires-python = ">=3.8" -license = { text = "PolyForm Shield License 1.0.0" } +license = { text = "NetBox Limited Use License 1.0" } authors = [ {name = "NetBox Labs", email = "support@netboxlabs.com" } ] From 16e2a148fa135847368e75a3dd2212d575890474 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Thu, 24 Apr 2025 13:57:36 +0200 Subject: [PATCH 47/52] fix: generate diff - set branch for the change set Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/views.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index 8f7ac09..7df6cd5 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -103,15 +103,15 @@ def _post(self, request, *args, **kwargs): ) return Response(result.to_dict(), status=result.get_status_code()) - branch_id = request.headers.get("X-NetBox-Branch") + branch_schema_id = request.headers.get("X-NetBox-Branch") - # If branch ID is provided and branching plugin is installed, get branch name - if branch_id and Branch is not None: + # If branch schema ID is provided and branching plugin is installed, get branch name + if branch_schema_id and Branch is not None: try: - branch = Branch.objects.get(id=branch_id) - result.branch = {"id": branch.id, "name": branch.name} + branch = Branch.objects.get(schema_id=branch_schema_id) + result.change_set.branch = {"id": branch.schema_id, "name": branch.name} except Branch.DoesNotExist: - sanitized_branch_id = branch_id.replace('\n', '').replace('\r', '') + sanitized_branch_id = branch_schema_id.replace('\n', '').replace('\r', '') logger.warning(f"Branch with ID {sanitized_branch_id} does not 
exist") return Response(result.to_dict(), status=result.get_status_code()) From 203ef34b65b84986bace3286e442403e02362c44 Mon Sep 17 00:00:00 2001 From: Luke Tucker Date: Thu, 24 Apr 2025 12:07:23 -0400 Subject: [PATCH 48/52] fix: consider object type in fingerprint all deduplication --- netbox_diode_plugin/api/matcher.py | 6 ++-- .../tests/test_api_generate_diff.py | 34 +++++++++++++++++++ 2 files changed, 37 insertions(+), 3 deletions(-) diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py index 2cfb88f..1e4c512 100644 --- a/netbox_diode_plugin/api/matcher.py +++ b/netbox_diode_plugin/api/matcher.py @@ -735,7 +735,7 @@ def _get_refs(expr) -> set[str]: logger.warning(f"Unhandled expression type for _get_refs: {type(expr)}") return refs -def _fingerprint_all(data: dict) -> str: +def _fingerprint_all(data: dict, object_type: str|None = None) -> str: """ Returns a fingerprint of the data based on all fields. @@ -745,7 +745,7 @@ def _fingerprint_all(data: dict) -> str: if data is None: return None - values = [] + values = ["object_type", object_type] for k, v in sorted(data.items()): if k.startswith("_"): continue @@ -776,7 +776,7 @@ def fingerprints(data: dict, object_type: str) -> list[str]: fp = matcher.fingerprint(data) if fp is not None: fps.append(fp) - fp = _fingerprint_all(data) + fp = _fingerprint_all(data, object_type) fps.append(fp) return fps diff --git a/netbox_diode_plugin/tests/test_api_generate_diff.py b/netbox_diode_plugin/tests/test_api_generate_diff.py index a1d1220..9698712 100644 --- a/netbox_diode_plugin/tests/test_api_generate_diff.py +++ b/netbox_diode_plugin/tests/test_api_generate_diff.py @@ -3,6 +3,7 @@ """Diode NetBox Plugin - Tests.""" import logging +from collections import defaultdict from types import SimpleNamespace from unittest import mock from uuid import uuid4 @@ -377,6 +378,39 @@ def test_vlangroup_error(self): } _ = self.send_request(payload) + def 
test_generate_diff_dedupe_different_object_types(self): + """Test generate diff dedupe different object types with same values.""" + payload = { + "timestamp": 1, + "object_type": "dcim.device", + "entity": { + "device": { + "name": "Cat8000V", + "role": {"name": "undefined"}, + "site": {"name": "undefined"}, + "serial": "9OBXJHNNU5V", + "status": "active", + "platform": {"name": "ios", "manufacturer": {"name": "undefined"}}, + "device_type": {"model": "C8000V", "manufacturer": {"name": "undefined"}} + }, + }, + } + response = self.send_request(payload) + self.assertEqual(response.status_code, status.HTTP_200_OK) + cs = response.json().get("change_set", {}) + self.assertIsNotNone(cs.get("id")) + changes = cs.get("changes", []) + self.assertEqual(len(changes), 6) + by_object_type = defaultdict(int) + for change in changes: + by_object_type[change.get("object_type")] += 1 + + self.assertEqual(by_object_type["dcim.device"], 1) + self.assertEqual(by_object_type["dcim.manufacturer"], 1) + self.assertEqual(by_object_type["dcim.platform"], 1) + self.assertEqual(by_object_type["dcim.devicetype"], 1) + self.assertEqual(by_object_type["dcim.site"], 1) + self.assertEqual(by_object_type["dcim.devicerole"], 1) def send_request(self, payload, status_code=status.HTTP_200_OK): """Post the payload to the url and return the response.""" From 3c3b61cff2f7a06e244e986a40425db1c2d6a500 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Thu, 24 Apr 2025 18:45:13 +0200 Subject: [PATCH 49/52] fix: handle validation errors by finding existing objects on create (concurrency) Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/applier.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/netbox_diode_plugin/api/applier.py b/netbox_diode_plugin/api/applier.py index dd94a72..5f158ec 100644 --- a/netbox_diode_plugin/api/applier.py +++ b/netbox_diode_plugin/api/applier.py @@ -11,6 +11,7 @@ from rest_framework.exceptions import ValidationError as 
ValidationError from .common import NON_FIELD_ERRORS, Change, ChangeSet, ChangeSetException, ChangeSetResult, ChangeType, error_from_validation_error +from .matcher import find_existing_object from .plugin_utils import get_object_type_model, legal_fields from .supported_models import get_serializer_for_model @@ -58,8 +59,13 @@ def _apply_change(data: dict, model_class: models.Model, change: Change, created change_type = change.change_type if change_type == ChangeType.CREATE.value: serializer = serializer_class(data=data, context={"request": request}) - serializer.is_valid(raise_exception=True) - instance = serializer.save() + try: + serializer.is_valid(raise_exception=True) + instance = serializer.save() + except ValidationError as e: + instance = find_existing_object(data, change.object_type) + if not instance: + raise e created[change.ref_id] = instance elif change_type == ChangeType.UPDATE.value: From c4dee43d81f53c783599f18e866cc80a02d9f174 Mon Sep 17 00:00:00 2001 From: Leonardo Parente <23251360+leoparente@users.noreply.github.com> Date: Thu, 24 Apr 2025 13:52:01 -0300 Subject: [PATCH 50/52] chore: linux extra hosts (#92) --- docker/docker-compose.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index c1112ab..88b6702 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -27,6 +27,8 @@ services: - netbox-media-files:/opt/netbox/netbox/media:rw - netbox-reports-files:/opt/netbox/netbox/reports:rw - netbox-scripts-files:/opt/netbox/netbox/scripts:rw + extra_hosts: + - "host.docker.internal:host-gateway" ports: - "8000:8080" From 8bf87ecbc180fa791d44619a4b7b827501d44fdd Mon Sep 17 00:00:00 2001 From: Luke Tucker Date: Thu, 24 Apr 2025 13:01:59 -0400 Subject: [PATCH 51/52] chore: create tests for double created objects --- .../tests/test_api_apply_change_set.py | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git 
a/netbox_diode_plugin/tests/test_api_apply_change_set.py b/netbox_diode_plugin/tests/test_api_apply_change_set.py index 1a87984..8f6a6b3 100644 --- a/netbox_diode_plugin/tests/test_api_apply_change_set.py +++ b/netbox_diode_plugin/tests/test_api_apply_change_set.py @@ -1062,3 +1062,46 @@ def test_create_virtualmachine_with_cluster_site_stored_as_scope(self): } _ = self.send_request(payload) self.assertEqual(VirtualMachine.objects.get(name="VM foobar", site_id=self.sites[0].id).cluster.scope, self.sites[0]) + + def test_apply_two_changes_that_create_the_same_object_return_200(self): + """Test apply two changes that create the same object return 200.""" + site_name = uuid.uuid4() + payload1 = { + "id": str(uuid.uuid4()), + "changes": [ + { + "change_id": str(uuid.uuid4()), + "change_type": "create", + "object_version": None, + "object_type": "dcim.site", + "object_id": None, + "ref_id": "1", + "data": { + "name": f"Site {site_name}", + "slug": f"site-{site_name}", + "comments": "comment 1", + }, + }, + ], + } + _ = self.send_request(payload1) + + payload2 = { + "id": str(uuid.uuid4()), + "changes": [ + { + "change_id": str(uuid.uuid4()), + "change_type": "create", + "object_version": None, + "object_type": "dcim.site", + "object_id": None, + "ref_id": "1", + "data": { + "name": f"Site {site_name}", + "slug": f"site-{site_name}", + "comments": "comment 1", + }, + }, + ], + } + _ = self.send_request(payload2) From 48b7c56493aab2071b21a42c6d67d3e841fbdb73 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Thu, 24 Apr 2025 20:15:44 +0200 Subject: [PATCH 52/52] chore: update Python package name format and improve release version output handling in workflow Signed-off-by: Michal Fiedorowicz --- .github/workflows/release.yaml | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 621dd4f..0faf051 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml 
@@ -13,7 +13,7 @@ env: SEMANTIC_RELEASE_PACKAGE: ${{ github.repository }} PYTHON_RUNTIME_VERSION: "3.11" APP_NAME: diode-netbox-plugin - PYTHON_PACKAGE_NAME: netboxlabs-diode-netbox-plugin + PYTHON_PACKAGE_NAME: netboxlabs_diode_netbox_plugin permissions: id-token: write @@ -108,8 +108,10 @@ jobs: run: echo "::set-output name=short-sha::${GITHUB_SHA::7}" - name: Set release version id: release-version + env: + NEXT_RELEASE_VERSION: ${{ steps.get-next-version.outputs.new-release-version }} run: | - echo "::set-output name=release-version::`echo ${{ steps.get-next-version.outputs.new-release-version }} | sed 's/v//g'`" + echo "release-version=`echo $NEXT_RELEASE_VERSION | sed 's/v//g'`" >> $GITHUB_OUTPUT outputs: new-release-published: ${{ steps.get-next-version.outputs.new-release-published }} new-release-version: ${{ steps.release-version.outputs.release-version }} @@ -123,7 +125,11 @@ jobs: if: needs.get-next-version.outputs.new-release-published == 'true' steps: - uses: actions/checkout@v4 - - run: echo "The new release version is ${{ needs.get-next-version.outputs.new-release-version }} commit ${{ needs.get-next-version.outputs.short-sha }}" + - name: Confirm version + env: + NEXT_RELEASE_VERSION: ${{ needs.get-next-version.outputs.new-release-version }} + NEXT_RELEASE_SHORT_SHA: ${{ needs.get-next-version.outputs.short-sha }} + run: echo "The new release version is $NEXT_RELEASE_VERSION commit $NEXT_RELEASE_SHORT_SHA" build: name: Build