From 46b7354a90c9872e464a0cf5d9e493aee7014de7 Mon Sep 17 00:00:00 2001 From: Jan Snasel Date: Fri, 23 May 2025 07:02:56 +0000 Subject: [PATCH] feat: NetBox4 and others --- docs/dev/generator.md | 8 +- nautobot_netbox_importer/diffsync/__init__.py | 5 +- .../diffsync/adapters/netbox.py | 23 +- .../diffsync/models/base.py | 101 +--- .../diffsync/models/cables.py | 264 ++++++++++ .../diffsync/models/circuits.py | 33 +- .../diffsync/models/content_types.py | 46 ++ .../diffsync/models/custom_fields.py | 8 +- .../diffsync/models/dcim.py | 20 +- .../diffsync/models/ipam.py | 101 +++- .../diffsync/models/locations.py | 8 +- .../diffsync/models/object_change.py | 10 +- .../diffsync/models/tags.py | 137 +++++ .../generator/__init__.py | 4 +- nautobot_netbox_importer/generator/base.py | 48 +- nautobot_netbox_importer/generator/fields.py | 62 ++- .../generator/nautobot.py | 65 +-- nautobot_netbox_importer/generator/source.py | 346 ++++++++++-- .../management/commands/import_netbox.py | 35 +- nautobot_netbox_importer/summary.py | 10 +- .../nautobot-v2.2/3.7.custom/input.json | 492 +++++++++++++++++- .../3.7.custom/samples/dcim.device.json | 2 +- .../3.7.custom/samples/dcim.interface.json | 33 ++ .../3.7.custom/samples/dcim.location.json | 2 +- .../3.7.custom/samples/extras.status.json | 1 + .../3.7.custom/samples/ipam.ipaddress.json | 12 +- .../samples/ipam.ipaddresstointerface.json | 18 + .../3.7.custom/samples/ipam.prefix.json | 54 +- netbox-checker/netbox_checker/__init__.py | 7 + netbox-checker/netbox_checker/checker.py | 52 ++ netbox-checker/pyproject.toml | 12 + netbox-checker/run.sh | 30 ++ poetry.lock | 10 +- pyproject.toml | 2 + tasks.py | 69 ++- 35 files changed, 1853 insertions(+), 277 deletions(-) create mode 100644 nautobot_netbox_importer/diffsync/models/cables.py create mode 100644 nautobot_netbox_importer/diffsync/models/content_types.py create mode 100644 nautobot_netbox_importer/diffsync/models/tags.py create mode 100644 nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/dcim.interface.json create mode 100644 nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/ipam.ipaddresstointerface.json create mode 100644 netbox-checker/netbox_checker/__init__.py create mode 100644 netbox-checker/netbox_checker/checker.py create mode 100644 netbox-checker/pyproject.toml create mode 100755 netbox-checker/run.sh diff --git a/docs/dev/generator.md b/docs/dev/generator.md index 530a21b8..3fa9d8f1 100644 --- a/docs/dev/generator.md +++ b/docs/dev/generator.md @@ -63,16 +63,16 @@ The first data iteration constructs the wrapping structure, which includes: - `SourceAdapter` with all source model `adapter.wrappers`. - The `SourceAdapter` manages `SourceModelWrapper` and `NautobotModelWrapper` instances. -- A `SourceModelWrapper` for each source content type, with `source_wrapper.fields` detailing how to import the source data. +- A `SourceModelWrapper` for each source content type, with `fields` detailing how to import the source data. - Each `SourceModelWrapper` instance corresponds to a single `NautobotModelWrapper` instance. - A `NautobotModelWrapper` for each Nautobot content type, detailing `nautobot_wrapper.fields` and types, aiding in constructing the `DiffSyncModel` instances. - A single `NautobotModelWrapper` instance can be referenced by multiple `SourceModelWrapper` instances. -During this phase, all non-defined but present source fields are appended to the `source_wrapper.fields`, focusing on field names, not values. 
+During this phase, all non-defined but present source fields are appended to the `SourceModelWrapper.fields`, focusing on field names, not values.
 
 ### Creating Source Importers
 
-Convert each `source_wrapper.fields` item into a callable based on previously-established field definitions. The callables convert the source data into the `DiffSyncModel` constructor's expected structure.
+Convert each `SourceModelWrapper.fields` item into a callable based on previously-established field definitions. The callables convert the source data into the `DiffSyncModel` constructor's expected structure.
 
 In this stage, the structure described in the previous section is enhanced.
 
@@ -86,7 +86,7 @@ For each source record, the importer attempts to read the corresponding Nautobot
 
 ### Updating Referenced Content Types
 
-The updating of `content_types` fields, based on cached references, occurs in this phase. It's possible to define forwarding references using `source_wrapper.set_references_forwarding()`, e.g. references to `dcim.location` are forwarded to `dcim.locationtype`.
+The updating of `content_types` fields, based on cached references, occurs in this phase. It's possible to define forwarding references using `SourceModelWrapper.set_references_forwarding()`, e.g. references to `dcim.location` are forwarded to `dcim.locationtype`.
 
 ### Syncing to Nautobot
 
diff --git a/nautobot_netbox_importer/diffsync/__init__.py b/nautobot_netbox_importer/diffsync/__init__.py
index 81924900..27ee5d90 100644
--- a/nautobot_netbox_importer/diffsync/__init__.py
+++ b/nautobot_netbox_importer/diffsync/__init__.py
@@ -1 +1,4 @@
-"""DiffSync adapter and model implementation for nautobot-netbox-importer."""
+"""DiffSync adapter and model implementation for nautobot-netbox-importer.
+
+This folder contains the importer implementation specific to NetBox, as opposed to the `generator` folder, which implements a generic Source => Nautobot importer.
+""" diff --git a/nautobot_netbox_importer/diffsync/adapters/netbox.py b/nautobot_netbox_importer/diffsync/adapters/netbox.py index 557a2761..63fe1520 100644 --- a/nautobot_netbox_importer/diffsync/adapters/netbox.py +++ b/nautobot_netbox_importer/diffsync/adapters/netbox.py @@ -2,27 +2,32 @@ from gzip import GzipFile from pathlib import Path -from typing import Callable, Generator, NamedTuple, Union +from typing import Callable, Generator, NamedTuple, Sequence, Union from urllib.parse import ParseResult, urlparse import ijson import requests from django.core.management import call_command from django.db.transaction import atomic +from packaging.version import Version from nautobot_netbox_importer.base import GENERATOR_SETUP_MODULES, logger, register_generator_setup +from nautobot_netbox_importer.diffsync.models.cables import create_missing_cable_terminations from nautobot_netbox_importer.diffsync.models.dcim import fix_power_feed_locations, unrack_zero_uheight_devices from nautobot_netbox_importer.generator import SourceAdapter, SourceDataGenerator, SourceRecord from nautobot_netbox_importer.summary import Pathable for _name in ( "base", + "cables", "circuits", + "content_types", "custom_fields", "dcim", "ipam", "locations", "object_change", + "tags", "virtualization", ): register_generator_setup(f"nautobot_netbox_importer.diffsync.models.{_name}") @@ -46,13 +51,17 @@ class NetBoxImporterOptions(NamedTuple): bypass_data_validation: bool = False print_summary: bool = False update_paths: bool = False + deduplicate_prefixes: bool = False fix_powerfeed_locations: bool = False sitegroup_parent_always_region: bool = False + create_missing_cable_terminations: bool = False tag_issues: bool = False unrack_zero_uheight_devices: bool = True save_json_summary_path: str = "" save_text_summary_path: str = "" trace_issues: bool = False + customizations: Sequence[str] = [] + netbox_version: Version = Version("3.7") AdapterSetupFunction = Callable[[SourceAdapter], None] @@ -76,6 +85,10 @@ def __init__(self, input_ref: _FileRef, options: NetBoxImporterOptions, job=None self.options = options + for name in options.customizations: + if name: + register_generator_setup(name) + for name in GENERATOR_SETUP_MODULES: setup = __import__(name, fromlist=["setup"]).setup setup(self) @@ -83,11 +96,17 @@ def __init__(self, input_ref: _FileRef, options: NetBoxImporterOptions, job=None def load(self) -> None: """Load data from NetBox.""" self.import_data() + if self.options.fix_powerfeed_locations: fix_power_feed_locations(self) + if self.options.unrack_zero_uheight_devices: unrack_zero_uheight_devices(self) - self.post_import() + + if self.options.create_missing_cable_terminations: + create_missing_cable_terminations(self) + + self.post_load() def import_to_nautobot(self) -> None: """Import a NetBox export file into Nautobot.""" diff --git a/nautobot_netbox_importer/diffsync/models/base.py b/nautobot_netbox_importer/diffsync/models/base.py index 646c845e..653f3438 100644 --- a/nautobot_netbox_importer/diffsync/models/base.py +++ b/nautobot_netbox_importer/diffsync/models/base.py @@ -1,85 +1,21 @@ """NetBox to Nautobot Base Models Mapping.""" -from diffsync.enum import DiffSyncModelFlags +from packaging.version import Version -from nautobot_netbox_importer.base import RecordData -from nautobot_netbox_importer.generator import DiffSyncBaseModel, SourceAdapter, SourceField, fields +from nautobot_netbox_importer.diffsync.adapters.netbox import NetBoxAdapter +from nautobot_netbox_importer.diffsync.models.locations import 
define_locations +from nautobot_netbox_importer.generator import fields -from .locations import define_locations - -def _define_tagged_object(field: SourceField) -> None: - wrapper = field.wrapper - adapter = wrapper.adapter - tag_wrapper = adapter.get_or_create_wrapper("extras.tag") - - def tagged_object_importer(source: RecordData, target: DiffSyncBaseModel) -> None: - object_id = source.get(field.name, None) - if not object_id: - return - - tag = source.get(tag_field.name, None) - content_type = source.get(content_type_field.name, None) - if not tag or not content_type: - raise ValueError(f"Missing content_type or tag for tagged object {object_id}") - - tag_uuid = tag_wrapper.get_pk_from_uid(tag) - related_wrapper = adapter.get_or_create_wrapper(content_type) - result = related_wrapper.get_pk_from_uid(object_id) - field.set_nautobot_value(target, result) - tag_field.set_nautobot_value(target, tag_uuid) - content_type_field.set_nautobot_value(target, related_wrapper.nautobot.content_type_instance.pk) - related_wrapper.add_reference(tag_wrapper, tag_uuid) - - field.set_importer(tagged_object_importer) - tag_field = field.handle_sibling("tag", "tag") - content_type_field = field.handle_sibling("content_type", "content_type") - - -def _setup_content_types(adapter: SourceAdapter) -> None: - """Map NetBox content types to Nautobot. - - Automatically calculate NetBox content type IDs, if not provided, based on the order of the content types. - """ - netbox = {"id": 0} - - def define_app_label(field: SourceField) -> None: - def content_types_mapper_importer(source: RecordData, target: DiffSyncBaseModel) -> None: - app_label = source["app_label"] - model = source["model"] - netbox["id"] += 1 - uid = source.get("id", None) - if uid: - if uid != netbox["id"]: - raise ValueError(f"Content type id mismatch: {uid} != {netbox['id']}") - else: - uid = netbox["id"] - - wrapper = adapter.get_or_create_wrapper(f"{app_label}.{model}") - adapter.content_type_ids_mapping[uid] = wrapper - field.set_nautobot_value(target, app_label) - - field.set_importer(content_types_mapper_importer) - - adapter.configure_model( - "contenttypes.ContentType", - identifiers=["app_label", "model"], - flags=DiffSyncModelFlags.IGNORE, - nautobot_flags=DiffSyncModelFlags.IGNORE, - fields={ - "app_label": define_app_label, - }, - ) - - -def setup(adapter: SourceAdapter) -> None: +def setup(adapter: NetBoxAdapter) -> None: """Map NetBox base models to Nautobot.""" - adapter.disable_model("sessions.session", "Nautobot has own sessions, sessions should never cross apps.") + netbox_version = adapter.options.netbox_version + adapter.disable_model("admin.logentry", "Not directly used in Nautobot.") - adapter.disable_model("users.userconfig", "May not have a 1 to 1 translation to Nautobot.") adapter.disable_model("auth.permission", "Handled via a Nautobot model and may not be a 1 to 1.") - - _setup_content_types(adapter) + adapter.disable_model("extras.imageattachment", "Images are not imported yet.") + adapter.disable_model("sessions.session", "Nautobot has own sessions, sessions should never cross apps.") + adapter.disable_model("users.userconfig", "May not have a 1 to 1 translation to Nautobot.") adapter.configure_model( "extras.Status", @@ -89,18 +25,7 @@ def setup(adapter: SourceAdapter) -> None: }, ) adapter.configure_model("extras.role") - adapter.configure_model( - "extras.tag", - fields={ - "object_types": "content_types", - }, - ) - adapter.configure_model( - "extras.TaggedItem", - fields={ - "object_id": _define_tagged_object, 
- }, - ) + adapter.configure_model( "extras.ConfigContext", fields={ @@ -110,13 +35,15 @@ def setup(adapter: SourceAdapter) -> None: ) adapter.configure_model( # pylint: disable=hard-coded-auth-user - "auth.User", + "auth.User" if netbox_version < Version("4") else "users.User", nautobot_content_type="users.User", identifiers=["username"], fields={ "last_login": fields.disable("Should not be attempted to migrate"), "password": fields.disable("Should not be attempted to migrate"), "user_permissions": fields.disable("Permissions import is not implemented yet"), + "object_permissions": fields.disable("Permissions import is not implemented yet"), + "groups": fields.disable("Groups import is not implemented yet"), }, ) adapter.configure_model( diff --git a/nautobot_netbox_importer/diffsync/models/cables.py b/nautobot_netbox_importer/diffsync/models/cables.py new file mode 100644 index 00000000..7e1433ee --- /dev/null +++ b/nautobot_netbox_importer/diffsync/models/cables.py @@ -0,0 +1,264 @@ +# COMPATIBLE_TERMINATION_TYPES = { +# "circuittermination": ["interface", "frontport", "rearport", "circuittermination"], +# "consoleport": ["consoleserverport", "frontport", "rearport"], +# "consoleserverport": ["consoleport", "frontport", "rearport"], +# "interface": ["interface", "circuittermination", "frontport", "rearport"], +# "frontport": [ +# "consoleport", +# "consoleserverport", +# "interface", +# "frontport", +# "rearport", +# "circuittermination", +# ], +# "powerfeed": ["powerport"], +# "poweroutlet": ["powerport"], +# "powerport": ["poweroutlet", "powerfeed"], +# "rearport": [ +# "consoleport", +# "consoleserverport", +# "interface", +# "frontport", +# "rearport", +# "circuittermination", +# ], +# } +"""DCIM data related functions.""" + +from diffsync import DiffSyncModel +from nautobot.dcim.constants import COMPATIBLE_TERMINATION_TYPES + +from nautobot_netbox_importer.base import DUMMY_UID, ContentTypeStr, RecordData, Uid +from nautobot_netbox_importer.generator import ( + EMPTY_VALUES, + DiffSyncBaseModel, + PreImportRecordResult, + SourceAdapter, + SourceField, + SourceModelWrapper, +) + +_FALLBACK_TERMINATION_TYPE = "circuittermination" +_CIRCUIT_MODELS = {"circuittermination"} +_IGNORE_CABLE_LABELS = ( + "connected", + "testing", + "planned", + "decommissioned", + "disconnected", + "failed", + "unknown", +) + + +def _pre_import_cable_termination(source: RecordData, _) -> PreImportRecordResult: + cable_end = source.pop("cable_end").lower() + source["id"] = source.pop("cable") + source[f"termination_{cable_end}_type"] = source.pop("termination_type") + source[f"termination_{cable_end}_id"] = source.pop("termination_id") + + return PreImportRecordResult.USE_RECORD + + +def _define_cable_label(field: SourceField) -> None: + """Define the cable label field importer. + + Importer uses cable.id if label is empty or contains any of the ignored labels. + """ + + def cable_label_importer(source: RecordData, target: DiffSyncBaseModel) -> None: + value = field.get_source_value(source) + + if value: + value = str(value).strip() + + if value in EMPTY_VALUES or value.lower() in _IGNORE_CABLE_LABELS: + value = str(source["id"]) + + field.set_nautobot_value(target, value) + + field.set_importer(cable_label_importer) + + +def _get_first_compatible_termination_type(stripped_type: str) -> ContentTypeStr: + """Determine the first compatible termination type for a given termination. 
+
+    This function identifies the first compatible termination type based on the
+    given termination string, falling back to '_FALLBACK_TERMINATION_TYPE' if
+    no compatibility is found.
+
+    Args:
+        stripped_type (str): The termination type with 'dcim.' prefix removed
+
+    Returns:
+        str: The compatible termination type, including its app prefix ('dcim.' or 'circuits.')
+
+    Examples:
+        >>> _get_first_compatible_termination_type("interface")
+        'circuits.circuittermination'
+
+        >>> _get_first_compatible_termination_type("poweroutlet")
+        'dcim.powerport'
+
+        >>> _get_first_compatible_termination_type("unknown")
+        'circuits.circuittermination'
+    """
+
+    def get_type(model_name: str) -> ContentTypeStr:
+        return f"circuits.{model_name}" if model_name in _CIRCUIT_MODELS else f"dcim.{model_name}"
+
+    if stripped_type not in COMPATIBLE_TERMINATION_TYPES:
+        return get_type(_FALLBACK_TERMINATION_TYPE)
+
+    types = COMPATIBLE_TERMINATION_TYPES[stripped_type]
+    if _FALLBACK_TERMINATION_TYPE in types:
+        return get_type(_FALLBACK_TERMINATION_TYPE)
+
+    return get_type(types[0])
+
+
+def _get_termination(uid: Uid, type_: ContentTypeStr, other_type: ContentTypeStr) -> tuple[Uid, ContentTypeStr]:
+    """Determine the appropriate termination for a cable side.
+
+    This function evaluates cable termination data and returns correct termination information
+    based on compatibility rules.
+
+    Args:
+        uid (Uid): UID of the current termination
+        type_ (ContentTypeStr): Type of the current termination
+        other_type (ContentTypeStr): Type of the opposite termination
+
+    Returns:
+        tuple[str, str]: A tuple containing (termination_id, termination_type)
+
+    Examples:
+        >>> _get_termination("123", "dcim.interface", "dcim.frontport")
+        ('123', 'dcim.interface')
+
+        >>> _get_termination("", "dcim.interface", "dcim.poweroutlet")
+        ('dummy', 'dcim.powerport')
+
+        >>> _get_termination("123", "", "dcim.frontport")
+        ('dummy', 'circuits.circuittermination')
+
+        >>> _get_termination("123", "dcim.interface", "")
+        ('123', 'dcim.interface')
+
+        >>> _get_termination("", "", "")
+        ('dummy', 'circuits.circuittermination')
+
+        >>> _get_termination("456", "dcim.powerport", "dcim.poweroutlet")
+        ('456', 'dcim.powerport')
+    """
+    type_stripped = type_.split(".")[1] if type_ else ""
+    other_stripped = other_type.split(".")[1] if other_type else ""
+    first_compatible = _get_first_compatible_termination_type(other_stripped)
+
+    if not type_:
+        uid = DUMMY_UID
+        type_ = first_compatible
+
+    if not uid:
+        uid = DUMMY_UID
+
+    if not other_type:
+        return uid, type_
+
+    if type_stripped in COMPATIBLE_TERMINATION_TYPES and other_stripped in COMPATIBLE_TERMINATION_TYPES.get(
+        type_stripped, []
+    ):
+        return uid, type_
+
+    return DUMMY_UID, first_compatible
+
+
+def _update_cable_termination(wrapper: SourceModelWrapper, cable: DiffSyncModel, side: str) -> None:
+    """Update cable termination information for a specific side.
+
+    This function retrieves termination data for the specified side of a cable, determines
+    the appropriate termination using _get_termination(), and updates the cable if needed.
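+    If the recorded termination is missing or incompatible with the opposite side, a placeholder
+    termination is created via `import_dummy_object()` and the cable is re-pointed to it.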
+ + Args: + wrapper (SourceModelWrapper): Model wrapper containing field definitions + cable (DiffSyncModel): The cable model to update + side (str): Which side of the cable to update ('a' or 'b') + """ + adapter = wrapper.adapter + + old_uid = getattr(cable, f"termination_{side}_id", "") + old_type_id = getattr(cable, f"termination_{side}_type_id", 0) + old_type = adapter.nautobot.get_content_type_str(old_type_id) if old_type_id else "" + other_type = getattr(cable, f"termination_{'b' if side == 'a' else 'a'}_type", "") + + new_uid, new_type = _get_termination(old_uid, old_type, other_type) + + if new_uid == old_uid and new_type == old_type: + return + + source_field = wrapper.fields[f"termination_{side}_type"] + source_field.set_nautobot_value(cable, adapter.get_nautobot_content_type_uid(new_type)) + + if new_uid == DUMMY_UID: + type_wrapper = adapter.get_or_create_wrapper(new_type) + new_instance = type_wrapper.import_dummy_object( + f"_dcim.cable_{getattr(cable, wrapper.nautobot.pk_field.name)}_side_{side}", + { + "cable": cable.id, # type: ignore + }, + ) + new_uid = new_instance.id # type: ignore + cable_id = type_wrapper.get_pk_from_uid(new_uid) + wrapper.add_reference(type_wrapper, cable_id) + + source_field = wrapper.fields[f"termination_{side}_id"] + source_field.set_nautobot_value(cable, cable_id) + + source_field.add_issue( + "UpdatedCableTermination", + f"Cable termination {side.upper()} updated from {old_uid}, {old_type} to {new_uid}, {new_type}", + cable, + ) + + +def create_missing_cable_terminations(adapter: SourceAdapter) -> None: + """Fix cables by ensuring proper terminations. + + This function processes all cables from the source adapter and validates/fixes + termination information for both sides of each cable. + + Args: + adapter (SourceAdapter): The source adapter containing cable data + """ + adapter.logger.info("Creating missing cable terminations ...") + wrapper = adapter.get_or_create_wrapper("dcim.cable") + + for cable in adapter.get_all(wrapper.nautobot.diffsync_class): + if getattr(cable, "termination_a_id", None) and getattr(cable, "termination_b_id", None): + continue + + adapter.logger.debug(f"Processing missing cable terminations {getattr(cable, 'id')} ...") + + _update_cable_termination(wrapper, cable, "a") + _update_cable_termination(wrapper, cable, "b") + + +def setup(adapter: SourceAdapter) -> None: + """Map NetBox Cable related models to Nautobot.""" + adapter.disable_model("dcim.cablepath", "Recreated in Nautobot on signal when circuit termination is created") + + adapter.configure_model( + "dcim.cable", + fields={ + "label": _define_cable_label, + "termination_a_type": "", + "termination_a_id": "", + "termination_b_type": "", + "termination_b_id": "", + }, + ) + + adapter.configure_model( + "dcim.cabletermination", + extend_content_type="dcim.cable", + pre_import_record=_pre_import_cable_termination, + ) diff --git a/nautobot_netbox_importer/diffsync/models/circuits.py b/nautobot_netbox_importer/diffsync/models/circuits.py index d21a4ab1..0e1f3343 100644 --- a/nautobot_netbox_importer/diffsync/models/circuits.py +++ b/nautobot_netbox_importer/diffsync/models/circuits.py @@ -1,23 +1,52 @@ """NetBox to Nautobot Circuits Models Mapping.""" -from nautobot_netbox_importer.generator import SourceAdapter +from nautobot_netbox_importer.base import DUMMY_UID +from nautobot_netbox_importer.generator import SourceAdapter, fields from .locations import define_location def setup(adapter: SourceAdapter) -> None: """Map NetBox circuits models to Nautobot.""" - 
adapter.configure_model( + + def fill_circuit_termination_dummy_data(record, suffix): + circuit_instance = circuit.import_dummy_object(suffix) + uid = circuit_instance.id # type: ignore + return record.update({"circuit": uid}) + + circuit_type = adapter.configure_model("circuits.circuittype") + circuit_type.cache_record( + { + "id": DUMMY_UID, + } + ) + circuit_provider = adapter.configure_model("circuits.provider") + circuit_provider.cache_record( + { + "id": DUMMY_UID, + } + ) + circuit = adapter.configure_model( "circuits.circuit", fields={ + "provider": "", + "cid": fields.auto_increment(), "type": "circuit_type", "termination_a": "circuit_termination_a", "termination_z": "circuit_termination_z", }, + fill_dummy_data=lambda record, _: record.update( + { + "provider": DUMMY_UID, + "type": DUMMY_UID, + } + ), ) adapter.configure_model( "circuits.circuittermination", fields={ + "circuit": "", "location": define_location, }, + fill_dummy_data=fill_circuit_termination_dummy_data, ) diff --git a/nautobot_netbox_importer/diffsync/models/content_types.py b/nautobot_netbox_importer/diffsync/models/content_types.py new file mode 100644 index 00000000..beaaa09d --- /dev/null +++ b/nautobot_netbox_importer/diffsync/models/content_types.py @@ -0,0 +1,46 @@ +"""NetBox to Nautobot Base Models Mapping.""" + +from diffsync.enum import DiffSyncModelFlags + +from nautobot_netbox_importer.base import RecordData +from nautobot_netbox_importer.generator import DiffSyncBaseModel, SourceAdapter, SourceField + + +def setup(adapter: SourceAdapter) -> None: + """Map NetBox content types to Nautobot. + + It's vital to map NetBox content types to Nautobot content types properly. + + Automatically calculate NetBox content type IDs, if not provided, based on the order of the content types. 
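+
+    For example, when the exported `contenttypes.contenttype` records carry no `id` field,
+    the first record is assigned id 1, the second id 2, and so on, matching the `netbox["id"]`
+    counter used below.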
+ """ + netbox = {"id": 0} + + def define_app_label(field: SourceField) -> None: + def content_types_mapper_importer(source: RecordData, target: DiffSyncBaseModel) -> None: + app_label = source["app_label"] + model = source["model"] + netbox["id"] += 1 + uid = id_field.get_source_value(source) + if uid: + if uid != netbox["id"]: + raise ValueError(f"Content type id mismatch: {uid} != {netbox['id']}") + else: + uid = netbox["id"] + + wrapper = adapter.get_or_create_wrapper(f"{app_label}.{model}") + adapter.content_type_ids_mapping[uid] = wrapper + field.set_nautobot_value(target, app_label) + + field.set_importer(content_types_mapper_importer) + + content_type_wrapper = adapter.configure_model( + "contenttypes.ContentType", + identifiers=["app_label", "model"], + flags=DiffSyncModelFlags.IGNORE, + nautobot_flags=DiffSyncModelFlags.IGNORE, + fields={ + "app_label": define_app_label, + }, + ) + + id_field = content_type_wrapper.fields["id"] diff --git a/nautobot_netbox_importer/diffsync/models/custom_fields.py b/nautobot_netbox_importer/diffsync/models/custom_fields.py index 2d2e59fe..5e5dd2f7 100644 --- a/nautobot_netbox_importer/diffsync/models/custom_fields.py +++ b/nautobot_netbox_importer/diffsync/models/custom_fields.py @@ -8,7 +8,7 @@ EMPTY_VALUES, DiffSyncBaseModel, ImporterPass, - PreImportResult, + PreImportRecordResult, SourceAdapter, SourceField, fields, @@ -95,14 +95,14 @@ def setup(adapter: SourceAdapter) -> None: """Map NetBox custom fields to Nautobot.""" choice_sets = {} - def create_choice_set(source: RecordData, importer_pass: ImporterPass) -> PreImportResult: + def create_choice_set(source: RecordData, importer_pass: ImporterPass) -> PreImportRecordResult: if importer_pass == ImporterPass.DEFINE_STRUCTURE: choice_sets[source.get("id")] = [ *_convert_choices(source.get("base_choices")), *_convert_choices(source.get("extra_choices")), ] - return PreImportResult.USE_RECORD + return PreImportRecordResult.USE_RECORD def define_choice_set(field: SourceField) -> None: def choices_importer(source: RecordData, target: DiffSyncBaseModel) -> None: @@ -144,7 +144,7 @@ def create_choices(choices: list, custom_field_uid: Uid) -> None: # Defined in NetBox but not in Nautobot adapter.configure_model( "extras.CustomFieldChoiceSet", - pre_import=create_choice_set, + pre_import_record=create_choice_set, ) adapter.configure_model( diff --git a/nautobot_netbox_importer/diffsync/models/dcim.py b/nautobot_netbox_importer/diffsync/models/dcim.py index b6c6c1d1..f07ab00a 100644 --- a/nautobot_netbox_importer/diffsync/models/dcim.py +++ b/nautobot_netbox_importer/diffsync/models/dcim.py @@ -4,7 +4,7 @@ from uuid import UUID from nautobot_netbox_importer.base import RecordData -from nautobot_netbox_importer.generator import DiffSyncBaseModel, PreImportResult, SourceAdapter, SourceField, fields +from nautobot_netbox_importer.generator import DiffSyncBaseModel, SourceAdapter, SourceField, fields from .locations import define_location @@ -39,18 +39,8 @@ def units_importer(source: RecordData, target: DiffSyncBaseModel) -> None: field.set_importer(units_importer) -def _pre_import_cable_termination(source: RecordData, _) -> PreImportResult: - cable_end = source.pop("cable_end").lower() - source["id"] = source.pop("cable") - source[f"termination_{cable_end}_type"] = source.pop("termination_type") - source[f"termination_{cable_end}_id"] = source.pop("termination_id") - - return PreImportResult.USE_RECORD - - def setup(adapter: SourceAdapter) -> None: """Map NetBox DCIM models to Nautobot.""" - 
adapter.disable_model("dcim.cablepath", "Recreated in Nautobot on signal when circuit termination is created") adapter.configure_model( "dcim.rackreservation", fields={ @@ -65,12 +55,6 @@ def setup(adapter: SourceAdapter) -> None: "role": fields.role(adapter, "dcim.rackrole"), }, ) - adapter.configure_model("dcim.cable") - adapter.configure_model( - "dcim.cabletermination", - extend_content_type="dcim.cable", - pre_import=_pre_import_cable_termination, - ) adapter.configure_model( "dcim.interface", fields={ @@ -185,7 +169,7 @@ def fix_power_feed_locations(adapter: SourceAdapter) -> None: if not isinstance(location_uid, UUID): raise TypeError(f"Location UID must be UUID, got {type(location_uid)}") - target.location_id = location_uid + target.location_id = location_uid # type: ignore[assignment] adapter.update(target) # Need to update references, to properly update `content_types` fields diff --git a/nautobot_netbox_importer/diffsync/models/ipam.py b/nautobot_netbox_importer/diffsync/models/ipam.py index f81e24ed..9af9fe39 100644 --- a/nautobot_netbox_importer/diffsync/models/ipam.py +++ b/nautobot_netbox_importer/diffsync/models/ipam.py @@ -1,13 +1,17 @@ """NetBox to Nautobot IPAM Models Mapping.""" +import netaddr from nautobot.ipam.models import get_default_namespace from nautobot_netbox_importer.base import RecordData from nautobot_netbox_importer.generator import ( DiffSyncBaseModel, + ImporterPass, InvalidChoiceValueIssue, + PreImportRecordResult, SourceAdapter, SourceField, + SourceModelWrapper, fields, ) @@ -26,24 +30,110 @@ def _fhrp_protocol_fallback(field: SourceField, source: RecordData, target: Diff raise InvalidChoiceValueIssue(field, value, target_value) +def _assign_ipaddress(source: RecordData, ipaddresstointerface: SourceModelWrapper) -> None: + assigned_object_type = source.get("assigned_object_type") + assigned_object_id = source.get("assigned_object_id") + if not assigned_object_type or not assigned_object_id: + return + + if isinstance(assigned_object_type, list): + assigned_object_type = ".".join(assigned_object_type) + + if assigned_object_type != "dcim.interface": + return + + ipaddress_id = source["id"] + ipaddresstointerface.import_record( + { + "id": ipaddress_id, + "ip_address": ipaddress_id, + "interface": assigned_object_id, + } + ) + + +def _add_missing_prefix(source: RecordData, prefix_wrapper: SourceModelWrapper) -> None: + address = source.get("address", "") + ip_network = netaddr.IPNetwork(address) + prefix_cidr = str(ip_network.cidr) + prefix_pk = prefix_wrapper.find_pk_from_uid(prefix_cidr) + if prefix_pk: + return + + instance = prefix_wrapper.import_record( + { + "id": prefix_cidr, + "prefix": prefix_cidr, + "status": "tbd", + } + ) + prefix_wrapper.nautobot.add_issue("CreatedMissingPrefix", diffsync_instance=instance) + + +def _deduplicate_prefix( + prefix: SourceModelWrapper, wrapper: SourceModelWrapper, source: RecordData, importer_pass: ImporterPass +) -> PreImportRecordResult: + """Pre-cache prefix prefixes UUIDs to deduplicate them.""" + if importer_pass == ImporterPass.DEFINE_STRUCTURE: + cidr = source["prefix"] + uuid = prefix.get_pk_from_uid(cidr) + if prefix.is_pk_cached(uuid): + prefix.nautobot.add_issue( + "DuplicatePrefix", + f"Duplicate prefix `{cidr}` found, merging `{wrapper.content_type}:{source['id']}`", + uid=uuid, + data=source, + ) + + wrapper.cache_record_uids(source, uuid) + + return PreImportRecordResult.USE_RECORD + + def setup(adapter: SourceAdapter) -> None: """Map NetBox IPAM models to Nautobot.""" + options = 
getattr(adapter, "options", {}) + deduplicate_prefixes = getattr(options, "deduplicate_prefixes", False) + + def pre_import_ipaddress(source: RecordData, importer_pass: ImporterPass) -> PreImportRecordResult: + if importer_pass == ImporterPass.DEFINE_STRUCTURE: + host = source["address"].split("/")[0] + ipaddress.cache_record_uids(source, ipaddress.get_pk_from_uid(host)) + + return PreImportRecordResult.USE_RECORD + + def post_import_ipaddress(source: RecordData, _) -> None: + _assign_ipaddress(source, ipaddresstointerface) + _add_missing_prefix(source, prefix) + ipaddress = adapter.configure_model( "ipam.ipaddress", + pre_import_record=pre_import_ipaddress if deduplicate_prefixes else None, + post_import_record=post_import_ipaddress, fields={ "role": fields.role(adapter, "ipam.role"), }, ) ipaddress.nautobot.set_instance_defaults(namespace=get_default_namespace()) - adapter.configure_model( + + def pre_import_prefix(source: RecordData, importer_pass: ImporterPass) -> PreImportRecordResult: + return _deduplicate_prefix(prefix, prefix, source, importer_pass) + + prefix = adapter.configure_model( "ipam.prefix", + pre_import_record=pre_import_prefix, fields={ "location": define_location, "role": fields.role(adapter, "ipam.role"), }, ) - adapter.configure_model( + + def pre_import_aggregate(source: RecordData, importer_pass: ImporterPass) -> PreImportRecordResult: + return _deduplicate_prefix(prefix, aggregate, source, importer_pass) + + aggregate = adapter.configure_model( "ipam.aggregate", + pre_import_record=pre_import_aggregate if deduplicate_prefixes else None, nautobot_content_type="ipam.prefix", ) adapter.configure_model( @@ -61,3 +151,10 @@ def setup(adapter: SourceAdapter) -> None: "protocol": fields.fallback(callback=_fhrp_protocol_fallback), }, ) + ipaddresstointerface = adapter.configure_model( + "ipam.ipaddresstointerface", + fields={ + "ip_address": "ip_address", + "interface": "interface", + }, + ) diff --git a/nautobot_netbox_importer/diffsync/models/locations.py b/nautobot_netbox_importer/diffsync/models/locations.py index d4e01ce5..4a6ce91e 100644 --- a/nautobot_netbox_importer/diffsync/models/locations.py +++ b/nautobot_netbox_importer/diffsync/models/locations.py @@ -175,9 +175,11 @@ def forward_references(wrapper: SourceModelWrapper, references: SourceReferences "dcim.SiteGroup", nautobot_content_type="dcim.LocationType", fields={ - "parent": fields.constant(region_type_uid, reference=location_type_wrapper) - if sitegroup_parent_always_region - else "parent", + "parent": ( + fields.constant(region_type_uid, reference=location_type_wrapper) + if sitegroup_parent_always_region + else "parent" + ), "nestable": fields.constant(True), }, ) diff --git a/nautobot_netbox_importer/diffsync/models/object_change.py b/nautobot_netbox_importer/diffsync/models/object_change.py index 27752008..901a0341 100644 --- a/nautobot_netbox_importer/diffsync/models/object_change.py +++ b/nautobot_netbox_importer/diffsync/models/object_change.py @@ -1,23 +1,23 @@ """NetBox to Nautobot Object Change Model Mapping.""" from nautobot_netbox_importer.base import RecordData -from nautobot_netbox_importer.generator import ImporterPass, PreImportResult, SourceAdapter, fields +from nautobot_netbox_importer.generator import ImporterPass, PreImportRecordResult, SourceAdapter, fields def setup(adapter: SourceAdapter) -> None: """Map NetBox object change to Nautobot.""" - def skip_disabled_object_types(source: RecordData, importer_pass: ImporterPass) -> PreImportResult: + def skip_disabled_object_types(source: 
RecordData, importer_pass: ImporterPass) -> PreImportRecordResult: """Disabled object types are not in Nautobot and should be skipped.""" if importer_pass != ImporterPass.IMPORT_DATA: - return PreImportResult.USE_RECORD + return PreImportRecordResult.USE_RECORD object_type = source.get("changed_object_type", None) wrapper = adapter.get_or_create_wrapper(object_type) - return PreImportResult.SKIP_RECORD if wrapper.disable_reason else PreImportResult.USE_RECORD + return PreImportRecordResult.SKIP_RECORD if wrapper.disable_reason else PreImportRecordResult.USE_RECORD adapter.configure_model( "extras.ObjectChange", - pre_import=skip_disabled_object_types, + pre_import_record=skip_disabled_object_types, disable_related_reference=True, fields={ "postchange_data": "object_data", diff --git a/nautobot_netbox_importer/diffsync/models/tags.py b/nautobot_netbox_importer/diffsync/models/tags.py new file mode 100644 index 00000000..fecf4336 --- /dev/null +++ b/nautobot_netbox_importer/diffsync/models/tags.py @@ -0,0 +1,137 @@ +"""NetBox to Nautobot Tags related Models Mapping.""" + +from packaging.version import Version + +from nautobot_netbox_importer.base import RecordData, Uid +from nautobot_netbox_importer.diffsync.adapters import NetBoxAdapter +from nautobot_netbox_importer.generator import DiffSyncBaseModel, SourceAdapter, SourceField +from nautobot_netbox_importer.generator.base import source_pk_to_uuid + + +def _setup_4(adapter: SourceAdapter) -> None: + def get_tag_pk_from_data(source: RecordData) -> Uid: + """Get the primary key from the data.""" + name = name_field.get_source_value(source) + if not name: + raise ValueError("Missing name for tag") + + return source_pk_to_uuid("extras.tag", name) + + def get_tagged_item_pk_from_data(source: RecordData) -> Uid: + """Get the primary key from the data.""" + content_type = content_type_field.get_source_value(source) + object_id = object_id_field.get_source_value(source) + tag = tag_field.get_source_value(source) + if isinstance(tag, list): + tag = tag[0] + + return source_pk_to_uuid("extras.taggeditem", f"{':'.join(content_type)}.{object_id}.{tag}") + + def define_object(field: SourceField) -> None: + def tagged_object_importer(source: RecordData, target: DiffSyncBaseModel) -> None: + object_id = field.get_source_value(source) + if not object_id: + return + + tag = tag_field.get_source_value(source) + if isinstance(tag, list): + if len(tag) == 0: + return + + if len(tag) > 1: + tag_field.add_issue( + "MultipleTags", + f"Multiple tags found, importing only the first one: {tag}", + target=target, + ) + + tag = tag[0] + + content_type = content_type_field.get_source_value(source) + if not tag or not content_type: + raise ValueError(f"Missing content_type or tag for tagged object {object_id}") + + tag_uuid = source_pk_to_uuid("extras.tag", tag) + related_wrapper = adapter.get_or_create_wrapper(content_type) + result = related_wrapper.get_pk_from_uid(object_id) + field.set_nautobot_value(target, result) + tag_field.set_nautobot_value(target, tag_uuid) + content_type_field.set_nautobot_value(target, related_wrapper.nautobot.content_type_instance.pk) + related_wrapper.add_reference(tag_wrapper, tag_uuid) + + field.set_importer(tagged_object_importer) + field.handle_sibling("tag", "tag") + field.handle_sibling("content_type", "content_type") + + tag_wrapper = adapter.configure_model( + "extras.Tag", + get_pk_from_data=get_tag_pk_from_data, + fields={ + "name": "", + "object_types": "content_types", + }, + ) + name_field = tag_wrapper.fields["name"] + + 
tagged_item_wrapper = adapter.configure_model( + "extras.TaggedItem", + get_pk_from_data=get_tagged_item_pk_from_data, + fields={ + "content_type": "", + "object_id": define_object, + "tag": "", + }, + ) + content_type_field = tagged_item_wrapper.fields["content_type"] + object_id_field = tagged_item_wrapper.fields["object_id"] + tag_field = tagged_item_wrapper.fields["tag"] + + +def _setup_3(adapter: SourceAdapter) -> None: + def define_tagged_object(field: SourceField) -> None: + wrapper = field.wrapper + adapter = wrapper.adapter + tag_wrapper = adapter.get_or_create_wrapper("extras.tag") + + def tagged_object_importer(source: RecordData, target: DiffSyncBaseModel) -> None: + object_id = field.get_source_value(source) + if not object_id: + return + + tag = tag_field.get_source_value(source) + content_type = content_type_field.get_source_value(source) + if not tag or not content_type: + raise ValueError(f"Missing content_type or tag for tagged object {object_id}") + + tag_uuid = tag_wrapper.get_pk_from_uid(tag) + related_wrapper = adapter.get_or_create_wrapper(content_type) + result = related_wrapper.get_pk_from_uid(object_id) + field.set_nautobot_value(target, result) + tag_field.set_nautobot_value(target, tag_uuid) + content_type_field.set_nautobot_value(target, related_wrapper.nautobot.content_type_instance.pk) + related_wrapper.add_reference(tag_wrapper, tag_uuid) + + field.set_importer(tagged_object_importer) + tag_field = field.handle_sibling("tag", "tag") + content_type_field = field.handle_sibling("content_type", "content_type") + + adapter.configure_model( + "extras.tag", + fields={ + "object_types": "content_types", + }, + ) + adapter.configure_model( + "extras.TaggedItem", + fields={ + "object_id": define_tagged_object, + }, + ) + + +def setup(adapter: NetBoxAdapter) -> None: + """Map NetBox tags related models to Nautobot.""" + if adapter.options.netbox_version < Version("4"): + _setup_3(adapter) + else: + _setup_4(adapter) diff --git a/nautobot_netbox_importer/generator/__init__.py b/nautobot_netbox_importer/generator/__init__.py index c1625421..60fde09e 100644 --- a/nautobot_netbox_importer/generator/__init__.py +++ b/nautobot_netbox_importer/generator/__init__.py @@ -7,7 +7,7 @@ DiffSyncBaseModel, ImporterPass, InvalidChoiceValueIssue, - PreImportResult, + PreImportRecordResult, SourceAdapter, SourceContentType, SourceDataGenerator, @@ -26,7 +26,7 @@ "InternalFieldType", "InvalidChoiceValueIssue", "NautobotAdapter", - "PreImportResult", + "PreImportRecordResult", "SourceAdapter", "SourceContentType", "SourceDataGenerator", diff --git a/nautobot_netbox_importer/generator/base.py b/nautobot_netbox_importer/generator/base.py index dcbe5b50..edc39b90 100644 --- a/nautobot_netbox_importer/generator/base.py +++ b/nautobot_netbox_importer/generator/base.py @@ -3,7 +3,7 @@ import datetime import decimal from enum import Enum -from typing import Any, Mapping, Optional, Tuple, Type +from typing import Any, Iterable, Mapping, Optional, Tuple, Type from uuid import UUID, uuid5 from dateutil import parser as datetime_parser @@ -16,6 +16,7 @@ from django.db.models.options import Options as _DjangoModelMeta from nautobot.core.models import BaseModel from pydantic import Field as _PydanticField +from timezone_field import TimeZoneField from nautobot_netbox_importer.base import ContentTypeStr, Uid @@ -57,11 +58,12 @@ class InternalFieldType(Enum): TEXT_FIELD = "TextField" TREE_NODE_FOREIGN_KEY = "TreeNodeForeignKey" UUID_FIELD = "UUIDField" + TIMEZONE_FIELD = "TimeZoneField" 
StrToInternalFieldType = {item.value: item for item in InternalFieldType.__members__.values()} -INTERNAL_TYPE_TO_ANNOTATION: Mapping[InternalFieldType, type] = { +INTERNAL_TYPE_TO_ANNOTATION: Mapping[InternalFieldType, Any] = { InternalFieldType.AUTO_FIELD: int, InternalFieldType.BIG_AUTO_FIELD: int, InternalFieldType.BIG_INTEGER_FIELD: int, @@ -81,8 +83,46 @@ class InternalFieldType(Enum): InternalFieldType.SMALL_INTEGER_FIELD: int, InternalFieldType.TEXT_FIELD: str, InternalFieldType.UUID_FIELD: UUID, + InternalFieldType.TIMEZONE_FIELD: datetime.tzinfo, } +INTERNAL_AUTO_INC_TYPES: Iterable[InternalFieldType] = ( + InternalFieldType.AUTO_FIELD, + InternalFieldType.BIG_AUTO_FIELD, +) + +INTERNAL_DONT_IMPORT_TYPES: Iterable[InternalFieldType] = ( + InternalFieldType.NOT_FOUND, + InternalFieldType.PRIVATE_PROPERTY, + InternalFieldType.READ_ONLY_PROPERTY, +) + +INTERNAL_INTEGER_FIELDS: Iterable[InternalFieldType] = ( + InternalFieldType.AUTO_FIELD, + InternalFieldType.BIG_AUTO_FIELD, + InternalFieldType.BIG_INTEGER_FIELD, + InternalFieldType.INTEGER_FIELD, + InternalFieldType.POSITIVE_INTEGER_FIELD, + InternalFieldType.POSITIVE_SMALL_INTEGER_FIELD, + InternalFieldType.SMALL_INTEGER_FIELD, +) + +INTERNAL_REFERENCE_TYPES: Iterable[InternalFieldType] = ( + InternalFieldType.FOREIGN_KEY, + InternalFieldType.FOREIGN_KEY_WITH_AUTO_RELATED_NAME, + InternalFieldType.MANY_TO_MANY_FIELD, + InternalFieldType.ONE_TO_ONE_FIELD, + InternalFieldType.ROLE_FIELD, + InternalFieldType.STATUS_FIELD, + InternalFieldType.TREE_NODE_FOREIGN_KEY, +) + +INTERNAL_STRING_FIELDS: Iterable[InternalFieldType] = ( + InternalFieldType.CHAR_FIELD, + InternalFieldType.SLUG_FIELD, + InternalFieldType.TEXT_FIELD, +) + # Fields to auto add to source and target wrappers AUTO_ADD_FIELDS = ( "content_types", @@ -101,6 +141,7 @@ class InternalFieldType(Enum): ) +# pylint: disable=too-many-return-statements def get_nautobot_field_and_type( model: NautobotBaseModelType, field_name: str, @@ -125,6 +166,9 @@ def get_nautobot_field_and_type( if field_name == "_custom_field_data": return field, InternalFieldType.CUSTOM_FIELD_DATA + if isinstance(field, TimeZoneField): + return field, InternalFieldType.TIMEZONE_FIELD + try: return field, StrToInternalFieldType[field.get_internal_type()] except KeyError as error: diff --git a/nautobot_netbox_importer/generator/fields.py b/nautobot_netbox_importer/generator/fields.py index edaac635..0239de6d 100644 --- a/nautobot_netbox_importer/generator/fields.py +++ b/nautobot_netbox_importer/generator/fields.py @@ -3,7 +3,7 @@ from typing import Any, Dict, Optional from uuid import UUID -from .base import EMPTY_VALUES, ContentTypeStr, Uid +from .base import EMPTY_VALUES, INTERNAL_INTEGER_FIELDS, INTERNAL_STRING_FIELDS, ContentTypeStr, Uid from .nautobot import DiffSyncBaseModel from .source import ( FallbackValueIssue, @@ -11,7 +11,7 @@ ImporterPass, InternalFieldType, InvalidChoiceValueIssue, - PreImportResult, + PreImportRecordResult, RecordData, SourceAdapter, SourceContentType, @@ -21,6 +21,8 @@ SourceModelWrapper, ) +_AUTO_INCREMENTS = {} + def default(default_value: Any, nautobot_name: FieldName = "") -> SourceFieldDefinition: """Create a default field definition. @@ -107,7 +109,7 @@ def role( e.g., RackRole with `name = "Network"` and DeviceRole with `name = "Network"` to avoid duplicates. 
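+
+    Example (as used by this importer's IPAM mapping):
+
+        fields={
+            "role": fields.role(adapter, "ipam.role"),
+        },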
""" - def cache_roles(source: RecordData, importer_pass: ImporterPass) -> PreImportResult: + def cache_roles(source: RecordData, importer_pass: ImporterPass) -> PreImportRecordResult: if importer_pass == ImporterPass.DEFINE_STRUCTURE: name = source.get("name", "").capitalize() if not name: @@ -117,12 +119,12 @@ def cache_roles(source: RecordData, importer_pass: ImporterPass) -> PreImportRes if not uid: _ROLE_NAME_TO_UID_CACHE[name] = nautobot_uid - return PreImportResult.USE_RECORD + return PreImportRecordResult.USE_RECORD role_wrapper = adapter.configure_model( source_content_type, nautobot_content_type="extras.role", - pre_import=cache_roles, + pre_import_record=cache_roles, identifiers=("name",), fields={ # Include color to allow setting the default Nautobot value, import fails without it. @@ -248,3 +250,53 @@ def define_disable(field: SourceField) -> None: field.disable(reason) return define_disable + + +def auto_increment(prefix="", nautobot_name: FieldName = "") -> SourceFieldDefinition: + """Auto increment field value, if the source value is empty. + + Use to set the field value to a unique auto incremented value. + + Supports string and integer fields. + + Args: + prefix (str): Optional prefix to be added to the auto incremented value. Valid for string fields only. + nautobot_name (str): Optional name for the Nautobot field. + + Returns: + A function that defines an auto increment field importer. + """ + + def define_auto_increment(field: SourceField) -> None: + key = f"{field.wrapper.content_type}.{field.name}_prefix" + + original_importer = field.set_importer(nautobot_name=nautobot_name) + if not original_importer: + return + + if field.nautobot.internal_type in INTERNAL_INTEGER_FIELDS: + if prefix: + raise ValueError("Prefix is not supported for integer fields") + + if field.nautobot.internal_type not in INTERNAL_STRING_FIELDS: + raise ValueError(f"Field {field.name} is not a string or integer field") + + def auto_increment_importer(source: RecordData, target: DiffSyncBaseModel) -> None: + value = field.get_source_value(source) + if value not in EMPTY_VALUES: + original_importer(source, target) + return + + if key not in _AUTO_INCREMENTS: + _AUTO_INCREMENTS[key] = 0 + _AUTO_INCREMENTS[key] += 1 + + value = _AUTO_INCREMENTS[key] + if field.nautobot.internal_type in INTERNAL_STRING_FIELDS: + value = f"{prefix}{value}" + + field.set_nautobot_value(target, value) + + field.set_importer(auto_increment_importer, nautobot_name=nautobot_name, override=True) + + return define_auto_increment diff --git a/nautobot_netbox_importer/generator/nautobot.py b/nautobot_netbox_importer/generator/nautobot.py index a729de01..9146d182 100644 --- a/nautobot_netbox_importer/generator/nautobot.py +++ b/nautobot_netbox_importer/generator/nautobot.py @@ -22,6 +22,10 @@ from .base import ( AUTO_ADD_FIELDS, EMPTY_VALUES, + INTERNAL_AUTO_INC_TYPES, + INTERNAL_DONT_IMPORT_TYPES, + INTERNAL_INTEGER_FIELDS, + INTERNAL_REFERENCE_TYPES, INTERNAL_TYPE_TO_ANNOTATION, BaseAdapter, ContentTypeStr, @@ -39,38 +43,6 @@ ) from .exceptions import NautobotModelNotFound -_AUTO_INCREMENT_TYPES: Iterable[InternalFieldType] = ( - InternalFieldType.AUTO_FIELD, - InternalFieldType.BIG_AUTO_FIELD, -) - -_INTEGER_TYPES: Iterable[InternalFieldType] = ( - InternalFieldType.AUTO_FIELD, - InternalFieldType.BIG_AUTO_FIELD, - InternalFieldType.BIG_INTEGER_FIELD, - InternalFieldType.INTEGER_FIELD, - InternalFieldType.POSITIVE_INTEGER_FIELD, - InternalFieldType.POSITIVE_SMALL_INTEGER_FIELD, - InternalFieldType.SMALL_INTEGER_FIELD, -) - 
-_REFERENCE_TYPES: Iterable[InternalFieldType] = ( - InternalFieldType.FOREIGN_KEY, - InternalFieldType.FOREIGN_KEY_WITH_AUTO_RELATED_NAME, - InternalFieldType.MANY_TO_MANY_FIELD, - InternalFieldType.ONE_TO_ONE_FIELD, - InternalFieldType.ROLE_FIELD, - InternalFieldType.STATUS_FIELD, - InternalFieldType.TREE_NODE_FOREIGN_KEY, -) - -_DONT_IMPORT_TYPES: Iterable[InternalFieldType] = ( - InternalFieldType.NOT_FOUND, - InternalFieldType.PRIVATE_PROPERTY, - InternalFieldType.READ_ONLY_PROPERTY, -) - - # Helper to determine the import order of models. # Due to dependencies among Nautobot models, certain models must be imported first to ensure successful `instance.save()` calls without errors. # Models listed here take precedence over others, which are sorted by the order they're introduced to the importer. @@ -141,6 +113,15 @@ def tag_issues(self, summary: ImportSummary) -> None: for item in summary.nautobot: self.get_or_create_wrapper(item.content_type).tag_issues(item.issues) + def get_content_type_str(self, content_type_uid: Uid) -> ContentTypeStr: + """Find Nautobot content type string for a given UID.""" + content_type_wrapper = self.get_or_create_wrapper("contenttypes.contenttype") + instance = content_type_wrapper.find_or_create({"id": content_type_uid}) + if not instance: + raise ValueError(f"Content type {content_type_uid} not found") + + return f"{getattr(instance, 'app_label')}.{getattr(instance, 'model')}" + class NautobotField: """Wrapper for a Nautobot field.""" @@ -175,17 +156,17 @@ def related_meta(self) -> DjangoModelMeta: @property def is_reference(self) -> bool: """Check if the field is a reference.""" - return self.internal_type in _REFERENCE_TYPES + return self.internal_type in INTERNAL_REFERENCE_TYPES @property def is_integer(self) -> bool: """Check if the field is an integer.""" - return self.internal_type in _INTEGER_TYPES + return self.internal_type in INTERNAL_INTEGER_FIELDS @property def is_auto_increment(self) -> bool: """Check if the field is an integer.""" - return self.internal_type in _AUTO_INCREMENT_TYPES + return self.internal_type in INTERNAL_AUTO_INC_TYPES @property def is_content_type(self) -> bool: @@ -198,7 +179,7 @@ def is_content_type(self) -> bool: @property def can_import(self) -> bool: """Determine if this field can be imported.""" - return self.internal_type not in _DONT_IMPORT_TYPES + return self.internal_type not in INTERNAL_DONT_IMPORT_TYPES NautobotFields = MutableMapping[FieldName, NautobotField] @@ -329,6 +310,14 @@ def diffsync_class(self) -> Type["DiffSyncBaseModel"]: return result + @property + def content_type_id(self) -> Optional[int]: + """Get the Nautobot content type ID.""" + if self.disabled: + return None + + return self.content_type_instance.pk # type: ignore + def get_summary(self) -> NautobotModelSummary: """Get the summary.""" issues = sorted(self.get_importer_issues()) @@ -337,7 +326,7 @@ def get_summary(self) -> NautobotModelSummary: return NautobotModelSummary( content_type=self.content_type, - content_type_id=None if self.disabled else self.content_type_instance.pk, + content_type_id=self.content_type_id, stats=self.stats, issues=issues, flags=str(self.flags), @@ -543,7 +532,7 @@ def add_field(self, field_name: FieldName) -> NautobotField: nautobot_field, internal_type = get_nautobot_field_and_type(self.model, field_name) if ( - internal_type in _REFERENCE_TYPES + internal_type in INTERNAL_REFERENCE_TYPES and internal_type != InternalFieldType.MANY_TO_MANY_FIELD and not field_name.endswith("_id") ): diff --git 
a/nautobot_netbox_importer/generator/source.py b/nautobot_netbox_importer/generator/source.py index 3cca1714..4a5a39df 100644 --- a/nautobot_netbox_importer/generator/source.py +++ b/nautobot_netbox_importer/generator/source.py @@ -16,10 +16,12 @@ Optional, OrderedDict, Set, + Sized, Union, ) from uuid import UUID +import pytz from diffsync import DiffSyncModel from diffsync.enum import DiffSyncModelFlags from nautobot.core.models.tree_queries import TreeModel @@ -78,15 +80,6 @@ def __init__(self, message: str, field: "SourceField", issue_type=""): self.issue_type = issue_type -class UpdatedValueIssue(SourceFieldIssue): - """Raised when a value is updated.""" - - def __init__(self, field: "SourceField", source_value: Any, target_value: Any): - """Initialize the exception.""" - message = f"Value `{source_value}` updated to `{target_value}`" - super().__init__(message, field) - - class FallbackValueIssue(SourceFieldIssue): """Raised when a fallback value is used.""" @@ -130,7 +123,7 @@ class ImporterPass(Enum): IMPORT_DATA = 2 -class PreImportResult(Enum): +class PreImportRecordResult(Enum): """Pre Import Response.""" SKIP_RECORD = False @@ -148,12 +141,16 @@ class SourceFieldSource(Enum): IDENTIFIER = auto() # Fields used as identifiers -PreImport = Callable[[RecordData, ImporterPass], PreImportResult] +PreImportRecord = Callable[[RecordData, ImporterPass], PreImportRecordResult] +PostImportRecord = Callable[[RecordData, DiffSyncBaseModel], None] SourceDataGenerator = Callable[[], Iterable[SourceRecord]] SourceFieldImporter = Callable[[RecordData, DiffSyncBaseModel], None] GetPkFromData = Callable[[RecordData], Uid] SourceFieldImporterFallback = Callable[["SourceField", RecordData, DiffSyncBaseModel, Exception], None] SourceFieldImporterFactory = Callable[["SourceField"], None] +SourceReferences = Dict[Uid, Set["SourceModelWrapper"]] +ForwardReferences = Callable[["SourceModelWrapper", SourceReferences], None] +SourceContentType = Union[ContentTypeValue, "SourceModelWrapper", NautobotModelWrapper, NautobotBaseModelType] SourceFieldDefinition = Union[ None, # Ignore field FieldName, # Rename field @@ -161,13 +158,42 @@ class SourceFieldSource(Enum): ] -SourceReferences = Dict[Uid, Set["SourceModelWrapper"]] -ForwardReferences = Callable[["SourceModelWrapper", SourceReferences], None] -SourceContentType = Union[ContentTypeValue, "SourceModelWrapper", NautobotModelWrapper, NautobotBaseModelType] +class SourceAdapter(BaseAdapter): + """Source DiffSync Adapter for importing data into Nautobot. + This adapter manages the entire import process from external data sources to Nautobot, + including content type mapping, data transformation, and reference handling. It serves + as the primary engine for the import process, maintaining relationships between source + and target (Nautobot) models. -class SourceAdapter(BaseAdapter): - """Source DiffSync Adapter.""" + Attributes: + get_source_data (SourceDataGenerator): Function that generates source data records + for import, returning an iterable of SourceRecord objects. + + wrappers (OrderedDict): Ordered mapping of content type strings to SourceModelWrapper + objects that handle the adaptation of source data to Nautobot models. + + nautobot (NautobotAdapter): Adapter for interacting with Nautobot models and data. + + content_type_ids_mapping (Dict[int, SourceModelWrapper]): Maps numeric content type + IDs to their corresponding source model wrappers. + + summary (ImportSummary): Collects statistics and information about the import process. 
+ + logger: Logger instance for reporting progress and issues. + + _content_types_back_mapping (Dict[ContentTypeStr, Optional[ContentTypeStr]]): + Maps from Nautobot content types back to source content types. When multiple + source types map to a single Nautobot type, the mapping is set to None. + + The adapter works in phases: + + 1. Model configuration - defining how source models map to Nautobot + 2. First pass - scanning source data to enhance defined structures + 3. Creation of importers - building functions to transform data + 4. Second pass - importing actual data + 5. Post-processing - handling references and finalizing imports + """ def __init__( self, @@ -188,9 +214,6 @@ def __init__( self.content_type_ids_mapping: Dict[int, SourceModelWrapper] = {} self.logger = logger or default_logger self.summary = ImportSummary() - - # From Nautobot to Source content type mapping - # When multiple source content types are mapped to the single nautobot content type, mapping is set to `None` self._content_types_back_mapping: Dict[ContentTypeStr, Optional[ContentTypeStr]] = {} # pylint: disable=too-many-arguments,too-many-branches,too-many-locals @@ -204,15 +227,104 @@ def configure_model( default_reference: Optional[RecordData] = None, flags: Optional[DiffSyncModelFlags] = None, nautobot_flags: Optional[DiffSyncModelFlags] = None, - pre_import: Optional[PreImport] = None, + pre_import_record: Optional[PreImportRecord] = None, + post_import_record: Optional[PostImportRecord] = None, disable_related_reference: Optional[bool] = None, forward_references: Optional[ForwardReferences] = None, fill_dummy_data: Optional[FillDummyData] = None, get_pk_from_data: Optional[GetPkFromData] = None, ) -> "SourceModelWrapper": - """Create if not exist and configure a wrapper for a given source content type. + """Create or configure a wrapper for a source content type. + + This method defines how a source data model maps to a Nautobot model during import, + establishing field mappings, identifiers, processing behaviors, and reference handling. + It serves as the primary configuration point for data adaptation between source and target. + + Args: + content_type (ContentTypeStr): String identifier for the source content type + (e.g., "dcim.device"). Will be converted to lowercase. This is the primary + identifier for the source model in the import process. + + nautobot_content_type (ContentTypeStr): Target Nautobot content type to map to. + If empty, defaults to the source content_type value. Will be converted to + lowercase. Use this when the source model name differs from Nautobot's model. + + extend_content_type (ContentTypeStr): Name of an existing source content type to + extend. When specified, the current model inherits the Nautobot content type + and behaviors from the extended model. Cannot be used with nautobot_content_type. + Useful for creating specialized versions of existing model mappings. + + identifiers (Optional[Iterable[FieldName]]): Collection of field names that uniquely + identify records in this model. Used to match source records with existing + Nautobot records. These fields serve as natural keys for record identification + when primary keys don't match between systems. 
+ + fields (Optional[Mapping[FieldName, SourceFieldDefinition]]): Dictionary mapping + source field names to their definitions, which can be: + + - None: Ignore the field during import + - str: Rename the field to this name in Nautobot + - callable: Custom factory function to create field importer + + These mappings control exactly how each field is transformed during import. + + default_reference (Optional[RecordData]): Record data to use when this model is + referenced but no specific record is provided. Creates a default instance for + references that can be used as fallbacks for required relationships. + + flags (Optional[DiffSyncModelFlags]): DiffSync model flags controlling synchronization + behavior for the source model. These flags affect how changes are detected and + applied during the synchronization process. + + nautobot_flags (Optional[DiffSyncModelFlags]): DiffSync model flags for the target + Nautobot model. Allows different sync behavior between source and target models. + + pre_import_record (Optional[PreImportRecord]): Function called before importing each record. + + Signature: (data: RecordData, pass: ImporterPass) -> PreImportRecordResult + + Can return SKIP_RECORD to exclude records from import. Useful for filtering + or preprocessing data before import. + + post_import_record (Optional[PostImportRecord]): Function called after importing each record. + + Signature: (data: RecordData, model: DiffSyncBaseModel) -> None + + Enables custom post-processing of imported records, such as denormalization + or triggering additional operations. + + disable_related_reference (Optional[bool]): When True, prevents automatic content_type + field references from being created when this model is referenced by other models. + Useful for models that should not participate in automatic reference tracking. + + forward_references (ForwardReferences): Custom function to allow forwarding references to another instance. + + Signature: (wrapper: SourceModelWrapper, references: SourceReferences) -> None + + + fill_dummy_data (Optional[FillDummyData]): Function to populate dummy data for missing + required fields. + + Signature: (data: RecordData, suffix: str) -> None + + Used when creating placeholder objects to satisfy required relationships. + + get_pk_from_data (Optional[GetPkFromData]): Custom function to derive Nautobot's primary key + from source record data. + + Signature: (data: RecordData) -> Uid + + Overrides default primary key generation logic when source data has complex + or non-standard primary key representations. + + Returns: + SourceModelWrapper: The created or updated wrapper for the source content type, + ready for use in the import process with all specified configurations applied. - Create Nautobot content type wrapper as well. + Raises: + ValueError: If both nautobot_content_type and extend_content_type are specified, + or if a content type is already mapped to a different Nautobot content type. + These configurations are mutually exclusive. 
""" content_type = content_type.lower() nautobot_content_type = nautobot_content_type.lower() @@ -256,8 +368,10 @@ def configure_model( wrapper.flags = flags if nautobot_flags is not None: wrapper.nautobot.flags = nautobot_flags - if pre_import: - wrapper.pre_import = pre_import + if pre_import_record: + wrapper.pre_import_record = pre_import_record + if post_import_record: + wrapper.post_import_record = post_import_record if disable_related_reference is not None: wrapper.disable_related_reference = disable_related_reference if forward_references: @@ -313,7 +427,7 @@ def get_or_create_wrapper(self, value: Union[None, SourceContentType]) -> "Sourc if value not in self.content_type_ids_mapping: raise ValueError(f"Content type not found {value}") return self.content_type_ids_mapping[value] - elif isinstance(value, Iterable) and len(value) == 2: # noqa: PLR2004 + elif isinstance(value, Iterable) and isinstance(value, Sized) and len(value) == 2: # noqa: PLR2004 value = ".".join(value).lower() else: raise ValueError(f"Invalid content type {value}") @@ -348,7 +462,7 @@ def get_nautobot_content_type_uid(self, content_type: ContentTypeValue) -> int: def load(self) -> None: """Load data from the source.""" self.import_data() - self.post_import() + self.post_load() def import_data(self) -> None: """Import data from the source.""" @@ -378,9 +492,9 @@ def import_data(self) -> None: for content_type, data in get_source_data(): self.wrappers[content_type].second_pass(data) - def post_import(self) -> None: + def post_load(self) -> None: """Post import processing.""" - while any(wrapper.post_import() for wrapper in self.wrappers.values()): + while any(wrapper.post_process_references() for wrapper in self.wrappers.values()): pass for nautobot_wrapper in self.get_imported_nautobot_wrappers(): @@ -414,7 +528,74 @@ def get_imported_nautobot_wrappers(self) -> Generator[NautobotModelWrapper, None # pylint: disable=too-many-instance-attributes, too-many-public-methods class SourceModelWrapper: - """Definition of a source model mapping to Nautobot model.""" + """Definition of a source model mapping to Nautobot model. + + This class maintains the mapping between a source data model and its corresponding Nautobot model. + It handles field definitions, data transformations, importing records, and tracking references + between models. + + Attributes: + adapter (SourceAdapter): Parent adapter that manages this wrapper and coordinates the overall import process. + + content_type (ContentTypeStr): String identifier for source content type (e.g., "dcim.device"). + + nautobot (NautobotModelWrapper): Wrapper for the target Nautobot model, containing field definitions + and handling interactions with Nautobot's data model. + + identifiers (List[FieldName]): Field names used to uniquely identify records when primary keys don't + match between systems. Acts as natural keys for record identification. + + disable_reason (str): If non-empty, explains why this model is disabled for import. Disabled models + are skipped during the import process. + + disable_related_reference (bool): When True, prevents references processing. + + See `references` for more details. + + extends_wrapper (SourceModelWrapper): Another wrapper this one extends. When specified, this wrapper's + data will be merged into the extended wrapper's instances. + + references (SourceReferences): Cache all referencing content types for each imported instance of this model. + + Used to fill in Nautobot's `content_types` fields during post processing. 
+ + forward_references (ForwardReferences): Custom function to allow forwarding references to another instance. + + See `references` for more details. + + fields (OrderedDict[FieldName, SourceField]): Field definitions for this model, mapping source + field names to their corresponding SourceField objects. + + importers (Set[SourceFieldImporter]): Collection of functions that import fields from source to target. + Each function handles the transformation of a specific field or set of related fields. + + flags (DiffSyncModelFlags): Flags controlling DiffSync behavior for this model, affecting + how changes are detected and applied during synchronization. + + default_reference_uid (Uid): UID for the default record used when referencing this model + but no specific record is provided. + + pre_import_record (PreImportRecord): Function called before importing each record that can + filter or preprocess records before import. + + post_import_record (PostImportRecord): Function called after importing each record that can + perform additional operations or validations. + + stats (SourceModelStats): Statistics tracking for import operations on this model, including + counts of created, imported, and cached records. + + _uid_to_pk_cache (Dict[Uid, Uid]): Cache mapping source UIDs to primary keys for quick lookup + during the import process. + + _cached_data (Dict[Uid, RecordData]): Cache of data records by UID, used for storing records + that might be referenced later but aren't directly imported. + + _fill_dummy_data (FillDummyData): Function to populate dummy data for missing required fields + when creating placeholder objects. + + _get_pk_from_data (GetPkFromData): Custom function to derive Nautobot primary keys from source + record data, allowing for custom key generation strategies. 
+ """ def __init__(self, adapter: SourceAdapter, content_type: ContentTypeStr, nautobot_wrapper: NautobotModelWrapper): """Initialize the SourceModelWrapper.""" @@ -457,7 +638,8 @@ def __init__(self, adapter: SourceAdapter, content_type: ContentTypeStr, nautobo # Source fields defintions self.fields: OrderedDict[FieldName, SourceField] = OrderedDict() - self.pre_import: Optional[PreImport] = None + self.pre_import_record: Optional[PreImportRecord] = None + self.post_import_record: Optional[PostImportRecord] = None if self.disable_reason: self.adapter.logger.debug("Created disabled %s", self) @@ -503,8 +685,8 @@ def cache_record_uids(self, source: RecordData, nautobot_uid: Optional[Uid] = No def first_pass(self, data: RecordData) -> None: """Firts pass of data import.""" - if self.pre_import: - if self.pre_import(data, ImporterPass.DEFINE_STRUCTURE) != PreImportResult.USE_RECORD: + if self.pre_import_record: + if self.pre_import_record(data, ImporterPass.DEFINE_STRUCTURE) != PreImportRecordResult.USE_RECORD: self.stats.first_pass_skipped += 1 return @@ -521,14 +703,17 @@ def second_pass(self, data: RecordData) -> None: if self.disable_reason: return - if self.pre_import: - if self.pre_import(data, ImporterPass.IMPORT_DATA) != PreImportResult.USE_RECORD: + if self.pre_import_record: + if self.pre_import_record(data, ImporterPass.IMPORT_DATA) != PreImportRecordResult.USE_RECORD: self.stats.second_pass_skipped += 1 return self.stats.second_pass_used += 1 - self.import_record(data) + target = self.import_record(data) + + if self.post_import_record: + self.post_import_record(data, target) def get_summary(self, content_type_id) -> SourceModelSummary: """Get a summary of the model.""" @@ -543,10 +728,11 @@ def get_summary(self, content_type_id) -> SourceModelSummary: identifiers=self.identifiers, disable_related_reference=self.disable_related_reference, forward_references=self.forward_references and self.forward_references.__name__ or None, - pre_import=self.pre_import and self.pre_import.__name__ or None, + pre_import=self.pre_import_record and self.pre_import_record.__name__ or None, + post_import=self.post_import_record and self.post_import_record.__name__ or None, fields=sorted(fields, key=lambda field: field.name), flags=str(self.flags), - default_reference_uid=serialize_to_summary(self.default_reference_uid), + default_reference_uid=f"{serialize_to_summary(self.default_reference_uid)}", stats=self.stats, ) @@ -574,7 +760,7 @@ def disable_field(self, field_name: FieldName, reason: str) -> "SourceField": def format_field_name(self, name: FieldName) -> str: """Format a field name for logging.""" - return f"{self.content_type}->{name}" + return f"{self.content_type}.{name}" def add_field(self, name: FieldName, source: SourceFieldSource) -> "SourceField": """Add a field definition for a source field.""" @@ -586,6 +772,7 @@ def add_field(self, name: FieldName, source: SourceFieldSource) -> "SourceField" field = self.fields[name] field.sources.add(source) + return field def create_importers(self) -> None: @@ -629,7 +816,7 @@ def get_pk_from_uid(self, uid: Uid) -> Uid: result = self.extends_wrapper.get_pk_from_uid(uid) else: result = source_pk_to_uuid(self.content_type, uid) - self.nautobot.uid_to_source[str(result)] = f"{self.content_type}:{uid}" + self.nautobot.uid_to_source[f"{result}"] = f"{self.content_type}:{uid}" elif self.nautobot.pk_field.is_auto_increment: self.nautobot.last_id += 1 result = self.nautobot.last_id @@ -785,38 +972,31 @@ def cache_record(self, data: RecordData) -> Uid: return 
uid - def cache_dummy_object(self, suffix: str, data: Union[RecordData, None] = None) -> Uid: + def import_dummy_object(self, suffix: str, data: Union[RecordData, None] = None) -> DiffSyncBaseModel: """Create a dummy object for the given data.""" - uid = f"{DUMMY_UID}{suffix}" - nautobot_uid = self.get_pk_from_uid(uid) - - if nautobot_uid in self._cached_data: - return uid - if not data: data = {} if "id" not in data: - data["id"] = uid + data["id"] = f"{DUMMY_UID}{suffix}" if self.fill_dummy_data: self.fill_dummy_data(data, suffix) - self.cache_record(data) + result = self.import_record(data) self.nautobot.add_issue( "DummyObject", - message="Dummy object cached", - uid=nautobot_uid, - data=data, + message="Dummy object created", + diffsync_instance=result, ) - return uid + return result def set_default_reference(self, data: RecordData) -> None: """Set the default reference to this model.""" self.default_reference_uid = self.cache_record(data) - def post_import(self) -> bool: + def post_process_references(self) -> bool: """Post import processing. Assigns referenced content_types to referencing instances. @@ -845,7 +1025,7 @@ def post_import(self) -> bool: if target_content_types: target_content_types.update(content_types) else: - instance.content_types = content_types + instance.content_types = content_types # type: ignore self.adapter.update(instance) return True @@ -864,7 +1044,44 @@ def add_reference(self, related_wrapper: "SourceModelWrapper", uid: Uid) -> None # pylint: disable=too-many-public-methods class SourceField: - """Source Field.""" + """Represents a field in the source data model and manages its mapping to Nautobot. + + This class handles field definition, transformation, and importing between source + data and Nautobot models. It provides mechanisms for customizing field mappings, + setting importers for different field types, and handling validation issues. + + Attributes: + wrapper (SourceModelWrapper): Reference to the parent wrapper that manages + this field, providing context about the model it belongs to. + + name (FieldName): The name of this field in the source data model. + + definition (SourceFieldDefinition): How this field should be processed, can be: + - None: Field should be ignored + - str: Field should be renamed to this name in Nautobot + - callable: Factory function to create custom field importer + + sources (set): Set of SourceFieldSource enum values indicating the origin of + this field (AUTO, CACHE, DATA, CUSTOM, SIBLING, IDENTIFIER). + + processed (bool): Flag indicating whether this field has been processed during + the import pipeline. + + _nautobot (NautobotField): Reference to the corresponding Nautobot field wrapper + that this source field maps to. + + importer (SourceFieldImporter): Function that handles importing data from source + to target for this field. The specific function depends on the field type. + + default_value (Any): Default value to use when the field is missing in source data. + Often derived from the Nautobot model's field default. + + disable_reason (str): If non-empty, explains why this field is disabled for import. + Disabled fields are skipped during the import process. + + The SourceField acts as a bridge between source data formats and Nautobot's data model, + providing type conversion, validation, and relationship mapping capabilities. 
+ """ def __init__(self, wrapper: SourceModelWrapper, name: FieldName, source: SourceFieldSource): """Initialize the SourceField.""" @@ -879,9 +1096,11 @@ def __init__(self, wrapper: SourceModelWrapper, name: FieldName, source: SourceF self.default_value: Any = None self.disable_reason: str = "" + wrapper.adapter.logger.debug("Creating %s", self) + def __str__(self) -> str: """Return a string representation of the field.""" - return self.wrapper.format_field_name(self.name) + return f"SourceField<{self.wrapper.format_field_name(self.name)}>" @property def nautobot(self) -> NautobotField: @@ -919,6 +1138,8 @@ def handle_sibling(self, sibling: Union["SourceField", FieldName], nautobot_name if isinstance(sibling, FieldName): sibling = self.wrapper.add_field(sibling, SourceFieldSource.SIBLING) + else: + sibling.sources.add(SourceFieldSource.SIBLING) sibling.set_nautobot_field(nautobot_name or self.nautobot.name) sibling.importer = self.importer @@ -1031,6 +1252,8 @@ def set_importer( self.set_m2m_importer() elif internal_type == InternalFieldType.STATUS_FIELD: self.set_status_importer() + elif internal_type == InternalFieldType.TIMEZONE_FIELD: + self.set_timezone_importer() elif self.nautobot.is_reference: self.set_relation_importer() elif getattr(self.nautobot.field, "choices", None): @@ -1295,3 +1518,14 @@ def status_importer(source: RecordData, target: DiffSyncBaseModel) -> None: self.wrapper.add_reference(status_wrapper, value) self.set_importer(status_importer) + + def set_timezone_importer(self) -> None: + """Set a timezone importer.""" + + def timezone_importer(source: RecordData, target: DiffSyncBaseModel) -> None: + value = source.get(self.name, None) + if value not in EMPTY_VALUES: + value = pytz.timezone(value) + self.set_nautobot_value(target, value) + + self.set_importer(timezone_importer) diff --git a/nautobot_netbox_importer/management/commands/import_netbox.py b/nautobot_netbox_importer/management/commands/import_netbox.py index 02a955d1..1980967a 100644 --- a/nautobot_netbox_importer/management/commands/import_netbox.py +++ b/nautobot_netbox_importer/management/commands/import_netbox.py @@ -2,9 +2,12 @@ from django.core.management import call_command from django.core.management.base import BaseCommand +from packaging.version import Version from nautobot_netbox_importer.diffsync.adapters import NetBoxAdapter, NetBoxImporterOptions +_DEFAULT_NETBOX_VERSION = str(NetBoxImporterOptions._field_defaults["netbox_version"]) + class Command(BaseCommand): """Implementation of import_netbox command.""" @@ -52,12 +55,24 @@ def add_arguments(self, parser): dest="tag_issues", help="Whether to tag Nautobot records with any importer issues.", ) + parser.add_argument( + "--deduplicate-prefixes", + action="store_true", + dest="deduplicate_prefixes", + help="Deduplicate `ipam.prefix` and `ipam.aggregate` from NetBox. `prefix` value will be unique. 
(default: False)", + ) parser.add_argument( "--fix-powerfeed-locations", action="store_true", dest="fix_powerfeed_locations", help="Fix panel location to match rack location based on powerfeed.", ) + parser.add_argument( + "--create-missing-cable-terminations", + action="store_true", + dest="create_missing_cable_terminations", + help="Create missing cable terminations as Nautobot requires both cable terminations to be defined to save cable instances.", + ) parser.add_argument( "--print-summary", action="store_true", @@ -87,14 +102,32 @@ def add_arguments(self, parser): dest="trace_issues", help="Show a detailed trace of issues originated from any `Exception` found during the import.", ) + parser.add_argument( + "--customizations", + dest="customizations", + help="Paths to a Python module containing customizations to apply during the import. (default: empty)", + ) + parser.add_argument( + "--netbox-version", + dest="netbox_version", + help=f"SemVer NetBox version string to use for the import. (default: '{_DEFAULT_NETBOX_VERSION}')", + default=_DEFAULT_NETBOX_VERSION, + ) def handle(self, json_file, **kwargs): # type: ignore """Handle execution of the import_netbox management command.""" call_command("migrate") + customizations = (kwargs.pop("customizations") or "").split(",") + netbox_version = Version(kwargs.pop("netbox_version", _DEFAULT_NETBOX_VERSION)) + # pylint: disable=protected-access keys = NetBoxImporterOptions._fields - options = NetBoxImporterOptions(**{key: value for key, value in kwargs.items() if key in keys}) + options = NetBoxImporterOptions( + **{key: value for key, value in kwargs.items() if key in keys}, + customizations=customizations, + netbox_version=netbox_version, + ) adapter = NetBoxAdapter(json_file, options) adapter.import_to_nautobot() diff --git a/nautobot_netbox_importer/summary.py b/nautobot_netbox_importer/summary.py index b6070223..3b2a10a9 100644 --- a/nautobot_netbox_importer/summary.py +++ b/nautobot_netbox_importer/summary.py @@ -14,6 +14,13 @@ "MissingParentLocation": re.compile(r"A Location of type .* must have a parent Location"), } +_TAG_EXPRESSIONS: Mapping[str, Pattern[str]] = { + "InvalidCircuit": re.compile("A circuit termination must attach to either a location or a provider network"), + "IncompatibleTerminationTypes": re.compile("Incompatible termination types"), + "InvalidPlatform": re.compile("assigned platform is limited to"), + "MissingParentLocation": re.compile(r"A Location of type .* must have a parent Location"), +} + class ImporterIssue(NamedTuple): """Represents an issue encountered during the import process. 
@@ -124,11 +131,12 @@ class SourceModelSummary(NamedTuple): identifiers: Optional[List[FieldName]] disable_related_reference: bool forward_references: Optional[str] - pre_import: Optional[str] fields: List[FieldSummary] flags: str default_reference_uid: Optional[Uid] stats: SourceModelStats + pre_import: str | None = None + post_import: str | None = None class NautobotModelSummary(NamedTuple): diff --git a/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/input.json b/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/input.json index 702173aa..755c9dd8 100644 --- a/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/input.json +++ b/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/input.json @@ -1132,6 +1132,25 @@ { "model": "dcim.cable", "pk": 1, + "fields": { + "created": "2020-12-30T00:00:00Z", + "last_updated": "2020-12-30T19:32:32.969Z", + "custom_field_data": {}, + "description": "", + "comments": "", + "type": "smf", + "status": "connected", + "tenant": null, + "label": "", + "color": "ffeb3b", + "length": null, + "length_unit": "", + "_abs_length": null + } +}, +{ + "model": "dcim.cable", + "pk": 2, "fields": { "created": "2020-12-30T00:00:00Z", "last_updated": "2020-12-30T19:32:32.279Z", @@ -1178,7 +1197,7 @@ "face": "", "status": "active", "airflow": "", - "primary_ip4": null, + "primary_ip4": 1, "primary_ip6": null, "oob_ip": null, "cluster": null, @@ -1250,6 +1269,50 @@ "inventory_item_count": 0 } }, +{ + "model": "dcim.interface", + "pk": 1, + "fields": { + "created": "2021-04-14T00:00:00Z", + "last_updated": "2021-04-14T17:36:01.841Z", + "custom_field_data": {}, + "device": 1, + "name": "GigabitEthernet0/0/0", + "label": "", + "description": "", + "module": null, + "cable": null, + "cable_end": null, + "mark_connected": false, + "_path": null, + "enabled": true, + "mac_address": null, + "mtu": null, + "mode": "", + "parent": null, + "bridge": null, + "_name": "0000000099999999GigabitEthernet000000............", + "lag": null, + "type": "1000base-x-sfp", + "mgmt_only": false, + "speed": null, + "duplex": null, + "wwn": null, + "rf_role": "", + "rf_channel": "", + "rf_channel_frequency": null, + "rf_channel_width": null, + "tx_power": null, + "poe_mode": "", + "poe_type": "", + "wireless_link": null, + "untagged_vlan": null, + "vrf": null, + "vdcs": [], + "wireless_lans": [], + "tagged_vlans": [] + } +}, { "model": "dcim.devicerole", "pk": 1, @@ -1429,7 +1492,7 @@ "group": null, "tenant": null, "facility": "", - "time_zone": null, + "time_zone": "US/Pacific", "physical_address": "", "shipping_address": "", "latitude": null, @@ -2723,7 +2786,7 @@ "changed_object_id": 1, "related_object_type": null, "related_object_id": null, - "object_repr": "127.0.0.0/8", + "object_repr": "10.0.0.0/8", "prechange_data": null, "postchange_data": { "vrf": null, @@ -2731,7 +2794,7 @@ "site": null, "tags": [], "vlan": null, - "prefix": "127.0.0.0/8", + "prefix": "10.0.0.0/8", "status": "active", "tenant": null, "created": "2024-02-21T12:42:11.589Z", @@ -2762,7 +2825,7 @@ "changed_object_id": 1, "related_object_type": null, "related_object_id": null, - "object_repr": "127.0.0.1/8", + "object_repr": "10.0.0.1/8", "prechange_data": null, "postchange_data": { "vrf": null, @@ -2770,7 +2833,7 @@ "tags": [], "status": "active", "tenant": null, - "address": "127.0.0.1/8", + "address": "10.0.0.1/8", "created": "2024-02-21T12:20:21.970Z", "comments": "", "dns_name": "", @@ -2992,11 +3055,72 @@ "custom_field_data": {}, "description": "", "comments": 
"", - "address": "127.0.0.1/8", + "address": "10.0.0.1/8", "vrf": null, "tenant": null, "status": "active", "role": "loopback", + "nat_inside": null, + "dns_name": "" + } +}, +{ + "model": "ipam.ipaddress", + "pk": 2, + "fields": { + "created": "2024-02-21T12:20:21.970Z", + "last_updated": "2024-02-21T12:41:44.081Z", + "custom_field_data": {}, + "description": "", + "comments": "", + "address": "10.0.0.1/8", + "vrf": null, + "tenant": null, + "status": "active", + "role": "loopback", + "assigned_object_type": [ + "dcim", + "interface" + ], + "assigned_object_id": 1, + "nat_inside": null, + "dns_name": "" + } +}, +{ + "model": "ipam.ipaddress", + "pk": 3, + "fields": { + "created": "2024-02-21T12:20:21.970Z", + "last_updated": "2024-02-21T12:41:44.081Z", + "custom_field_data": {}, + "description": "", + "comments": "", + "address": "10.0.0.1/24", + "vrf": null, + "tenant": null, + "status": "active", + "role": "primary", + "assigned_object_type": null, + "assigned_object_id": null, + "nat_inside": null, + "dns_name": "" + } +}, +{ + "model": "ipam.ipaddress", + "pk": 4, + "fields": { + "created": "2024-02-21T12:20:21.970Z", + "last_updated": "2024-02-21T12:41:44.081Z", + "custom_field_data": {}, + "description": "", + "comments": "", + "address": "10.0.0.1/16", + "vrf": null, + "tenant": null, + "status": "active", + "role": "primary", "assigned_object_type": null, "assigned_object_id": null, "nat_inside": null, @@ -3012,7 +3136,29 @@ "custom_field_data": {}, "description": "", "comments": "", - "prefix": "127.0.0.0/8", + "prefix": "10.1.2.0/24", + "site": null, + "vrf": null, + "tenant": null, + "vlan": null, + "status": "active", + "role": null, + "is_pool": false, + "mark_utilized": false, + "_depth": 0, + "_children": 0 + } +}, +{ + "model": "ipam.prefix", + "pk": 2, + "fields": { + "created": "2024-02-21T12:42:11.589Z", + "last_updated": "2024-02-21T12:42:11.589Z", + "custom_field_data": {}, + "description": "", + "comments": "", + "prefix": "10.1.2.0/24", "site": null, "vrf": null, "tenant": null, @@ -3025,6 +3171,43 @@ "_children": 0 } }, +{ + "model": "ipam.prefix", + "pk": 3, + "fields": { + "created": "2024-02-21T12:42:11.589Z", + "last_updated": "2024-02-21T12:42:11.589Z", + "custom_field_data": {}, + "description": "", + "comments": "", + "prefix": "10.0.0.0/8", + "site": null, + "vrf": null, + "tenant": null, + "vlan": null, + "status": "active", + "role": null, + "is_pool": false, + "mark_utilized": false, + "_depth": 0, + "_children": 0 + } +}, +{ + "model": "ipam.aggregate", + "pk": 1, + "fields": { + "created": "2019-05-01T00:00:00Z", + "last_updated": "2019-05-01T14:35:04.385Z", + "custom_field_data": {}, + "description": "", + "comments": "", + "prefix": "10.1.2.0/24", + "rir": null, + "tenant": null, + "date_added": "2019-05-01" + } +}, { "model": "tenancy.tenant", "pk": 1, @@ -3240,5 +3423,298 @@ ] ] } +}, +{ + "model": "circuits.provider", + "pk": 1, + "fields": { + "created": "2020-12-30T00:00:00Z", + "last_updated": "2020-12-30T18:47:38.415Z", + "custom_field_data": {}, + "description": "", + "comments": "", + "name": "Level 3", + "slug": "level-3", + "asns": [] + } +}, +{ + "model": "circuits.circuittype", + "pk": 1, + "fields": { + "created": "2020-12-30T00:00:00Z", + "last_updated": "2020-12-30T18:49:05.484Z", + "custom_field_data": {}, + "name": "MPLS", + "slug": "mpls", + "description": "", + "color": "" + } +}, +{ + "model": "circuits.circuit", + "pk": 1, + "fields": { + "created": "2020-12-30T00:00:00Z", + "last_updated": "2023-12-29T15:45:04.504Z", + 
"custom_field_data": {}, + "description": "", + "comments": "", + "cid": "KKDG4923", + "provider": 1, + "provider_account": null, + "type": 1, + "status": "active", + "tenant": 5, + "install_date": null, + "termination_date": null, + "commit_rate": null, + "termination_a": 1, + "termination_z": 2 + } +}, +{ + "model": "circuits.circuittermination", + "pk": 1, + "fields": { + "created": "2021-04-14T00:00:00Z", + "last_updated": "2021-04-14T17:36:14.032Z", + "custom_field_data": {}, + "cable": 1, + "cable_end": "A", + "mark_connected": false, + "circuit": 1, + "term_side": "Z", + "site": 1, + "provider_network": null, + "port_speed": null, + "upstream_speed": null, + "xconnect_id": "", + "pp_info": "", + "description": "" + } +}, +{ + "model": "circuits.circuittermination", + "pk": 2, + "fields": { + "created": "2021-04-19T00:00:00Z", + "last_updated": "2021-04-19T16:06:48.802Z", + "custom_field_data": {}, + "cable": null, + "cable_end": "", + "mark_connected": false, + "circuit": 1, + "term_side": "A", + "site": null, + "provider_network": null, + "port_speed": null, + "upstream_speed": null, + "xconnect_id": "", + "pp_info": "", + "description": "" + } +}, +{ + "model": "dcim.cabletermination", + "pk": 1, + "fields": { + "created": null, + "last_updated": null, + "cable": 1, + "cable_end": "A", + "termination_type": [ + "circuits", + "circuittermination" + ], + "termination_id": 1, + "_device": null, + "_rack": null, + "_location": null, + "_site": 1 + } +}, +{ + "model": "dcim.cable", + "pk": 3, + "fields": { + "created": "2022-05-17T00:00:00Z", + "last_updated": "2022-05-17T13:46:00.947Z", + "custom_field_data": {}, + "description": "", + "comments": "", + "type": "mmf-om4", + "status": "connected", + "tenant": null, + "label": "", + "color": "ff66ff", + "length": null, + "length_unit": "", + "_abs_length": null + } +}, +{ + "model": "dcim.cabletermination", + "pk": 2, + "fields": { + "created": "2023-11-03T10:51:45.092Z", + "last_updated": "2023-11-03T10:51:45.338Z", + "cable": 3, + "cable_end": "A", + "termination_type": [ + "dcim", + "frontport" + ], + "termination_id": 1, + "_device": 3, + "_rack": 2, + "_location": 1, + "_site": 1 + } +}, +{ + "model": "dcim.frontport", + "pk": 1, + "fields": { + "created": "2022-05-16T00:00:00Z", + "last_updated": "2022-05-17T13:46:00.948Z", + "custom_field_data": {}, + "device": 3, + "name": "2", + "_name": "00000002", + "label": "", + "description": "", + "module": null, + "cable": 3, + "cable_end": "A", + "mark_connected": false, + "type": "lc", + "color": "", + "rear_port": 1, + "rear_port_position": 2 + } +}, +{ + "model": "dcim.device", + "pk": 3, + "fields": { + "created": "2022-05-16T00:00:00Z", + "last_updated": "2022-05-17T14:05:57.005Z", + "custom_field_data": {}, + "description": "", + "comments": "", + "local_context_data": null, + "config_template": null, + "device_type": 1, + "role": 1, + "tenant": null, + "platform": null, + "name": "DEVICE 3", + "_name": "DEVICE-3-R00000005-U00000042-00000002", + "serial": "", + "asset_tag": null, + "site": 1, + "location": 1, + "rack": 2, + "position": null, + "face": "", + "status": "active", + "airflow": "", + "primary_ip4": null, + "primary_ip6": null, + "oob_ip": null, + "cluster": null, + "virtual_chassis": null, + "vc_position": null, + "vc_priority": null, + "latitude": null, + "longitude": null, + "console_port_count": 0, + "console_server_port_count": 0, + "power_port_count": 0, + "power_outlet_count": 0, + "interface_count": 0, + "front_port_count": 12, + "rear_port_count": 1, + 
"device_bay_count": 0, + "module_bay_count": 0, + "inventory_item_count": 0 + } +}, +{ + "model": "dcim.rack", + "pk": 2, + "fields": { + "created": "2021-12-17T00:00:00Z", + "last_updated": "2024-01-09T18:58:42.654Z", + "custom_field_data": {}, + "description": "AR3150 NetShelter SX42U", + "comments": "", + "weight": null, + "weight_unit": "", + "_abs_weight": null, + "name": "Rack 2", + "_name": "Rack 00000002", + "facility_id": null, + "site": 1, + "location": 1, + "tenant": null, + "status": "active", + "role": null, + "serial": "", + "asset_tag": "635", + "type": "4-post-cabinet", + "width": 19, + "u_height": 42, + "starting_unit": 1, + "desc_units": false, + "outer_width": 750, + "outer_depth": 1070, + "outer_unit": "mm", + "max_weight": null, + "_abs_max_weight": null, + "mounting_depth": null + } +}, +{ + "model": "dcim.rearport", + "pk": 1, + "fields": { + "created": "2022-05-16T00:00:00Z", + "last_updated": "2022-06-01T10:38:20.527Z", + "custom_field_data": {}, + "device": 3, + "name": "RearPort1", + "_name": "RearPort00000001", + "label": "", + "description": "", + "module": null, + "cable": null, + "cable_end": null, + "mark_connected": false, + "type": "8p8c", + "color": "", + "positions": 24 + } +}, +{ + "model": "dcim.location", + "pk": 1, + "fields": { + "created": "2019-02-28T00:00:00Z", + "last_updated": "2019-03-04T15:09:40.876Z", + "custom_field_data": {}, + "parent": null, + "name": "Location 1", + "slug": "location-1", + "description": "", + "site": 1, + "status": "active", + "tenant": null, + "lft": 1, + "rght": 2, + "tree_id": 21, + "level": 0 + } } ] diff --git a/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/dcim.device.json b/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/dcim.device.json index 50cbf548..78ee7ac3 100644 --- a/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/dcim.device.json +++ b/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/dcim.device.json @@ -63,7 +63,7 @@ "rack": "87cd3a0d-c203-5af5-8e51-f402bb23bc00", "position": null, "face": "", - "primary_ip4": null, + "primary_ip4": "a2da6287-203a-5496-88c6-4fd8283630f7", "primary_ip6": null, "cluster": null, "virtual_chassis": null, diff --git a/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/dcim.interface.json b/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/dcim.interface.json new file mode 100644 index 00000000..b142fc1e --- /dev/null +++ b/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/dcim.interface.json @@ -0,0 +1,33 @@ +[ +{ + "model": "dcim.interface", + "pk": "d06b2a9d-572f-5757-a770-af17f798f4e9", + "fields": { + "created": "2021-04-14T00:00:00Z", + "last_updated": "2025-03-25T06:46:25.863Z", + "_custom_field_data": {}, + "device": "f510c5ff-e1f8-5adc-9fb5-30bc5791b42e", + "name": "GigabitEthernet0/0/0", + "label": "", + "description": "", + "cable": null, + "_cable_peer_type": null, + "_cable_peer_id": null, + "_path": null, + "status": "623acb70-22ce-5f69-bd7e-76bc921aa6c4", + "enabled": true, + "mac_address": null, + "mtu": null, + "mode": "", + "parent_interface": null, + "bridge": null, + "_name": "0000000099999999GigabitEthernet000000............", + "lag": null, + "type": "1000base-x-sfp", + "mgmt_only": false, + "untagged_vlan": null, + "vrf": null, + "tagged_vlans": [] + } +} +] diff --git a/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/dcim.location.json 
b/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/dcim.location.json index 0616e9d2..f2fd264f 100644 --- a/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/dcim.location.json +++ b/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/dcim.location.json @@ -15,7 +15,7 @@ "description": "", "facility": "", "asn": null, - "time_zone": null, + "time_zone": "US/Pacific", "physical_address": "", "shipping_address": "", "latitude": null, diff --git a/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/extras.status.json b/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/extras.status.json index 83455947..fda95484 100644 --- a/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/extras.status.json +++ b/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/extras.status.json @@ -10,6 +10,7 @@ "color": "9e9e9e", "description": "", "content_types": [ + 32, 33 ] } diff --git a/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/ipam.ipaddress.json b/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/ipam.ipaddress.json index b7b22c78..a1d6aa7b 100644 --- a/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/ipam.ipaddress.json +++ b/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/ipam.ipaddress.json @@ -1,17 +1,17 @@ [ { "model": "ipam.ipaddress", - "pk": "1fa230ed-bcba-5ac9-9749-e14c66564e01", + "pk": "a2da6287-203a-5496-88c6-4fd8283630f7", "fields": { "created": "2024-02-21T12:20:21.970Z", - "last_updated": "2024-02-21T13:57:15.708Z", + "last_updated": "2025-03-26T10:46:41.589Z", "_custom_field_data": {}, - "host": "127.0.0.1", - "mask_length": 8, + "host": "10.0.0.1", + "mask_length": 16, "type": "host", "status": "fb37b4d1-40ae-5d11-b8cd-46d3f9c25c14", - "role": "e38863d3-cb3d-5cc0-aef2-7844b8288b1d", - "parent": "99d61314-b42c-5725-8c17-90a3261db527", + "role": "42895040-37c3-5bcb-8326-c22198fe6dfd", + "parent": "015c4836-67b9-571d-aeb7-bb26b3438b92", "ip_version": 4, "tenant": null, "nat_inside": null, diff --git a/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/ipam.ipaddresstointerface.json b/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/ipam.ipaddresstointerface.json new file mode 100644 index 00000000..04b562ef --- /dev/null +++ b/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/ipam.ipaddresstointerface.json @@ -0,0 +1,18 @@ +[ +{ + "model": "ipam.ipaddresstointerface", + "pk": "8c6f920c-121e-5cae-aa39-01b93a4866c2", + "fields": { + "ip_address": "a2da6287-203a-5496-88c6-4fd8283630f7", + "interface": "d06b2a9d-572f-5757-a770-af17f798f4e9", + "vm_interface": null, + "is_source": false, + "is_destination": false, + "is_default": false, + "is_preferred": false, + "is_primary": false, + "is_secondary": false, + "is_standby": false + } +} +] diff --git a/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/ipam.prefix.json b/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/ipam.prefix.json index 38dbfa5e..35ee55c0 100644 --- a/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/ipam.prefix.json +++ b/nautobot_netbox_importer/tests/fixtures/nautobot-v2.2/3.7.custom/samples/ipam.prefix.json @@ -1,13 +1,36 @@ [ { "model": "ipam.prefix", - "pk": "99d61314-b42c-5725-8c17-90a3261db527", + "pk": 
"015c4836-67b9-571d-aeb7-bb26b3438b92", + "fields": { + "created": "2025-03-26T10:46:41.554Z", + "last_updated": "2025-03-26T10:46:41.554Z", + "_custom_field_data": {}, + "network": "10.0.0.0", + "broadcast": "10.0.0.255", + "prefix_length": 24, + "type": "network", + "status": "76782d4e-25fa-5add-8129-3a4540d09ad1", + "role": null, + "parent": "c87d7e7b-0e83-537c-bb74-b5517b3fa00d", + "ip_version": 4, + "namespace": "26756c2d-fddd-4128-9f88-dbcbddcbef45", + "tenant": null, + "vlan": null, + "rir": null, + "date_allocated": null, + "description": "" + } +}, +{ + "model": "ipam.prefix", + "pk": "56a49e57-e4e5-5c63-9e9c-188044a2aae0", "fields": { "created": "2024-02-21T12:42:11.589Z", - "last_updated": "2024-05-14T14:04:23.413Z", + "last_updated": "2025-03-26T10:46:41.566Z", "_custom_field_data": {}, - "network": "127.0.0.0", - "broadcast": "127.255.255.255", + "network": "10.0.0.0", + "broadcast": "10.255.255.255", "prefix_length": 8, "type": "network", "status": "fb37b4d1-40ae-5d11-b8cd-46d3f9c25c14", @@ -21,5 +44,28 @@ "date_allocated": null, "description": "" } +}, +{ + "model": "ipam.prefix", + "pk": "c87d7e7b-0e83-537c-bb74-b5517b3fa00d", + "fields": { + "created": "2025-03-26T10:46:41.560Z", + "last_updated": "2025-03-26T10:46:41.560Z", + "_custom_field_data": {}, + "network": "10.0.0.0", + "broadcast": "10.0.255.255", + "prefix_length": 16, + "type": "network", + "status": "76782d4e-25fa-5add-8129-3a4540d09ad1", + "role": null, + "parent": "56a49e57-e4e5-5c63-9e9c-188044a2aae0", + "ip_version": 4, + "namespace": "26756c2d-fddd-4128-9f88-dbcbddcbef45", + "tenant": null, + "vlan": null, + "rir": null, + "date_allocated": null, + "description": "" + } } ] diff --git a/netbox-checker/netbox_checker/__init__.py b/netbox-checker/netbox_checker/__init__.py new file mode 100644 index 00000000..27d1b181 --- /dev/null +++ b/netbox-checker/netbox_checker/__init__.py @@ -0,0 +1,7 @@ +"""NetBox Checker script base module.""" + +from .checker import check_netbox + +__all__ = [ + "check_netbox", +] diff --git a/netbox-checker/netbox_checker/checker.py b/netbox-checker/netbox_checker/checker.py new file mode 100644 index 00000000..41dd82ab --- /dev/null +++ b/netbox-checker/netbox_checker/checker.py @@ -0,0 +1,52 @@ +"""NetBox check script to validate data before importing to Nautobot.""" + +import sys +from os import getenv + +from django.apps import apps +from django.urls import reverse + +_SETTINGS = { + "max_outputs": 0, + "base_url": "", +} + + +def _print_issues(message: str, queryset): + """Print issues to the console.""" + for obj in queryset: + _SETTINGS["max_outputs"] -= 1 + if _SETTINGS["max_outputs"] < 0: + sys.exit(0) + + print(f"{message} {obj.id} {obj}") + + app_label = obj._meta.app_label + model_name = obj._meta.model_name + url = reverse(f"{app_label}:{model_name}", kwargs={"pk": obj.id}) + print(f"{_SETTINGS['base_url']}{url}") + + print(100 * "-") + + +def _find_missing_cables(side: str): + """Check for cables missing termination side `A` or `B`.""" + Cable = apps.get_model("dcim", "Cable") + CableTermination = apps.get_model("dcim", "CableTermination") + + cables_missing = Cable.objects.exclude(id__in=CableTermination.objects.filter(cable_end="A").values("cable_id")) + + if cables_missing.count(): + _print_issues(f"Missing cable termination on side {side}:", cables_missing) + + +def check_netbox( + max_outputs=int(getenv("NETBOX_CHECKER_MAX_OUTPUTS") or 10), + base_url=getenv("NETBOX_CHECKER_BASE_URL", "http://localhost:8000"), +): + """Main function.""" + 
_SETTINGS["max_outputs"] = max_outputs + _SETTINGS["base_url"] = base_url + + _find_missing_cables("A") + _find_missing_cables("B") diff --git a/netbox-checker/pyproject.toml b/netbox-checker/pyproject.toml new file mode 100644 index 00000000..0b71a799 --- /dev/null +++ b/netbox-checker/pyproject.toml @@ -0,0 +1,12 @@ +[project] +name = "netbox-checker" +version = "0.1.0" +description = "Simple tool to check the status of a NetBox instance" +requires-python = ">=3.8" +dependencies = [ + "requests", +] + +[build-system] +requires = ["setuptools>=61.0"] +build-backend = "setuptools.build_meta" diff --git a/netbox-checker/run.sh b/netbox-checker/run.sh new file mode 100755 index 00000000..4715c99a --- /dev/null +++ b/netbox-checker/run.sh @@ -0,0 +1,30 @@ +#!/bin/bash +# +# Usage with `netbox-community/netbox-docker` +# +# ./run.sh + +set -e + +if [[ $1 == "inside-docker" ]]; then + echo "Running inside container" + + pip install -e /netbox-checker + + /opt/netbox/netbox/manage.py nbshell \ + --command='from netbox_checker import check_netbox; check_netbox()' +else + echo "Running docker compose" + + export NETBOX_CHECKER_PATH="$PWD" + export NETBOX_DOCKER_PATH=${1:-$NETBOX_CHECKER_PATH/../netbox-docker} + + cd "$NETBOX_DOCKER_PATH" + + docker compose run \ + --user=root \ + --env NETBOX_CHECKER_MAX_OUTPUTS \ + --env NETBOX_CHECKER_BASE_URL \ + --volume "$NETBOX_CHECKER_PATH:/netbox-checker" \ + netbox /netbox-checker/run.sh inside-docker +fi diff --git a/poetry.lock b/poetry.lock index 89a0b0be..a90bc44e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "amqp" @@ -2980,13 +2980,13 @@ postgresql = ["psycopg2"] [[package]] name = "pytz" -version = "2024.2" +version = "2025.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, - {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, + {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, + {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, ] [[package]] @@ -4013,4 +4013,4 @@ all = [] [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.13" -content-hash = "a9c8020b3821f792f4c50329bdacfcfbae9271d8a3701ff1a4987fe2763ca149" +content-hash = "b70d5e980941402b63815c0de5bfeb63b1544c30af3624e94a119a125090e942" diff --git a/pyproject.toml b/pyproject.toml index 6678d683..fc6536eb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,6 +37,8 @@ Markdown = "!=3.3.5" pydantic = "^1.10.2" python-dateutil = "^2.8.2" ijson = "^3.2.3" +pytz = "^2025.1" +netaddr = ">=0.10.1" [tool.poetry.group.dev.dependencies] coverage = "*" diff --git a/tasks.py b/tasks.py index d14954f0..004feedf 100644 --- a/tasks.py +++ b/tasks.py @@ -641,12 +641,16 @@ def import_db(context, db_name="", input_file="dump.sql"): @task( help={ "db-name": "Database name to backup (default: Nautobot database)", + "format": "Database dump format (default: `sql`)", "output-file": "Ouput file, overwrite if exists (default: `dump.sql`)", "readable": "Flag to dump database data in more readable format 
(default: `True`)", } ) -def backup_db(context, db_name="", output_file="dump.sql", readable=True): +def backup_db(context, db_name="", format="sql", output_file="", readable=True): """Dump database into `output_file` file from `db` container.""" + if not output_file: + output_file = f"dump.{format}" + start(context, "db") _await_healthy_service(context, "db") @@ -665,6 +669,7 @@ def backup_db(context, db_name="", output_file="dump.sql", readable=True): "pg_dump", "--username=$POSTGRES_USER", f"--dbname={db_name or '$POSTGRES_DB'}", + f"--format={format}", "--inserts" if readable else "", ] else: @@ -1103,63 +1108,89 @@ def validate_app_config(context): @task( help={ "file": "URL or path to the JSON file to import.", - "bypass-data-validation": "Bypass as much of Nautobot's internal data validation logic as possible, allowing the import of data from NetBox that would be rejected as invalid if entered as-is through the GUI or REST API. USE WITH CAUTION: it is generally more desirable to *take note* of any data validation errors, *correct* the invalid data in NetBox, and *re-import* with the corrected data! (default: False)", "demo-version": "Version of the demo data to import from `https://github.com/netbox-community/netbox-demo-data/json` instead of using the `--file` option (default: empty).", + "test-input": "Version of the test data to import from `nautobot_netbox_importer/tests/fixtures/nautobot-v` instead of using the `--file` option (default: empty).", + "bypass-data-validation": "Bypass as much of Nautobot's internal data validation logic as possible, allowing the import of data from NetBox that would be rejected as invalid if entered as-is through the GUI or REST API. USE WITH CAUTION: it is generally more desirable to *take note* of any data validation errors, *correct* the invalid data in NetBox, and *re-import* with the corrected data! (default: False)", + "create-missing-cable-terminations": "Create missing cable terminations as Nautobot requires both cable terminations to be defined to save cable instances.", + "customizations": "Path to a Python module containing customizations to apply during the import. (default: empty)", + "deduplicate-prefixes": "Deduplicate `ipam.prefix` and `ipam.aggregate` from NetBox. `prefix` value will be unique. (default: False)", "dry-run": "Do not write any data to the database. (default: False)", "fix-powerfeed-locations": "Fix panel location to match rack location based on powerfeed. (default: False)", "print-summary": "Show a summary of the import. (default: True)", "save-json-summary-path": "File path to write the JSON mapping to. (default: generated-mappings.json)", "save-text-summary-path": "File path to write the text mapping to. (default: generated-mappings.txt)", "sitegroup-parent-always-region": "When importing `dcim.sitegroup` to `dcim.locationtype`, always set the parent of a site group, to be a `Region` location type. This is a workaround to fix validation errors `'A Location of type Location may only have a Location of the same type as its parent.'`. (default: False)", - "update-paths": "Call management command `trace_paths` to update paths after the import. (default: False)", - "unrack-zero-uheight-devices": "Cleans the `position` field in `dcim.device` instances with `u_height == 0`. (default: True)", "tag-issues": "Whether to tag Nautobot records with any importer issues. 
(default: False)", "trace-issues": "Show a detailed trace of issues originated from any `Exception` found during the import.", + "unrack-zero-uheight-devices": "Cleans the `position` field in `dcim.device` instances with `u_height == 0`. (default: True)", + "update-paths": "Call management command `trace_paths` to update paths after the import. (default: False)", } ) def import_netbox( # noqa: PLR0913 context, file="", demo_version="", - save_json_summary_path="", - save_text_summary_path="", + test_input="", bypass_data_validation=False, + create_missing_cable_terminations=False, + customizations="", + deduplicate_prefixes=False, dry_run=True, fix_powerfeed_locations=False, - sitegroup_parent_always_region=False, print_summary=True, - update_paths=False, - unrack_zero_uheight_devices=True, + save_json_summary_path="", + save_text_summary_path="", + sitegroup_parent_always_region=False, tag_issues=False, trace_issues=False, + unrack_zero_uheight_devices=True, + update_paths=False, ): """Import NetBox data into Nautobot.""" - if demo_version: - if file: - raise ValueError("Cannot specify both, `file` and `demo` arguments") + if sum(bool(x) for x in [file, demo_version, test_input]) > 1: + raise ValueError("Cannot specify more than one of `file`, `demo`, or `test_input` arguments") + if demo_version: file = ( "https://raw.githubusercontent.com/netbox-community/netbox-demo-data/master/json/netbox-demo-v" + demo_version + ".json" ) + if test_input: + if is_truthy(context.nautobot_netbox_importer.local): + path = Path(__file__).parent + else: + path = Path("/source") + + path = path / f"nautobot_netbox_importer/tests/fixtures/nautobot-v{test_input}" + + file = path / "input.json" + + if not save_json_summary_path: + save_json_summary_path = path / "summary.json" + if not save_text_summary_path: + save_text_summary_path = path / "summary.json" + command = [ "nautobot-server", "import_netbox", - f"--save-json-summary-path={save_json_summary_path}" if save_json_summary_path else "", - f"--save-text-summary-path={save_text_summary_path}" if save_text_summary_path else "", "--bypass-data-validation" if bypass_data_validation else "", + "--create-missing-cable-terminations" if create_missing_cable_terminations else "", + f"--customizations={customizations}" if customizations else "", + "--deduplicate-prefixes" if deduplicate_prefixes else "", "--dry-run" if dry_run else "", "--fix-powerfeed-locations" if fix_powerfeed_locations else "", - "--sitegroup-parent-always-region" if sitegroup_parent_always_region else "", - "--print-summary" if print_summary else "", - "--update-paths" if update_paths else "", "--no-color", - "" if unrack_zero_uheight_devices else "--no-unrack-zero-uheight-devices", + "--print-summary" if print_summary else "", + "--sitegroup-parent-always-region" if sitegroup_parent_always_region else "", + f"--save-json-summary-path={save_json_summary_path}" if save_json_summary_path else "", + f"--save-text-summary-path={save_text_summary_path}" if save_text_summary_path else "", "--tag-issues" if tag_issues else "", "--trace-issues" if trace_issues else "", - file, + "" if unrack_zero_uheight_devices else "--no-unrack-zero-uheight-devices", + "--update-paths" if update_paths else "", + f"{file}", ] run_command(context, " ".join(command))