diff --git a/changes/427.housekeeping b/changes/427.housekeeping new file mode 100644 index 000000000..9679c24c4 --- /dev/null +++ b/changes/427.housekeeping @@ -0,0 +1 @@ +Created release for 2.6.0 \ No newline at end of file diff --git a/development/run_example_job.py b/development/run_example_job.py new file mode 100644 index 000000000..ebbeabb6b --- /dev/null +++ b/development/run_example_job.py @@ -0,0 +1,89 @@ +"""Executes a job locally for testing purposes. + +To run this script use the following command: + +``` +invoke nbshell \ + --plain \ + --file development/run_example_job.py \ + --env RUN_SSOT_TARGET_JOB=False \ + --env RUN_SSOT_JOB_DRY_RUN=True +``` + +Passing environment variables to the script is optional. The script will default to running the data source job with a dry run enabled. +""" + +import json +import os + +from django.core.management import call_command +from nautobot.core.settings_funcs import is_truthy +from nautobot.extras.choices import SecretsGroupAccessTypeChoices +from nautobot.extras.choices import SecretsGroupSecretTypeChoices +from nautobot.extras.models import ExternalIntegration +from nautobot.extras.models import Job +from nautobot.extras.models import Secret +from nautobot.extras.models import SecretsGroup +from nautobot.extras.models import SecretsGroupAssociation + +_TOKEN = 40 * "a" +os.environ["NAUTOBOT_DEMO_TOKEN"] = _TOKEN + +_NAUTOBOT_DEMO_URL = "https://demo.nautobot.com" +_DRY_RUN = is_truthy(os.getenv("RUN_SSOT_JOB_DRY_RUN", "True")) + +module_name = "nautobot_ssot.jobs.examples" +is_target_job = is_truthy(os.getenv("RUN_SSOT_TARGET_JOB", "False")) +job_class_name = "ExampleDataTarget" if is_target_job else "ExampleDataSource" + +job = Job.objects.get(module_name=module_name, job_class_name=job_class_name) +if not job.enabled: + job.enabled = True + job.validated_save() + +nautobot_demo, created = ExternalIntegration.objects.get_or_create( + name="Nautobot Demo", + defaults={ + "remote_url": _NAUTOBOT_DEMO_URL, + "verify_ssl": False, + }, +) + +if created: + secret = Secret.objects.create( + name="nautobot-demo-token", + provider="environment-variable", + parameters={"variable": "NAUTOBOT_DEMO_TOKEN"}, + ) + secrets_group = SecretsGroup.objects.create(name="Nautobot Demo Group") + SecretsGroupAssociation.objects.create( + secret=secret, + secrets_group=secrets_group, + access_type=SecretsGroupAccessTypeChoices.TYPE_HTTP, + secret_type=SecretsGroupSecretTypeChoices.TYPE_TOKEN, + ) + nautobot_demo.secrets_group = secrets_group + nautobot_demo.validated_save() + +data: dict = { + "debug": True, + "dryrun": _DRY_RUN, + "memory_profiling": False, +} + +if is_target_job: + data["target"] = str(nautobot_demo.pk) + data["target_url"] = _NAUTOBOT_DEMO_URL + data["target_token"] = _TOKEN +else: + data["source"] = str(nautobot_demo.pk) + data["source_url"] = _NAUTOBOT_DEMO_URL + data["source_token"] = _TOKEN + +call_command( + "runjob", + f"{module_name}.{job_class_name}", + data=json.dumps(data), + username="admin", + local=True, # Enable to run the job locally (not as a celery task) +) diff --git a/docs/admin/install.md b/docs/admin/install.md index 4c39dce42..4d85a3575 100644 --- a/docs/admin/install.md +++ b/docs/admin/install.md @@ -79,9 +79,9 @@ sudo systemctl restart nautobot nautobot-worker nautobot-scheduler The app behavior can be controlled with the following list of settings: -| Key | Example | Default | Description | -| ------------------- | ------- | ------- | ---------------------------------------------------------- | -| 
`hide_example_jobs` | `True` | `False` | A boolean to represent whether or not to display the example job. | ## Integrations Configuration diff --git a/docs/admin/release_notes/version_1.6.md index 5528dc0f1..7e5087f17 100644 --- a/docs/admin/release_notes/version_1.6.md +++ b/docs/admin/release_notes/version_1.6.md @@ -17,3 +17,58 @@ - [161](https://github.com/nautobot/nautobot-app-ssot/pull/161) - Reverts ChatOps dependency removal by @snaselj - [213](https://github.com/nautobot/nautobot-app-ssot/pull/213) - fix: :bug: Several fixes in the ACI integration by @chadell - [205](https://github.com/nautobot/nautobot-app-ssot/pull/205) - Migrate PR #164 from Arista Child Repo by @qduk + +## v1.6.1 - 2024-02-21 + +### Fixed + +- [243](https://github.com/nautobot/nautobot-app-ssot/pull/243) - Fix Infoblox import_subnet for ltm-1.6 by @jdrew82 +- [261](https://github.com/nautobot/nautobot-app-ssot/pull/261) - Fix Device42 documentation. by @jdrew82 + +### Changed + +- [245](https://github.com/nautobot/nautobot-app-ssot/pull/245) - IPFabric integration settings updates by @alhogan +- [357](https://github.com/nautobot/nautobot-app-ssot/pull/357) - Backport contrib changes to LTM by @Kircheneer +- [361](https://github.com/nautobot/nautobot-app-ssot/pull/361) - Backport of #350 by @Kircheneer +- [363](https://github.com/nautobot/nautobot-app-ssot/pull/363) - Backport #362 by @Kircheneer + +## v1.6.2 - 2024-03-12 + +### Fixed + +- [386](https://github.com/nautobot/nautobot-app-ssot/pull/386) - Fixes bug in backport of contrib custom relationship handling + +### Changed + +- [386](https://github.com/nautobot/nautobot-app-ssot/pull/386) - Improves error handling in contrib (backport of #374) +- [373](https://github.com/nautobot/nautobot-app-ssot/pull/373) - Change contrib.NautobotModel.get_from_db to use a PK (backport of #371) + +## v1.6.3 - 2024-03-20 + +### Fixed + +- [396](https://github.com/nautobot/nautobot-app-ssot/pull/396) - Fix custom one-to-many relationships (backport of #393) +- [396](https://github.com/nautobot/nautobot-app-ssot/pull/396) - Use `typing.get_args` in favor of accessing `__args__` directly (backport of #390) +- [396](https://github.com/nautobot/nautobot-app-ssot/pull/396) - Fixed issue with generic relationships and `NautobotAdapter.load` (backport of #388) +- [396](https://github.com/nautobot/nautobot-app-ssot/pull/396) - Allow foreign keys inside of many to many relationships (backport of #377) + +### Housekeeping + +- Replicate module and test module structure for contrib code in LTM branch + +## v1.6.4 - 2024-04-16 + +### Fixed + +- [243](https://github.com/nautobot/nautobot-app-ssot/pull/243) - Fix Infoblox import_subnet for ltm-1.6 by @jdrew82 +- [261](https://github.com/nautobot/nautobot-app-ssot/pull/261) - Fix Device42 documentation. by @jdrew82 +- [419](https://github.com/nautobot/nautobot-app-ssot/pull/419) - Fix Device42 Plugin Settings for LTM by @jdrew82 + +### Changed + +- [245](https://github.com/nautobot/nautobot-app-ssot/pull/245) - IPFabric integration settings updates by @alhogan +- [357](https://github.com/nautobot/nautobot-app-ssot/pull/357) - Backport contrib changes to LTM by @Kircheneer +- [361](https://github.com/nautobot/nautobot-app-ssot/pull/361) - Backport of #350 by @Kircheneer +- [363](https://github.com/nautobot/nautobot-app-ssot/pull/363) - Backport #362 by @Kircheneer +- [373](https://github.com/nautobot/nautobot-app-ssot/pull/373) - Change contrib.NautobotModel.get_from_db to use a PK by @Kircheneer
diff --git a/docs/admin/release_notes/version_2.6.md b/docs/admin/release_notes/version_2.6.md new file mode 100644 index 000000000..2f7cc8b84 --- /dev/null +++ b/docs/admin/release_notes/version_2.6.md @@ -0,0 +1,25 @@ + +# v2.6 Release Notes + +## [v2.6.0 (2024-04-16)](https://github.com/nautobot/nautobot-app-ssot/releases/tag/v2.6.0) + +### Added + +- [#367](https://github.com/nautobot/nautobot-app-ssot/issues/367) - Added support of Roles, Platforms, Manufacturers, DeviceTypes, and Devices to example Jobs. + +### Changed + +- [#398](https://github.com/nautobot/nautobot-app-ssot/issues/398) - Changed Arista Cloud Vision jobs to optionally use ExternalIntegration. +- [#414](https://github.com/nautobot/nautobot-app-ssot/issues/414) - Changed IPFabric interface media matching to fall back on interface names. + +### Fixed + +- [#367](https://github.com/nautobot/nautobot-app-ssot/issues/367) - Fixed issues with example Jobs. +- [#407](https://github.com/nautobot/nautobot-app-ssot/issues/407) - Fixed logic check for 'hide_example_jobs' when defined, and also set to False. +- [#409](https://github.com/nautobot/nautobot-app-ssot/issues/409) - Fixed tagging and custom field updates for Nautobot objects synced to/from Infoblox. +- [#413](https://github.com/nautobot/nautobot-app-ssot/issues/413) - Fixed method of retrieving objects from IPFabric's technology categories. + +### Housekeeping + +- [#418](https://github.com/nautobot/nautobot-app-ssot/issues/418) - Unpins multiple dependencies. +- [#421](https://github.com/nautobot/nautobot-app-ssot/issues/421) - Opened prometheus-client dependency range and removed direct drf-spectacular dependency.
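The headline change in these notes (#398) lets the CloudVision jobs read their connection details from an `ExternalIntegration` record rather than from individual `aristacv_*` settings. A minimal sketch of opting in via `nautobot_config.py`, with an illustrative integration name; per `get_config()` later in this diff, a missing record is created on first use together with a secrets group whose secrets read the `ARISTA_CLOUDVISION_TOKEN`, `ARISTA_CLOUDVISION_USER`, and `ARISTA_CLOUDVISION_PASSWORD` environment variables:

```python
# nautobot_config.py (illustrative sketch, not part of this diff)
PLUGINS_CONFIG = {
    "nautobot_ssot": {
        # Existing aristacv_* settings stay valid and seed the new record;
        # this key only names the ExternalIntegration to read from.
        "aristacv_external_integration_name": "Arista CloudVision",
    }
}
```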
diff --git a/mkdocs.yml b/mkdocs.yml index 145b2ab3e..ded5b3db3 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -132,6 +132,7 @@ nav: - Compatibility Matrix: "admin/compatibility_matrix.md" - Release Notes: - "admin/release_notes/index.md" + - v2.6: "admin/release_notes/version_2.6.md" - v2.5: "admin/release_notes/version_2.5.md" - v2.4: "admin/release_notes/version_2.4.md" - v2.3: "admin/release_notes/version_2.3.md" diff --git a/nautobot_ssot/__init__.py b/nautobot_ssot/__init__.py index aa1a5c41f..27bc4bc1c 100644 --- a/nautobot_ssot/__init__.py +++ b/nautobot_ssot/__init__.py @@ -74,6 +74,7 @@ class NautobotSSOTAppConfig(NautobotAppConfig): "aristacv_from_cloudvision_default_site": "", "aristacv_hostname_patterns": [], "aristacv_import_active": False, + "aristacv_external_integration_name": "", "aristacv_role_mappings": {}, "aristacv_site_mappings": {}, "aristacv_verify": True, diff --git a/nautobot_ssot/integrations/aristacv/constant.py b/nautobot_ssot/integrations/aristacv/constants.py similarity index 88% rename from nautobot_ssot/integrations/aristacv/constant.py rename to nautobot_ssot/integrations/aristacv/constants.py index 5a30f014a..876a26988 100644 --- a/nautobot_ssot/integrations/aristacv/constant.py +++ b/nautobot_ssot/integrations/aristacv/constants.py @@ -1,14 +1,18 @@ """Storage of data that will not change throughout the life cycle of the application.""" -from django.conf import settings - - -def _read_settings() -> dict: - config = settings.PLUGINS_CONFIG["nautobot_ssot"] - return config - - -APP_SETTINGS = _read_settings() +ARISTA_PLATFORM = "arista.eos.eos" +CLOUDVISION_PLATFORM = "Arista EOS-CloudVision" +DEFAULT_APPLY_IMPORT_TAG = False +DEFAULT_CREATE_CONTROLLER = False +DEFAULT_CVAAS_URL = "https://www.arista.io" +DEFAULT_DELETE_DEVICES_ON_SYNC = False +DEFAULT_DEVICE_ROLE = "network" +DEFAULT_DEVICE_ROLE_COLOR = "ff0000" +DEFAULT_DEVICE_STATUS = "cloudvision_imported" +DEFAULT_DEVICE_STATUS_COLOR = "ff0000" +DEFAULT_IMPORT_ACTIVE = False +DEFAULT_SITE = "cloudvision_imported" +DEFAULT_VERIFY_SSL = True PORT_TYPE_MAP = { "xcvr1000BaseT": "1000base-t", @@ -83,7 +87,3 @@ def _read_settings() -> dict: "400GBASE-2FR4": "400gbase-x-osfp", "400GBASE-ZR": "400gbase-x-qsfpdd", } - -CLOUDVISION_PLATFORM = "Arista EOS-CloudVision" - -ARISTA_PLATFORM = "arista.eos.eos" diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py index 40c460f2e..074772bae 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py @@ -7,7 +7,6 @@ from diffsync import DiffSync from diffsync.exceptions import ObjectAlreadyExists, ObjectNotFound -from nautobot_ssot.integrations.aristacv.constant import APP_SETTINGS from nautobot_ssot.integrations.aristacv.diffsync.models.cloudvision import ( CloudvisionCustomField, CloudvisionDevice, @@ -17,6 +16,7 @@ CloudvisionIPAddress, CloudvisionIPAssignment, ) +from nautobot_ssot.integrations.aristacv.types import CloudVisionAppConfig from nautobot_ssot.integrations.aristacv.utils import cloudvision @@ -41,8 +41,13 @@ def __init__(self, *args, job=None, conn: cloudvision.CloudvisionApi, **kwargs): def load_devices(self): """Load devices from CloudVision.""" - if APP_SETTINGS.get("aristacv_create_controller"): - cvp_version = cloudvision.get_cvp_version() + config: CloudVisionAppConfig = self.job.app_config + if config.hostname_patterns and not (config.site_mappings and 
config.role_mappings): + self.job.logger.warning( + "Configuration found for aristacv_hostname_patterns but no aristacv_site_mappings or aristacv_role_mappings. Please ensure your mappings are defined." + ) + if config.create_controller: + cvp_version = cloudvision.get_cvp_version(config) cvp_ver_cf = self.cf(name="arista_eos", value=cvp_version, device_name="CloudVision") try: self.add(cvp_ver_cf) @@ -258,10 +263,4 @@ def load_device_tags(self, device): def load(self): """Load devices and associated data from CloudVision.""" - if APP_SETTINGS.get("aristacv_hostname_patterns") and not ( - APP_SETTINGS.get("aristacv_site_mappings") and APP_SETTINGS.get("aristacv_role_mappings") - ): - self.job.logger.warning( - "Configuration found for aristacv_hostname_patterns but no aristacv_site_mappings or aristacv_role_mappings. Please ensure your mappings are defined." - ) self.load_devices() diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py index 8cc12e337..5e994a549 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py @@ -11,7 +11,6 @@ from diffsync import DiffSync from diffsync.exceptions import ObjectNotFound, ObjectAlreadyExists -from nautobot_ssot.integrations.aristacv.constant import APP_SETTINGS from nautobot_ssot.integrations.aristacv.diffsync.models.nautobot import ( NautobotDevice, NautobotCustomField, @@ -21,6 +20,7 @@ NautobotIPAssignment, NautobotPort, ) +from nautobot_ssot.integrations.aristacv.types import CloudVisionAppConfig from nautobot_ssot.integrations.aristacv.utils import nautobot @@ -166,8 +166,9 @@ def sync_complete(self, source: DiffSync, *args, **kwargs): self.job.logger.warning(f"Deletion failed for protected object: {nautobot_object}. {err}") self.objects_to_delete[grouping] = [] + config: CloudVisionAppConfig = self.job.app_config # type: ignore # if Controller is created we need to ensure all imported Devices have RelationshipAssociation to it. 
-        if APP_SETTINGS.get("aristacv_create_controller"): +        if config.create_controller: self.job.logger.info("Creating Relationships between CloudVision and connected Devices.") controller_relation = OrmRelationship.objects.get(label="Controller -> Device") device_ct = ContentType.objects.get_for_model(OrmDevice)
diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py b/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py index 015e8f5c0..5763c2556 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py @@ -1,5 +1,4 @@ -"""Cloudvision DiffSync models for AristaCV SSoT.""" -from nautobot_ssot.integrations.aristacv.constant import APP_SETTINGS +"""CloudVision DiffSync models for AristaCV SSoT.""" from nautobot_ssot.integrations.aristacv.diffsync.models.base import ( Device, CustomField, @@ -9,11 +8,12 @@ IPAssignment, Port, ) +from nautobot_ssot.integrations.aristacv.types import CloudVisionAppConfig from nautobot_ssot.integrations.aristacv.utils.cloudvision import CloudvisionApi class CloudvisionDevice(Device): -    """Cloudvision Device model.""" +    """CloudVision Device model.""" @classmethod def create(cls, diffsync, ids, attrs): @@ -30,7 +30,7 @@ def delete(self): class CloudvisionPort(Port): -    """Cloudvision Port model.""" +    """CloudVision Port model.""" @classmethod def create(cls, diffsync, ids, attrs): @@ -47,7 +47,7 @@ def delete(self): class CloudvisionNamespace(Namespace): -    """Cloudvision Namespace model.""" +    """CloudVision Namespace model.""" @classmethod def create(cls, diffsync, ids, attrs): @@ -67,7 +67,7 @@ def delete(self): class CloudvisionPrefix(Prefix): -    """Cloudvision IPAdress model.""" +    """CloudVision IPAddress model.""" @classmethod def create(cls, diffsync, ids, attrs): @@ -87,7 +87,7 @@ def delete(self): class CloudvisionIPAddress(IPAddress): -    """Cloudvision IPAdress model.""" +    """CloudVision IPAddress model.""" @classmethod def create(cls, diffsync, ids, attrs): @@ -107,7 +107,7 @@ def delete(self): class CloudvisionIPAssignment(IPAssignment): -    """Cloudvision IPAssignment model.""" +    """CloudVision IPAssignment model.""" @classmethod def create(cls, diffsync, ids, attrs): @@ -127,24 +127,19 @@ def delete(self): class CloudvisionCustomField(CustomField): -    """Cloudvision CustomField model.""" +    """CloudVision CustomField model.""" @staticmethod -    def connect_cvp(): -        """Connect to Cloudvision gRPC endpoint.""" -        return CloudvisionApi( -            cvp_host=APP_SETTINGS["aristacv_cvp_host"], -            cvp_port=APP_SETTINGS.get("aristacv_cvp_port", "8443"), -            verify=APP_SETTINGS["aristacv_verify"], -            username=APP_SETTINGS["aristacv_cvp_user"], -            password=APP_SETTINGS["aristacv_cvp_password"], -            cvp_token=APP_SETTINGS["aristacv_cvp_token"], -        ) +    def connect_cvp(config: CloudVisionAppConfig): +        """Connect to CloudVision gRPC endpoint.""" +        return CloudvisionApi(config) @classmethod def create(cls, diffsync, ids, attrs): """Create a user tag in cvp.""" -        cvp = cls.connect_cvp() +        config: CloudVisionAppConfig = diffsync.job.app_config  # type: ignore +        # TBD: Isn't this a performance bottleneck? We are connecting to CVP for each operation.
+        cvp = cls.connect_cvp(config) cvp.create_tag(ids["name"], attrs["value"]) # Create mapping from device_name to CloudVision device_id device_ids = {dev["hostname"]: dev["device_id"] for dev in cvp.get_devices()} @@ -159,7 +154,9 @@ def create(cls, diffsync, ids, attrs): def update(self, attrs): """Update user tag in cvp.""" -        cvp = self.connect_cvp() +        config: CloudVisionAppConfig = self.diffsync.job.app_config  # type: ignore +        # TBD: Isn't this a performance bottleneck? We are connecting to CVP for each operation. +        cvp = self.connect_cvp(config) remove = set(self.device_name) - set(attrs["devices"]) add = set(attrs["devices"]) - set(self.device_name) # Create mapping from device_name to CloudVision device_id @@ -180,7 +177,9 @@ def update(self, attrs): def delete(self): """Delete user tag applied to devices in cvp.""" -        cvp = self.connect_cvp() +        config: CloudVisionAppConfig = self.diffsync.job.app_config  # type: ignore +        # TBD: Isn't this a performance bottleneck? We are connecting to CVP for each operation. +        cvp = self.connect_cvp(config) device_ids = {dev["hostname"]: dev["device_id"] for dev in cvp.get_devices()} for device in self.device_name: cvp.remove_tag_from_device(device_ids[device], self.name, self.value)
diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py index a67708862..8ce5c20ac 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py @@ -14,10 +14,10 @@ from nautobot.ipam.models import IPAddressToInterface import distutils -from nautobot_ssot.integrations.aristacv.constant import ( -    APP_SETTINGS, +from nautobot_ssot.integrations.aristacv.constants import ( ARISTA_PLATFORM, CLOUDVISION_PLATFORM, +    DEFAULT_DEVICE_ROLE_COLOR, ) from nautobot_ssot.integrations.aristacv.diffsync.models.base import ( Device, @@ -28,6 +28,7 @@ Port, Prefix, ) +from nautobot_ssot.integrations.aristacv.types import CloudVisionAppConfig from nautobot_ssot.integrations.aristacv.utils import nautobot try: @@ -38,15 +39,6 @@ print("Device Lifecycle app isn't installed so will revert to CustomField for OS version.") LIFECYCLE_MGMT = False - -# TODO: Move to constant.py -DEFAULT_SITE = "cloudvision_imported" -DEFAULT_DEVICE_ROLE = "network" -DEFAULT_DEVICE_ROLE_COLOR = "ff0000" -DEFAULT_DEVICE_STATUS = "cloudvision_imported" -DEFAULT_DEVICE_STATUS_COLOR = "ff0000" -DEFAULT_DELETE_DEVICES_ON_SYNC = False -APPLY_IMPORT_TAG = False MISSING_CUSTOM_FIELDS = [] @@ -56,40 +48,35 @@ class NautobotDevice(Device): @classmethod def create(cls, diffsync, ids, attrs): """Create device object in Nautobot.""" -        site_code, role_code = nautobot.parse_hostname(ids["name"].lower()) -        site_map = APP_SETTINGS.get("aristacv_site_mappings") -        role_map = APP_SETTINGS.get("aristacv_role_mappings") +        config: CloudVisionAppConfig = diffsync.job.app_config  # type: ignore +        site_code, role_code = nautobot.parse_hostname(ids["name"].lower(), config.hostname_patterns) +        site_map = config.site_mappings +        role_map = config.role_mappings if site_code and site_code in site_map: site = nautobot.verify_site(site_map[site_code]) elif "CloudVision" in ids["name"]: -            if APP_SETTINGS.get("aristacv_controller_site"): -                site = nautobot.verify_site(APP_SETTINGS["aristacv_controller_site"]) +            if config.controller_site: +                site = nautobot.verify_site(config.controller_site) else: site = nautobot.verify_site("CloudVision") else: -            site = 
nautobot.verify_site(APP_SETTINGS.get("aristacv_from_cloudvision_default_site", DEFAULT_SITE)) + site = nautobot.verify_site(config.from_cloudvision_default_site) if role_code and role_code in role_map: role = nautobot.verify_device_role_object( role_map[role_code], - APP_SETTINGS.get( - "aristacv_from_cloudvision_default_device_role_color", - DEFAULT_DEVICE_ROLE_COLOR, - ), + config.from_cloudvision_default_device_role_color, ) elif "CloudVision" in ids["name"]: role = nautobot.verify_device_role_object("Controller", DEFAULT_DEVICE_ROLE_COLOR) else: role = nautobot.verify_device_role_object( - APP_SETTINGS.get("aristacv_from_cloudvision_default_device_role", DEFAULT_DEVICE_ROLE), - APP_SETTINGS.get( - "aristacv_from_cloudvision_default_device_role_color", - DEFAULT_DEVICE_ROLE_COLOR, - ), + config.from_cloudvision_default_device_role, + config.from_cloudvision_default_device_role_color, ) - if APP_SETTINGS.get("aristacv_create_controller") and "CloudVision" in ids["name"]: + if config.create_controller and "CloudVision" in ids["name"]: platform = OrmPlatform.objects.get(name=CLOUDVISION_PLATFORM) else: platform = OrmPlatform.objects.get(name=ARISTA_PLATFORM) @@ -106,7 +93,7 @@ def create(cls, diffsync, ids, attrs): serial=attrs["serial"] if attrs.get("serial") else "", ) - if APP_SETTINGS.get("aristacv_apply_import_tag", APPLY_IMPORT_TAG): + if config.apply_import_tag: import_tag = nautobot.verify_import_tag() new_device.tags.add(import_tag) try: @@ -143,7 +130,8 @@ def update(self, attrs): def delete(self): """Delete device object in Nautobot.""" - if APP_SETTINGS.get("aristacv_delete_devices_on_sync", DEFAULT_DELETE_DEVICES_ON_SYNC): + config: CloudVisionAppConfig = self.diffsync.job.app_config # type: ignore + if config.delete_devices_on_sync: self.diffsync.job.logger.warning(f"Device {self.name} will be deleted per app settings.") device = OrmDevice.objects.get(id=self.uuid) self.diffsync.objects_to_delete["devices"].append(device) @@ -242,7 +230,8 @@ def update(self, attrs): def delete(self): """Delete Interface in Nautobot.""" - if APP_SETTINGS.get("aristacv_delete_devices_on_sync"): + config: CloudVisionAppConfig = self.diffsync.job.app_config # type: ignore + if config.delete_devices_on_sync: super().delete() if self.diffsync.job.debug: self.diffsync.job.logger.warning(f"Interface {self.name} for {self.device} will be deleted.") diff --git a/nautobot_ssot/integrations/aristacv/jobs.py b/nautobot_ssot/integrations/aristacv/jobs.py index b83ef1fa9..efbb63018 100644 --- a/nautobot_ssot/integrations/aristacv/jobs.py +++ b/nautobot_ssot/integrations/aristacv/jobs.py @@ -1,19 +1,19 @@ # pylint: disable=invalid-name,too-few-public-methods """Jobs for CloudVision integration with SSoT app.""" + from django.templatetags.static import static from django.urls import reverse - -from nautobot.dcim.models import DeviceType -from nautobot.extras.jobs import Job, BooleanVar from nautobot.core.utils.lookup import get_route_for_model -from nautobot_ssot.jobs.base import DataTarget, DataSource, DataMapping - -from nautobot_ssot.integrations.aristacv.constant import APP_SETTINGS +from nautobot.dcim.models import DeviceType +from nautobot.extras.jobs import BooleanVar +from nautobot.extras.jobs import Job from nautobot_ssot.integrations.aristacv.diffsync.adapters.cloudvision import CloudvisionAdapter from nautobot_ssot.integrations.aristacv.diffsync.adapters.nautobot import NautobotAdapter -from nautobot_ssot.integrations.aristacv.diffsync.models import nautobot from 
nautobot_ssot.integrations.aristacv.utils.cloudvision import CloudvisionApi - +from nautobot_ssot.integrations.aristacv.utils.nautobot import get_config +from nautobot_ssot.jobs.base import DataMapping +from nautobot_ssot.jobs.base import DataSource +from nautobot_ssot.jobs.base import DataTarget name = "SSoT - Arista CloudVision"  # pylint: disable=invalid-name @@ -41,38 +41,26 @@ class Meta: """Meta data for DataSource.""" name = "CloudVision ⟹ Nautobot" -        data_source = "Cloudvision" +        data_source = "CloudVision" data_source_icon = static("nautobot_ssot_aristacv/cvp_logo.png") description = "Sync system tag data from CloudVision to Nautobot" @classmethod def config_information(cls): """Dictionary describing the configuration of this DataSource.""" -        if APP_SETTINGS.get("aristacv_cvp_host"): -            server_type = "On prem" -            host = APP_SETTINGS.get("aristacv_cvp_host") -        else: -            server_type = "CVaaS" -            host = APP_SETTINGS.get("aristacv_cvaas_url") +        config = get_config() + return { -            "Server type": server_type, -            "CloudVision host": host, -            "Username": APP_SETTINGS.get("aristacv_cvp_user"), -            "Verify": str(APP_SETTINGS.get("aristacv_verify")), -            "Delete devices on sync": APP_SETTINGS.get( -                "aristacv_delete_devices_on_sync", str(nautobot.DEFAULT_DELETE_DEVICES_ON_SYNC) -            ), -            "New device default site": APP_SETTINGS.get( -                "aristacv_from_cloudvision_default_site", nautobot.DEFAULT_SITE -            ), -            "New device default role": APP_SETTINGS.get( -                "aristacv_from_cloudvision_default_device_role", nautobot.DEFAULT_DEVICE_ROLE -            ), -            "New device default role color": APP_SETTINGS.get( -                "aristacv_from_cloudvision_default_device_role_color", nautobot.DEFAULT_DEVICE_ROLE_COLOR -            ), -            "Apply import tag": str(APP_SETTINGS.get("aristacv_apply_import_tag", nautobot.APPLY_IMPORT_TAG)), -            "Import Active": str(APP_SETTINGS.get("aristacv_import_active", "True")) +            "Server Type": "On prem" if config.is_on_premise else "CVaaS", +            "CloudVision URL": config.url, +            "User Name": config.cvp_user, +            "Verify SSL": str(config.verify_ssl), +            "Delete Devices On Sync": config.delete_devices_on_sync, +            "New Device Default Site": config.from_cloudvision_default_site, +            "New Device Default Role": config.from_cloudvision_default_device_role, +            "New Device Default Role Color": config.from_cloudvision_default_device_role_color, +            "Apply Import Tag": str(config.apply_import_tag), +            "Import Active": str(config.import_active), # Password and Token are intentionally omitted! } @@ -98,36 +86,34 @@ def data_mappings(cls): DataMapping("topology_type", None, "Topology Type", None), ) +    def __init__(self, *args, **kwargs): +        """Initialize the CloudVision Data Source.""" +        super().__init__(*args, **kwargs) +        self.app_config = get_config() + def load_source_adapter(self): """Load data from CloudVision into DiffSync models.""" -        if not APP_SETTINGS.get("aristacv_from_cloudvision_default_site"): +        if not self.app_config.from_cloudvision_default_site: self.logger.error( -                "App setting `aristacv_from_cloudvision_default_site` is not defined. This setting is required for the App to function." +                "App setting `from_cloudvision_default_site` is not defined. This setting is required for the App to function." 
) - raise MissingConfigSetting(setting="aristacv_from_cloudvision_default_site") - if not APP_SETTINGS.get("aristacv_from_cloudvision_default_device_role"): + raise MissingConfigSetting(setting="from_cloudvision_default_site") + if not self.app_config.from_cloudvision_default_device_role: self.logger.error( - "App setting `aristacv_from_cloudvision_default_device_role` is not defined. This setting is required for the App to function." + "App setting `from_cloudvision_default_device_role` is not defined. This setting is required for the App to function." ) - raise MissingConfigSetting(setting="aristacv_from_cloudvision_default_device_role") + raise MissingConfigSetting(setting="from_cloudvision_default_device_role") if self.debug: - if APP_SETTINGS.get("aristacv_delete_devices_on_sync"): + if self.app_config.delete_devices_on_sync: self.logger.warning( - "Devices not present in Cloudvision but present in Nautobot will be deleted from Nautobot." + "Devices not present in CloudVision but present in Nautobot will be deleted from Nautobot." ) else: self.logger.warning( - "Devices not present in Cloudvision but present in Nautobot will not be deleted from Nautobot." + "Devices not present in CloudVision but present in Nautobot will not be deleted from Nautobot." ) self.logger.info("Connecting to CloudVision") - with CloudvisionApi( - cvp_host=APP_SETTINGS["aristacv_cvp_host"], - cvp_port=APP_SETTINGS.get("aristacv_cvp_port", "8443"), - verify=APP_SETTINGS["aristacv_verify"], - username=APP_SETTINGS["aristacv_cvp_user"], - password=APP_SETTINGS["aristacv_cvp_password"], - cvp_token=APP_SETTINGS["aristacv_cvp_token"], - ) as client: + with CloudvisionApi(self.app_config) as client: self.logger.info("Loading data from CloudVision") self.source_adapter = CloudvisionAdapter(job=self, conn=client) self.source_adapter.load() @@ -139,12 +125,18 @@ def load_target_adapter(self): self.target_adapter.load() def run( # pylint: disable=arguments-differ, too-many-arguments, duplicate-code - self, dryrun, memory_profiling, debug, *args, **kwargs + self, + dryrun, + memory_profiling, + debug, + *args, + **kwargs, ): """Perform data synchronization.""" self.debug = debug self.dryrun = dryrun self.memory_profiling = memory_profiling + super().run(dryrun=self.dryrun, memory_profiling=self.memory_profiling, *args, **kwargs) @@ -164,17 +156,19 @@ class Meta: @classmethod def config_information(cls): """Dictionary describing the configuration of this DataTarget.""" - if APP_SETTINGS.get("aristacv_cvp_host"): + config = get_config() + + if config.is_on_premise: return { - "Server type": "On prem", - "CloudVision host": APP_SETTINGS.get("aristacv_cvp_host"), - "Username": APP_SETTINGS.get("aristacv_cvp_user"), - "Verify": str(APP_SETTINGS.get("aristacv_verify")) + "Server Type": "On prem", + "CloudVision URL": config.url, + "Verify": str(config.verify_ssl), + "User Name": config.cvp_user, # Password is intentionally omitted! } return { - "Server type": "CVaaS", - "CloudVision host": APP_SETTINGS.get("aristacv_cvaas_url"), + "Server Type": "CVaaS", + "CloudVision URL": config.url, # Token is intentionally omitted! 
} @@ -183,6 +177,11 @@ def data_mappings(cls): """List describing the data mappings involved in this DataTarget.""" return (DataMapping("Tags", reverse("extras:tag_list"), "Device Tags", None),) + def __init__(self, *args, **kwargs): + """Initialize the CloudVision Data Target.""" + super().__init__(*args, **kwargs) + self.app_config = get_config() + def load_source_adapter(self): """Load data from Nautobot into DiffSync models.""" self.logger.info("Loading data from Nautobot") @@ -192,34 +191,33 @@ def load_source_adapter(self): def load_target_adapter(self): """Load data from CloudVision into DiffSync models.""" if self.debug: - if APP_SETTINGS.get("aristacv_delete_devices_on_sync"): + if self.app_config.delete_devices_on_sync: self.logger.warning( - "Devices not present in Cloudvision but present in Nautobot will be deleted from Nautobot." + "Devices not present in CloudVision but present in Nautobot will be deleted from Nautobot." ) else: self.logger.warning( - "Devices not present in Cloudvision but present in Nautobot will not be deleted from Nautobot." + "Devices not present in CloudVision but present in Nautobot will not be deleted from Nautobot." ) self.logger.info("Connecting to CloudVision") - with CloudvisionApi( - cvp_host=APP_SETTINGS["aristacv_cvp_host"], - cvp_port=APP_SETTINGS.get("aristacv_cvp_port", "8443"), - verify=APP_SETTINGS["aristacv_verify"], - username=APP_SETTINGS["aristacv_cvp_user"], - password=APP_SETTINGS["aristacv_cvp_password"], - cvp_token=APP_SETTINGS["aristacv_cvp_token"], - ) as client: + with CloudvisionApi(self.app_config) as client: self.logger.info("Loading data from CloudVision") self.target_adapter = CloudvisionAdapter(job=self, conn=client) self.target_adapter.load() def run( # pylint: disable=arguments-differ, too-many-arguments, duplicate-code - self, dryrun, memory_profiling, debug, *args, **kwargs + self, + dryrun, + memory_profiling, + debug, + *args, + **kwargs, ): """Perform data synchronization.""" self.debug = debug self.dryrun = dryrun self.memory_profiling = memory_profiling + super().run(dryrun=self.dryrun, memory_profiling=self.memory_profiling, *args, **kwargs) diff --git a/nautobot_ssot/integrations/aristacv/signals.py b/nautobot_ssot/integrations/aristacv/signals.py index 46748e6e1..fda54c728 100644 --- a/nautobot_ssot/integrations/aristacv/signals.py +++ b/nautobot_ssot/integrations/aristacv/signals.py @@ -5,7 +5,7 @@ from django.db.models.signals import post_migrate from nautobot.extras.choices import CustomFieldTypeChoices, RelationshipTypeChoices -from nautobot_ssot.integrations.aristacv.constant import APP_SETTINGS +from nautobot_ssot.integrations.aristacv.utils.nautobot import get_config # pylint: disable-next=unused-argument @@ -15,7 +15,7 @@ def register_signals(sender): post_migrate.connect(post_migrate_create_manufacturer) post_migrate.connect(post_migrate_create_platform) - if APP_SETTINGS.get("aristacv_create_controller"): + if get_config().create_controller: post_migrate.connect(post_migrate_create_controller_relationship) @@ -134,7 +134,7 @@ def post_migrate_create_platform(apps=global_apps, **kwargs): }, ) - if APP_SETTINGS.get("aristacv_create_controller"): + if get_config().create_controller: Platform.objects.get_or_create( name="Arista EOS-CloudVision", manufacturer=Manufacturer.objects.get(name="Arista"), diff --git a/nautobot_ssot/integrations/aristacv/types.py b/nautobot_ssot/integrations/aristacv/types.py new file mode 100644 index 000000000..ed87e2244 --- /dev/null +++ 
b/nautobot_ssot/integrations/aristacv/types.py @@ -0,0 +1,25 @@ +"""Arista CloudVision Type Definitions.""" + +from typing import NamedTuple + + +class CloudVisionAppConfig(NamedTuple): + """Arista CloudVision Configuration.""" + + is_on_premise: bool + url: str + verify_ssl: bool + cvp_user: str + cvp_password: str + token: str + delete_devices_on_sync: bool + from_cloudvision_default_site: str + from_cloudvision_default_device_role: str + from_cloudvision_default_device_role_color: str + apply_import_tag: bool + import_active: bool + hostname_patterns: list + site_mappings: dict + role_mappings: dict + controller_site: str + create_controller: bool diff --git a/nautobot_ssot/integrations/aristacv/utils/cloudvision.py b/nautobot_ssot/integrations/aristacv/utils/cloudvision.py index 5757ca4a3..3dfff318c 100644 --- a/nautobot_ssot/integrations/aristacv/utils/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/utils/cloudvision.py @@ -3,6 +3,7 @@ import ssl from datetime import datetime from typing import Any, Iterable, List, Optional, Tuple, Union +from urllib.parse import urlparse import google.protobuf.timestamp_pb2 as pbts import grpc @@ -23,7 +24,8 @@ from cloudvision.Connector.codec.custom_types import FrozenDict from cloudvision.Connector.grpc_client.grpcClient import create_query, to_pbts -from nautobot_ssot.integrations.aristacv.constant import APP_SETTINGS, PORT_TYPE_MAP +from nautobot_ssot.integrations.aristacv.constants import PORT_TYPE_MAP +from nautobot_ssot.integrations.aristacv.types import CloudVisionAppConfig RPC_TIMEOUT = 30 TIME_TYPE = Union[pbts.Timestamp, datetime] @@ -42,66 +44,51 @@ def __init__(self, error_code, message): class CloudvisionApi: # pylint: disable=too-many-instance-attributes, too-many-arguments - """Arista Cloudvision gRPC client.""" + """Arista CloudVision gRPC client.""" AUTH_KEY_PATH = "access_token" - def __init__( - self, - cvp_host: str, - cvp_port: str = "", - verify: bool = True, - username: str = "", - password: str = "", - cvp_token: str = "", - ): - """Create Cloudvision API connection.""" + def __init__(self, config: CloudVisionAppConfig): + """Create CloudVision API connection.""" self.metadata = None - self.cvp_host = cvp_host - self.cvp_port = cvp_port - self.cvp_url = f"{cvp_host}:{cvp_port}" - self.verify = verify - self.username = username - self.password = password - self.cvp_token = cvp_token - - # If CVP_HOST is defined, we assume an on-prem installation. 
-        if self.cvp_host: -            if self.verify: + +        parsed_url = urlparse(config.url) +        if not parsed_url.hostname or not parsed_url.port: +            raise ValueError("Invalid URL provided for CloudVision") +        token = config.token +        if config.is_on_premise: +            if config.verify_ssl: channel_creds = grpc.ssl_channel_credentials() else: channel_creds = grpc.ssl_channel_credentials( -                    bytes(ssl.get_server_certificate((self.cvp_host, int(self.cvp_port))), "utf-8") +                    bytes(ssl.get_server_certificate((parsed_url.hostname, parsed_url.port)), "utf-8") ) -            if self.cvp_token: -                call_creds = grpc.access_token_call_credentials(self.cvp_token) -            elif self.username != "" and self.password != "":  # nosec +            if token: +                call_creds = grpc.access_token_call_credentials(token) +            elif config.cvp_user != "" and config.cvp_password != "":  # nosec response = requests.post(  # nosec -                    f"https://{self.cvp_host}/cvpservice/login/authenticate.do", -                    auth=(self.username, self.password), +                    f"https://{parsed_url.hostname}:{parsed_url.port}/cvpservice/login/authenticate.do", +                    auth=(config.cvp_user, config.cvp_password), timeout=60, -                    verify=self.verify, +                    verify=config.verify_ssl, ) session_id = response.json().get("sessionId") if not session_id: error_code = response.json().get("errorCode") error_message = response.json().get("errorMessage") raise AuthFailure(error_code, error_message) -                if not self.cvp_token: -                    self.cvp_token = session_id +                token = session_id call_creds = grpc.access_token_call_credentials(session_id) else: raise AuthFailure( error_code="Missing Credentials", message="Unable to authenticate due to missing credentials." ) -            self.metadata = ((self.AUTH_KEY_PATH, self.cvp_token),) -            # Set up credentials for CVaaS using supplied token. +            self.metadata = ((self.AUTH_KEY_PATH, token),) else: -            self.cvp_url = APP_SETTINGS.get("aristacv_cvaas_url", "www.arista.io:443") -            call_creds = grpc.access_token_call_credentials(self.cvp_token) +            call_creds = grpc.access_token_call_credentials(token) channel_creds = grpc.ssl_channel_credentials() conn_creds = grpc.composite_channel_credentials(channel_creds, call_creds) -        self.comm_channel = grpc.secure_channel(self.cvp_url, conn_creds) +        self.comm_channel = grpc.secure_channel(f"{parsed_url.hostname}:{parsed_url.port}", conn_creds) self.__client = rtr_client.RouterV1Stub(self.comm_channel) self.__auth_client = rtr_client.AuthStub(self.comm_channel) self.__search_client = rtr_client.SearchStub(self.comm_channel) @@ -266,10 +253,10 @@ def search(  # pylint:disable=dangerous-default-value, too-many-locals return (self.decode_batch(nb) for nb in res) -def get_devices(client): +def get_devices(client, import_active: bool): """Get devices from CloudVision inventory.""" device_stub = services.DeviceServiceStub(client) -    if APP_SETTINGS.get("aristacv_import_active"): +    if import_active: req = services.DeviceStreamRequest( partial_eq_filter=[models.Device(streaming_status=models.STREAMING_STATUS_ACTIVE)] ) @@ -440,7 +427,7 @@ def get_device_type(client: CloudvisionApi, dId: str): """Returns the type of the device: modular/fixed. Args: -        client (CloudvisionApi): Cloudvision connection. +        client (CloudvisionApi): CloudVision connection. dId (str): Device ID to determine type for. Returns: @@ -462,7 +449,7 @@ def get_interfaces_chassis(client: CloudvisionApi, dId): """Gets information about interfaces for a modular device. Args: -        client (CloudvisionApi): Cloudvision connection. +        client (CloudvisionApi): CloudVision connection. dId (str): Device ID to determine type for. 
""" # Fetch the list of slices/linecards @@ -502,7 +489,7 @@ def get_interfaces_fixed(client: CloudvisionApi, dId: str): """Gets information about interfaces for a fixed system device. Args: - client (CloudvisionApi): Cloudvision connection. + client (CloudvisionApi): CloudVision connection. dId (str): Device ID to determine type for. """ pathElts = ["Sysdb", "interface", "status", "eth", "phy", "slice", "1", "intfStatus", Wildcard()] @@ -534,7 +521,7 @@ def get_interface_transceiver(client: CloudvisionApi, dId: str, interface: str): """Gets transceiver information for specified interface on specific device. Args: - client (CloudvisionApi): Cloudvision connection. + client (CloudvisionApi): CloudVision connection. dId (str): Device ID to determine transceiver type for. interface (str): Name of interface to get transceiver information for. """ @@ -559,7 +546,7 @@ def get_interface_mode(client: CloudvisionApi, dId: str, interface: str): """Gets interface mode, ie access/trunked. Args: - client (CloudvisionApi): Cloudvision connection. + client (CloudvisionApi): CloudVision connection. dId (str): Device ID to determine type for. interface (str): Name of interface to get mode information for. """ @@ -625,7 +612,7 @@ def get_interface_description(client: CloudvisionApi, dId: str, interface: str): """Gets interface description. Args: - client (CloudvisionApi): Cloudvision connection. + client (CloudvisionApi): CloudVision connection. dId (str): Device ID to get description for. interface (str): Name of interface to get description for. """ @@ -644,7 +631,7 @@ def get_interface_vrf(client: CloudvisionApi, dId: str, interface: str) -> str: """Gets interface VRF. Args: - client (CloudvisionApi): Cloudvision connection. + client (CloudvisionApi): CloudVision connection. dId (str): Device ID to determine type for. interface (str): Name of interface to get mode information for. """ @@ -663,7 +650,7 @@ def get_ip_interfaces(client: CloudvisionApi, dId: str): """Gets interfaces with IP Addresses configured from specified device. Args: - client (CloudvisionApi): Cloudvision connection. + client (CloudvisionApi): CloudVision connection. dId (str): Device ID to retrieve IP Addresses and associated interfaces for. """ pathElts = ["Sysdb", "ip", "config", "ipIntfConfig", Wildcard()] @@ -686,7 +673,7 @@ def get_ip_interfaces(client: CloudvisionApi, dId: str): return ip_intfs -def get_cvp_version(): +def get_cvp_version(config: CloudVisionAppConfig): """Returns CloudVision portal version. 
Returns: @@ -694,19 +681,19 @@ def get_cvp_version(): """ client = CvpClient() try: - if APP_SETTINGS.get("aristacv_cvp_token") and not APP_SETTINGS.get("aristacv_cvp_host"): + if config.token and not config.is_on_premise: client.connect( - nodes=[APP_SETTINGS["aristacv_cvaas_url"]], + nodes=[config.url], username="", password="", # nosec: B106 is_cvaas=True, - api_token=APP_SETTINGS.get("aristacv_cvp_token"), + api_token=config.token, ) else: client.connect( - nodes=[APP_SETTINGS["aristacv_cvp_host"]], - username=APP_SETTINGS.get("aristacv_cvp_user"), - password=APP_SETTINGS.get("aristacv_cvp_password"), + nodes=[config.url], + username=config.cvp_user, + password=config.cvp_password, is_cvaas=False, ) except CvpLoginError as err: diff --git a/nautobot_ssot/integrations/aristacv/utils/nautobot.py b/nautobot_ssot/integrations/aristacv/utils/nautobot.py index f6378ea57..32642aac5 100644 --- a/nautobot_ssot/integrations/aristacv/utils/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/utils/nautobot.py @@ -1,10 +1,32 @@ """Utility functions for Nautobot ORM.""" + +import logging import re +from typing import Mapping +from urllib.parse import urlparse + +from django.conf import settings from django.contrib.contenttypes.models import ContentType -from nautobot.dcim.models import Device, DeviceType, Location, LocationType, Manufacturer -from nautobot.extras.models import Role, Status, Tag, Relationship +from nautobot.core.models.utils import slugify +from nautobot.core.settings_funcs import is_truthy +from nautobot.dcim.models import Device +from nautobot.dcim.models import DeviceType +from nautobot.dcim.models import Location +from nautobot.dcim.models import LocationType +from nautobot.dcim.models import Manufacturer +from nautobot.extras.choices import SecretsGroupAccessTypeChoices +from nautobot.extras.choices import SecretsGroupSecretTypeChoices +from nautobot.extras.models import ExternalIntegration +from nautobot.extras.models import Relationship +from nautobot.extras.models import Role +from nautobot.extras.models import Secret +from nautobot.extras.models import SecretsGroup +from nautobot.extras.models import SecretsGroupAssociation +from nautobot.extras.models import Status +from nautobot.extras.models import Tag -from nautobot_ssot.integrations.aristacv.constant import APP_SETTINGS +from nautobot_ssot.integrations.aristacv import constants +from nautobot_ssot.integrations.aristacv.types import CloudVisionAppConfig try: from nautobot_device_lifecycle_mgmt.models import SoftwareLCM # noqa: F401 # pylint: disable=unused-import @@ -15,6 +37,168 @@ LIFECYCLE_MGMT = False +logger = logging.getLogger(__name__) + + +def _get_or_create_integration(integration_name: str, config: dict) -> ExternalIntegration: + slugified_integration_name = slugify(integration_name) + integration_env_name = slugified_integration_name.upper().replace("-", "_") + + integration, created = ExternalIntegration.objects.get_or_create( + name=integration_name, + defaults={ + "remote_url": config.pop("url"), + "verify_ssl": config.pop("verify_ssl", False), + "extra_config": config, + }, + ) + if not created: + return integration + + secrets_group = SecretsGroup.objects.create(name=f"{slugified_integration_name}-group") + secret_token = Secret.objects.create( + name=f"{slugified_integration_name}-token", + provider="environment-variable", + parameters={"variable": f"{integration_env_name}_TOKEN"}, + ) + SecretsGroupAssociation.objects.create( + secret=secret_token, + secrets_group=secrets_group, + 
access_type=SecretsGroupAccessTypeChoices.TYPE_HTTP, + secret_type=SecretsGroupSecretTypeChoices.TYPE_TOKEN, + ) + secret_password = Secret.objects.create( + name=f"{slugified_integration_name}-password", + provider="environment-variable", + parameters={"variable": f"{integration_env_name}_PASSWORD"}, + ) + SecretsGroupAssociation.objects.create( + secret=secret_password, + secrets_group=secrets_group, + access_type=SecretsGroupAccessTypeChoices.TYPE_HTTP, + secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, + ) + secret_user = Secret.objects.create( + name=f"{slugified_integration_name}-user", + provider="environment-variable", + parameters={"variable": f"{integration_env_name}_USER"}, + ) + SecretsGroupAssociation.objects.create( + secret=secret_user, + secrets_group=secrets_group, + access_type=SecretsGroupAccessTypeChoices.TYPE_HTTP, + secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, + ) + integration.secrets_group = secrets_group + integration.validated_save() + return integration + + +def get_config() -> CloudVisionAppConfig: + """Get Arista CloudVision configuration from Nautobot settings. + + Reads configuration from external integration if specified by `aristacv_external_integration_name` app configuration. + + Keeps backward compatibility with previous configuration settings. + + Create a new integration if specified but not found. + """ + app_settings: dict = settings.PLUGINS_CONFIG["nautobot_ssot"] # type: ignore + + config = { + "is_on_premise": bool(app_settings.get("aristacv_cvp_host")), + "delete_devices_on_sync": is_truthy( + app_settings.get("aristacv_delete_devices_on_sync", constants.DEFAULT_DELETE_DEVICES_ON_SYNC) + ), + "from_cloudvision_default_site": app_settings.get( + "aristacv_from_cloudvision_default_site", constants.DEFAULT_SITE + ), + "from_cloudvision_default_device_role": app_settings.get( + "aristacv_from_cloudvision_default_device_role", constants.DEFAULT_DEVICE_ROLE + ), + "from_cloudvision_default_device_role_color": app_settings.get( + "aristacv_from_cloudvision_default_device_role_color", constants.DEFAULT_DEVICE_ROLE_COLOR + ), + "apply_import_tag": is_truthy( + app_settings.get("aristacv_apply_import_tag", constants.DEFAULT_APPLY_IMPORT_TAG) + ), + "import_active": is_truthy(app_settings.get("aristacv_import_active", constants.DEFAULT_IMPORT_ACTIVE)), + "verify_ssl": is_truthy(app_settings.get("aristacv_verify", constants.DEFAULT_VERIFY_SSL)), + "token": app_settings.get("aristacv_cvp_token", ""), + "cvp_user": app_settings.get("aristacv_cvp_user", ""), + "cvp_password": app_settings.get("aristacv_cvp_password", ""), + "hostname_patterns": app_settings.get("aristacv_hostname_patterns", []), + "site_mappings": app_settings.get("aristacv_site_mappings", {}), + "role_mappings": app_settings.get("aristacv_role_mappings", {}), + "controller_site": app_settings.get("aristacv_controller_site", ""), + "create_controller": is_truthy( + app_settings.get("aristacv_create_controller", constants.DEFAULT_CREATE_CONTROLLER) + ), + } + + if config["is_on_premise"]: + url = app_settings.get("aristacv_cvp_host", "") + if not url.startswith("http"): + url = f"https://{url}" + parsed_url = urlparse(url) + port = parsed_url.port or app_settings.get("aristacv_cvp_port", 443) + config["url"] = f"{parsed_url.scheme}://{parsed_url.hostname}:{port}" + else: + url = app_settings.get("aristacv_cvaas_url", constants.DEFAULT_CVAAS_URL) + if not url.startswith("http"): + url = f"https://{url}" + parsed_url = urlparse(url) + config["url"] = 
f"{parsed_url.scheme}://{parsed_url.hostname}:{parsed_url.port or 443}" + + def convert(): + expected_fields = set(CloudVisionAppConfig._fields) + for key in list(config): + if key not in expected_fields: + logger.warning(f"Unexpected key found in Arista CloudVision config: {key}") + config.pop(key) + + for key in expected_fields - set(config): + logger.warning(f"Missing key in Arista CloudVision config: {key}") + config[key] = "" + + return CloudVisionAppConfig(**config) + + integration_name = app_settings.get("aristacv_external_integration_name") + if not integration_name: + return convert() + + integration = _get_or_create_integration(integration_name, {**config}) + integration_config: Mapping = integration.extra_config # type: ignore + if not isinstance(integration.extra_config, Mapping): + integration_config = config + + if isinstance(integration.verify_ssl, bool): + config["verify_ssl"] = integration.verify_ssl + + config["url"] = integration.remote_url + + config.update(integration_config) + + secrets_group: SecretsGroup = integration.secrets_group # type: ignore + if not secrets_group: + return convert() + + config["cvp_user"] = secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_HTTP, + secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, + ) + config["cvp_password"] = secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_HTTP, + secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, + ) + config["token"] = secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_HTTP, + secret_type=SecretsGroupSecretTypeChoices.TYPE_TOKEN, + ) + + return convert() + + def verify_site(site_name): """Verifies whether site in app config is created. If not, creates site. @@ -26,12 +210,14 @@ def verify_site(site_name): try: site_obj = Location.objects.get(name=site_name, location_type=loc_type) except Location.DoesNotExist: - site_obj = Location( + status, created = Status.objects.get_or_create(name="Staging") + if created: + status.content_types.add(ContentType.objects.get_for_model(Location)) + site_obj = Location.objects.create( name=site_name, - status=Status.objects.get(name="Staging"), + status=status, location_type=loc_type, ) - site_obj.validated_save() return site_obj @@ -39,7 +225,7 @@ def verify_device_type_object(device_type): """Verifies whether device type object already exists in Nautobot. If not, creates specified device type. Args: - device_type (str): Device model gathered from Cloudvision. + device_type (str): Device model gathered from CloudVision. """ try: device_type_obj = DeviceType.objects.get(model=device_type) @@ -101,14 +287,12 @@ def get_device_version(device): return version -def parse_hostname(hostname: str): +def parse_hostname(hostname: str, hostname_patterns: list): """Parse a device's hostname to find site and role. Args: hostname (str): Device hostname to be parsed for site and role. """ - hostname_patterns = APP_SETTINGS.get("aristacv_hostname_patterns") - site, role = None, None for pattern in hostname_patterns: match = re.search(pattern=pattern, string=hostname) @@ -118,35 +302,3 @@ def parse_hostname(hostname: str): if "role" in match.groupdict() and match.group("role"): role = match.group("role") return (site, role) - - -def get_site_from_map(site_code: str): - """Get name of Site from site_mapping based upon sitecode. - - Args: - site_code (str): Site code from device hostname. - - Returns: - str|None: Name of Site if site code found else None. 
- """ - site_map = APP_SETTINGS.get("aristacv_site_mappings") - site_name = None - if site_code in site_map: - site_name = site_map[site_code] - return site_name - - -def get_role_from_map(role_code: str): - """Get name of Role from role_mapping based upon role code in hostname. - - Args: - role_code (str): Role code from device hostname. - - Returns: - str|None: Name of Device Role if role code found else None. - """ - role_map = APP_SETTINGS.get("aristacv_role_mappings") - role_name = None - if role_code in role_map: - role_name = role_map[role_code] - return role_name diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py index 6de48d3e6..6077ff41f 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py @@ -1,6 +1,5 @@ """Nautobot Adapter for Infoblox integration.""" # pylint: disable=duplicate-code -from collections import defaultdict import datetime from diffsync import DiffSync from diffsync.exceptions import ObjectAlreadyExists, ObjectNotFound @@ -18,7 +17,6 @@ ) from nautobot_ssot.integrations.infoblox.constant import TAG_COLOR from nautobot_ssot.integrations.infoblox.utils.diffsync import ( - create_tag_sync_from_infoblox, nautobot_vlan_status, get_default_custom_fields, ) @@ -40,18 +38,20 @@ def tag_involved_objects(self, target): "color": TAG_COLOR, }, ) + for model in [IPAddress, Prefix, VLAN]: + tag.content_types.add(ContentType.objects.get_for_model(model)) # Ensure that the "ssot_synced_to_infoblox" custom field is present; as above, it *should* already exist. custom_field, _ = CustomField.objects.get_or_create( type=CustomFieldTypeChoices.TYPE_DATE, - name="ssot_synced_to_infoblox", + key="ssot_synced_to_infoblox", defaults={ "label": "Last synced to Infoblox on", }, ) - for model in [IPAddress, Prefix]: + for model in [IPAddress, Prefix, VLAN, VLANGroup]: custom_field.content_types.add(ContentType.objects.get_for_model(model)) - for modelname in ["ipaddress", "prefix"]: + for modelname in ["ipaddress", "prefix", "vlan", "vlangroup"]: for local_instance in self.get_all(modelname): unique_id = local_instance.get_unique_id() # Verify that the object now has a counterpart in the target DiffSync @@ -72,13 +72,17 @@ def _tag_object(nautobot_object): if hasattr(nautobot_object, "tags"): nautobot_object.tags.add(tag) if hasattr(nautobot_object, "cf"): - nautobot_object.cf[custom_field.name] = today + nautobot_object.cf[custom_field.key] = today nautobot_object.validated_save() if modelname == "ipaddress": _tag_object(IPAddress.objects.get(pk=model_instance.pk)) elif modelname == "prefix": _tag_object(Prefix.objects.get(pk=model_instance.pk)) + elif modelname == "vlan": + _tag_object(VLAN.objects.get(pk=model_instance.pk)) + elif modelname == "vlangroup": + _tag_object(VLANGroup.objects.get(pk=model_instance.pk)) class NautobotAdapter(NautobotMixin, DiffSync): # pylint: disable=too-many-instance-attributes @@ -112,7 +116,6 @@ def __init__(self, *args, job=None, sync=None, **kwargs): super().__init__(*args, **kwargs) self.job = job self.sync = sync - self.objects_to_create = defaultdict(list) def sync_complete(self, source: DiffSync, *args, **kwargs): """Process object creations/updates using bulk operations. @@ -120,11 +123,7 @@ def sync_complete(self, source: DiffSync, *args, **kwargs): Args: source (DiffSync): Source DiffSync adapter data. 
""" - for obj_type, objs in self.objects_to_create.items(): - if obj_type != "vlangroups": - self.job.logger.info(f"Adding tags to all imported {obj_type}.") - for obj in objs: - obj.tags.add(create_tag_sync_from_infoblox()) + super().sync_complete(source, *args, **kwargs) def load_prefixes(self): """Load Prefixes from Nautobot.""" diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py b/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py index 15b9dbacc..5e6e279f3 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py @@ -12,6 +12,7 @@ from nautobot.ipam.models import VLANGroup as OrmVlanGroup from nautobot_ssot.integrations.infoblox.constant import PLUGIN_CFG from nautobot_ssot.integrations.infoblox.diffsync.models.base import Network, IPAddress, Vlan, VlanView +from nautobot_ssot.integrations.infoblox.utils.diffsync import create_tag_sync_from_infoblox from nautobot_ssot.integrations.infoblox.utils.nautobot import get_prefix_vlans @@ -132,6 +133,7 @@ def create(cls, diffsync, ids, attrs): if attrs.get("ext_attrs"): process_ext_attrs(diffsync=diffsync, obj=_prefix, extattrs=attrs["ext_attrs"]) + _prefix.tags.add(create_tag_sync_from_infoblox()) _prefix.validated_save() diffsync.prefix_map[ids["network"]] = _prefix.id return super().create(ids=ids, diffsync=diffsync, attrs=attrs) @@ -226,6 +228,7 @@ def create(cls, diffsync, ids, attrs): if attrs.get("ext_attrs"): process_ext_attrs(diffsync=diffsync, obj=_ip, extattrs=attrs["ext_attrs"]) try: + _ip.tags.add(create_tag_sync_from_infoblox()) _ip.validated_save() diffsync.ipaddr_map[_ip.address] = _ip.id return super().create(ids=ids, diffsync=diffsync, attrs=attrs) @@ -317,6 +320,7 @@ def create(cls, diffsync, ids, attrs): if "ext_attrs" in attrs: process_ext_attrs(diffsync=diffsync, obj=_vlan, extattrs=attrs["ext_attrs"]) try: + _vlan.tags.add(create_tag_sync_from_infoblox()) _vlan.validated_save() if ids["vlangroup"] not in diffsync.vlan_map: diffsync.vlan_map[ids["vlangroup"]] = {} diff --git a/nautobot_ssot/integrations/infoblox/signals.py b/nautobot_ssot/integrations/infoblox/signals.py index ccc9e1633..06c7f31c5 100644 --- a/nautobot_ssot/integrations/infoblox/signals.py +++ b/nautobot_ssot/integrations/infoblox/signals.py @@ -11,7 +11,7 @@ def register_signals(sender): nautobot_database_ready.connect(nautobot_database_ready_callback, sender=sender) -def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disable=unused-argument +def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disable=unused-argument,too-many-locals """Create Tag and CustomField to note System of Record for SSoT. Callback function triggered by the nautobot_database_ready signal when the Nautobot database is fully ready. 
@@ -24,8 +24,9 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa Tag = apps.get_model("extras", "Tag") Relationship = apps.get_model("extras", "Relationship") VLAN = apps.get_model("ipam", "VLAN") + VLANGroup = apps.get_model("ipam", "VLANGroup") - Tag.objects.get_or_create( + tag_sync_from_infoblox, _ = Tag.objects.get_or_create( name="SSoT Synced from Infoblox", defaults={ "name": "SSoT Synced from Infoblox", @@ -33,7 +34,9 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa "color": TAG_COLOR, }, ) - Tag.objects.get_or_create( + for model in [IPAddress, Prefix, VLAN]: + tag_sync_from_infoblox.content_types.add(ContentType.objects.get_for_model(model)) + tag_sync_to_infoblox, _ = Tag.objects.get_or_create( name="SSoT Synced to Infoblox", defaults={ "name": "SSoT Synced to Infoblox", @@ -41,6 +44,8 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa "color": TAG_COLOR, }, ) + for model in [IPAddress, Prefix, VLAN]: + tag_sync_to_infoblox.content_types.add(ContentType.objects.get_for_model(model)) custom_field, _ = CustomField.objects.get_or_create( type=CustomFieldTypeChoices.TYPE_DATE, key="ssot_synced_to_infoblox", @@ -48,11 +53,8 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa "label": "Last synced to Infoblox on", }, ) - for content_type in [ - ContentType.objects.get_for_model(Prefix), - ContentType.objects.get_for_model(IPAddress), - ]: - custom_field.content_types.add(content_type) + for model in [IPAddress, Prefix, VLAN, VLANGroup]: + custom_field.content_types.add(ContentType.objects.get_for_model(model)) range_custom_field, _ = CustomField.objects.get_or_create( type=CustomFieldTypeChoices.TYPE_TEXT, key="dhcp_ranges", diff --git a/nautobot_ssot/integrations/infoblox/utils/diffsync.py b/nautobot_ssot/integrations/infoblox/utils/diffsync.py index 88eecbc4c..3589b063e 100644 --- a/nautobot_ssot/integrations/infoblox/utils/diffsync.py +++ b/nautobot_ssot/integrations/infoblox/utils/diffsync.py @@ -1,6 +1,7 @@ """Utilities for DiffSync related stuff.""" from django.contrib.contenttypes.models import ContentType from django.utils.text import slugify +from nautobot.ipam.models import IPAddress, Prefix, VLAN from nautobot.extras.models import CustomField, Tag from nautobot_ssot.integrations.infoblox.constant import TAG_COLOR @@ -15,6 +16,8 @@ def create_tag_sync_from_infoblox(): "color": TAG_COLOR, }, ) + for model in [IPAddress, Prefix, VLAN]: + tag.content_types.add(ContentType.objects.get_for_model(model)) return tag diff --git a/nautobot_ssot/integrations/ipfabric/diffsync/adapter_ipfabric.py b/nautobot_ssot/integrations/ipfabric/diffsync/adapter_ipfabric.py index 192a3f93a..5e14973fe 100644 --- a/nautobot_ssot/integrations/ipfabric/diffsync/adapter_ipfabric.py +++ b/nautobot_ssot/integrations/ipfabric/diffsync/adapter_ipfabric.py @@ -96,7 +96,7 @@ def load_device_interfaces(self, device_model, interfaces, device_primary_ip, ne if iface.get("mac") else DEFAULT_INTERFACE_MAC, mtu=iface.get("mtu") if iface.get("mtu") else DEFAULT_INTERFACE_MTU, - type=ipfabric_utils.convert_media_type(iface.get("media") or ""), + type=ipfabric_utils.convert_media_type(iface.get("media"), iface_name), mgmt_only=iface.get("mgmt_only", False), ip_address=ip_address, subnet_mask=subnet_mask, @@ -115,7 +115,7 @@ def load(self): # pylint: disable=too-many-locals,too-many-statements interfaces = self.client.inventory.interfaces.all() vlans = 
self.client.fetch_all("tables/vlan/site-summary")
         networks = defaultdict(list)
-        for network in self.client.technology.managed_networks.networks.fetch(
+        for network in self.client.technology.managed_networks.networks.all(
             filters={"net": ["empty", False], "siteName": ["empty", False]},
             columns=["net", "siteName"],
         ):
@@ -151,7 +151,7 @@ def load(self):  # pylint: disable=too-many-locals,too-many-statements
             location_devices = [device for device in devices if device["siteName"] == location.name]
             for device in location_devices:
                 device_name = device["hostname"]
-                stack_members = self.client.technology.platforms.stacks_members.fetch(
+                stack_members = self.client.technology.platforms.stacks_members.all(
                     filters={"master": ["eq", device_name], "siteName": ["eq", location.name]},
                     columns=["master", "member", "memberSn", "pn"],
                 )
diff --git a/nautobot_ssot/integrations/ipfabric/utilities/utils.py b/nautobot_ssot/integrations/ipfabric/utilities/utils.py
index fad946197..7c601ffcc 100644
--- a/nautobot_ssot/integrations/ipfabric/utilities/utils.py
+++ b/nautobot_ssot/integrations/ipfabric/utilities/utils.py
@@ -1,4 +1,6 @@
 """General utils for IPFabric."""
+import re
+
 from nautobot_ssot.integrations.ipfabric.constants import DEFAULT_INTERFACE_TYPE
@@ -37,99 +39,122 @@
 def convert_media_type(  # pylint: disable=too-many-return-statements,too-many-branches,too-many-statements
     media_type: str,
+    interface_name: str,
 ) -> str:
     """Convert provided `media_type` to value used by Nautobot.

     Args:
         media_type: The media type of an interface (e.g. SFP-10GBase)
+        interface_name: The name of the interface with `media_type`.

     Returns:
         str: The corresponding representation of `media_type` in Nautobot.
     """
-    media_type = media_type.lower().replace("-", "")
-    if VIRTUAL in media_type:
-        return VIRTUAL
-    if BRIDGE in media_type:
-        return BRIDGE
-    if LAG in media_type:
-        return LAG
-
-    if TEN_GIG_BASE_T in media_type:
-        return "10gbase-t"
-
-    # Going from 10Gig to lower bandwidths to allow media that supports multiple
-    # bandwidths to use highest supported bandwidth
-    if TEN_GIG in media_type:
-        nautobot_media_type = "10gbase-x-"
-        if XFP in media_type:
-            nautobot_media_type += "xfp"
-        elif X2 in media_type:
-            nautobot_media_type += "x2"
-        elif XENPAK in media_type:
-            nautobot_media_type += "xenpak"
-        else:
-            nautobot_media_type += "sfpp"
-        return nautobot_media_type
-
-    # Flipping order of 5gig and 2.5g as both use the string 5gbase
-    if TWO_AND_HALF_GIG_BASE in media_type:
-        return "2.5gbase-t"
-
-    if FIVE_GIG_BASE in media_type:
-        return "5gbase-t"
-
-    if GIG_BASE in media_type or RJ45 in media_type or GIG in media_type:
-        nautobot_media_type = "1000base-"
-        if GBIC in media_type:
-            nautobot_media_type += "x-gbic"
-        elif SFP in media_type or SR in media_type or LR in media_type or SX in media_type or LX in media_type:
-            nautobot_media_type += "x-sfp"
-        else:
-            nautobot_media_type += "t"
-        return nautobot_media_type
-
-    if HUNDRED_MEG_BASE in media_type or HUNDRED_MEG in media_type:
-        return "100base-tx"
-
-    if TWENTY_FIVE_GIG in media_type:
-        return "25gbase-x-sfp28"
-
-    if FORTY_GIG in media_type:
-        return "40gbase-x-qsfpp"
-
-    if FIFTY_GIG in media_type:
-        return "50gbase-x-sfp56"
-
-    if HUNDRED_GIG in media_type:
-        nautobot_media_type = "100gbase-x-"
-        if QSFP in media_type:
-            nautobot_media_type += "qsfp28"
-        else:
-            nautobot_media_type += "cfp"
-        return nautobot_media_type
-
-    if TWO_HUNDRED_GIG in media_type:
-        nautobot_media_type = "200gbase-x-"
-        if QSFP in media_type:
-            nautobot_media_type += "qsfp56"
-        else:
- nautobot_media_type += "cfp2" - return nautobot_media_type - - if FOUR_HUNDRED_GIG in media_type: - nautobot_media_type = "400gbase-x-" - if QSFP in media_type: - nautobot_media_type += "qsfp112" - else: - nautobot_media_type += "osfp" - return nautobot_media_type - - if EIGHT_HUNDRED_GIG in media_type: - nautobot_media_type = "800gbase-x-" - if QSFP in media_type: - nautobot_media_type += "qsfpdd" - else: - nautobot_media_type += "osfp" - return nautobot_media_type + if media_type: + media_type = media_type.lower().replace("-", "") + if VIRTUAL in media_type: + return VIRTUAL + if BRIDGE in media_type: + return BRIDGE + if LAG in media_type: + return LAG + + if TEN_GIG_BASE_T in media_type: + return "10gbase-t" + + # Going from 10Gig to lower bandwidths to allow media that supports multiple + # bandwidths to use highest supported bandwidth + if TEN_GIG in media_type: + nautobot_media_type = "10gbase-x-" + if XFP in media_type: + nautobot_media_type += "xfp" + elif X2 in media_type: + nautobot_media_type += "x2" + elif XENPAK in media_type: + nautobot_media_type += "xenpak" + else: + nautobot_media_type += "sfpp" + return nautobot_media_type + + # Flipping order of 5gig and 2.5g as both use the string 5gbase + if TWO_AND_HALF_GIG_BASE in media_type: + return "2.5gbase-t" + + if FIVE_GIG_BASE in media_type: + return "5gbase-t" + + if GIG_BASE in media_type or RJ45 in media_type or GIG in media_type: + nautobot_media_type = "1000base-" + if GBIC in media_type: + nautobot_media_type += "x-gbic" + elif SFP in media_type or SR in media_type or LR in media_type or SX in media_type or LX in media_type: + nautobot_media_type += "x-sfp" + else: + nautobot_media_type += "t" + return nautobot_media_type + + if HUNDRED_MEG_BASE in media_type or HUNDRED_MEG in media_type: + return "100base-tx" + + if TWENTY_FIVE_GIG in media_type: + return "25gbase-x-sfp28" + + if FORTY_GIG in media_type: + return "40gbase-x-qsfpp" + + if FIFTY_GIG in media_type: + return "50gbase-x-sfp56" + + if HUNDRED_GIG in media_type: + nautobot_media_type = "100gbase-x-" + if QSFP in media_type: + nautobot_media_type += "qsfp28" + else: + nautobot_media_type += "cfp" + return nautobot_media_type + + if TWO_HUNDRED_GIG in media_type: + nautobot_media_type = "200gbase-x-" + if QSFP in media_type: + nautobot_media_type += "qsfp56" + else: + nautobot_media_type += "cfp2" + return nautobot_media_type + + if FOUR_HUNDRED_GIG in media_type: + nautobot_media_type = "400gbase-x-" + if QSFP in media_type: + nautobot_media_type += "qsfp112" + else: + nautobot_media_type += "osfp" + return nautobot_media_type + + if EIGHT_HUNDRED_GIG in media_type: + nautobot_media_type = "800gbase-x-" + if QSFP in media_type: + nautobot_media_type += "qsfpdd" + else: + nautobot_media_type += "osfp" + return nautobot_media_type + else: + interface_name = interface_name.lower() + regex_to_type = ( + (r"po(rt-?channel)?\d", "lag"), + (r"vl(an)?\d", "virtual"), + (r"lo(opback)?\d", "virtual"), + (r"tu(nnel)?\d", "virtual"), + (r"vx(lan)?\d", "virtual"), + (r"fa(stethernet)?\d", "100base-tx"), + (r"gi(gabitethernet)?\d", "1000base-t"), + (r"te(ngigabitethernet)?\d", "10gbase-x-sfpp"), + (r"twentyfivegigabitethernet\d", "25gbase-x-sfp28"), + (r"fo(rtygigabitethernet)?\d", "40gbase-x-qsfpp"), + (r"fi(ftygigabitethernet)?\d", "50gbase-x-sfp56"), + (r"hu(ndredgigabitethernet)?\d", "100gbase-x-qsfp28"), + (r"twohundredgigabitethernet\d", "200gbase-x-qsfp56"), + ) + for regex, iface_type in regex_to_type: + if re.match(regex, interface_name): + return iface_type 
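+    # Fall back when neither the media type nor the interface name matched a known pattern.
+    # Illustrative examples based on the table above: convert_media_type("", "Port-Channel10")
+    # returns "lag" via the name-based patterns, while convert_media_type("", "Mgmt0") matches
+    # nothing and falls through to the default below.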
return DEFAULT_INTERFACE_TYPE diff --git a/nautobot_ssot/integrations/servicenow/diffsync/adapter_servicenow.py b/nautobot_ssot/integrations/servicenow/diffsync/adapter_servicenow.py index 9a9e40a8e..a725f6ef0 100644 --- a/nautobot_ssot/integrations/servicenow/diffsync/adapter_servicenow.py +++ b/nautobot_ssot/integrations/servicenow/diffsync/adapter_servicenow.py @@ -4,6 +4,7 @@ import json import os +from collections import defaultdict from diffsync import DiffSync from diffsync.enum import DiffSyncFlags from diffsync.exceptions import ObjectAlreadyExists, ObjectNotFound @@ -16,6 +17,10 @@ class ServiceNowDiffSync(DiffSync): """DiffSync adapter using pysnow to communicate with a ServiceNow server.""" + # create defaultdict object to store objects that should be deleted from ServiceNow if they do not + # exist in Nautobot + objects_to_delete = defaultdict(list) + company = models.Company device = models.Device # child of location interface = models.Interface # child of device @@ -293,3 +298,23 @@ def sync_complete(self, source, diff, flags=DiffSyncFlags.NONE, logger=None): self.bulk_create_interfaces() source.tag_involved_objects(target=self) + + # If there are objects inside any of the lists in objects_to_delete then iterate over those objects + # and remove them from ServiceNow + if ( + self.objects_to_delete["interface"] + or self.objects_to_delete["device"] + or self.objects_to_delete["product_model"] + or self.objects_to_delete["location"] + or self.objects_to_delete["company"] + ): + for grouping in ( + "interface", + "device", + "product_model", + "location", + "company", + ): + for sn_object in self.objects_to_delete[grouping]: + sn_object.delete() + self.objects_to_delete[grouping] = [] diff --git a/nautobot_ssot/integrations/servicenow/diffsync/models.py b/nautobot_ssot/integrations/servicenow/diffsync/models.py index e17ba9323..490f2c601 100644 --- a/nautobot_ssot/integrations/servicenow/diffsync/models.py +++ b/nautobot_ssot/integrations/servicenow/diffsync/models.py @@ -15,7 +15,7 @@ class ServiceNowCRUDMixin: _sys_id_cache = {} """Dict of table -> column_name -> value -> sys_id.""" - def map_data_to_sn_record(self, data, mapping_entry, existing_record=None): + def map_data_to_sn_record(self, data, mapping_entry, existing_record=None, clear_cache=False): """Map create/update data from DiffSync to a corresponding ServiceNow data record.""" record = existing_record or {} for mapping in mapping_entry.get("mappings", []): @@ -31,6 +31,9 @@ def map_data_to_sn_record(self, data, mapping_entry, existing_record=None): raise NotImplementedError column_name = mapping["reference"]["column"] if value is not None: + # if clear_cache is set to True then clear the cache for the object + if clear_cache: + self._sys_id_cache.setdefault(tablename, {}).setdefault(column_name, {})[value] = {} # Look in the cache first sys_id = self._sys_id_cache.get(tablename, {}).get(column_name, {}).get(value, None) if not sys_id: @@ -40,7 +43,6 @@ def map_data_to_sn_record(self, data, mapping_entry, existing_record=None): else: sys_id = target["sys_id"] self._sys_id_cache.setdefault(tablename, {}).setdefault(column_name, {})[value] = sys_id - record[mapping["reference"]["key"]] = sys_id else: raise NotImplementedError @@ -82,7 +84,27 @@ def update(self, attrs): super().update(attrs) return self - # TODO delete() method + def delete(self): + """Delete an existing instance in ServiceNow if it does not exist in Nautobot. This code adds the ServiceNow object to the objects_to_delete dict of lists. 
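Deletion is deferred so records can be removed in order (interfaces, devices, product models, locations, then companies) once the diff has been fully processed.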
The actual delete occurs in the post-run method of adapter_servicenow.py."""
+        entry = self.diffsync.mapping_data[self.get_type()]
+        sn_resource = self.diffsync.client.resource(api_path=f"/table/{entry['table']}")
+        query = self.map_data_to_sn_record(data=self.get_identifiers(), mapping_entry=entry)
+        try:
+            sn_resource.get(query=query).one()
+        except pysnow.exceptions.MultipleResults:
+            self.diffsync.job.logger.error(
+                f"Unsure which record to delete, as query {query} matched more than one item "
+                f"in table {entry['table']}"
+            )
+            return None
+        self.diffsync.job.logger.warning(f"{self._modelname} {self.get_identifiers()} will be deleted.")
+        _object = sn_resource.get(query=query)
+        self.diffsync.objects_to_delete[self._modelname].append(_object)
+        self.map_data_to_sn_record(
+            data=self.get_identifiers(), mapping_entry=entry, clear_cache=True
+        )  # clear the cached sys_id entry for this record
+        super().delete()
+        return self


 class Company(ServiceNowCRUDMixin, DiffSyncModel):
@@ -96,7 +118,7 @@ class Company(ServiceNowCRUDMixin, DiffSyncModel):
     }

     name: str
-    manufacturer: bool = False
+    manufacturer: bool = True

     product_models: List["ProductModel"] = []

@@ -110,7 +132,7 @@ class ProductModel(ServiceNowCRUDMixin, DiffSyncModel):
     _modelname = "product_model"
     _identifiers = ("manufacturer_name", "model_name", "model_number")

-    manufacturer_name: Optional[str]  # some ServiceNow products have no associated manufacturer?
+    manufacturer_name: str

     # Nautobot has only one combined "model" field, but ServiceNow has both name and number
     model_name: str
     model_number: str
@@ -196,8 +218,6 @@ def create(cls, diffsync, ids, attrs):

         return model

-    # TODO delete() method
-

 class Interface(ServiceNowCRUDMixin, DiffSyncModel):
     """ServiceNow Interface model."""
@@ -253,8 +273,6 @@ def create(cls, diffsync, ids, attrs):
         model = super().create(diffsync, ids=ids, attrs=attrs)
         return model

-    # TODO delete() method
-

 class IPAddress(ServiceNowCRUDMixin, DiffSyncModel):
     """An IPv4 or IPv6 address."""
diff --git a/nautobot_ssot/integrations/servicenow/jobs.py b/nautobot_ssot/integrations/servicenow/jobs.py
index e69da0c4b..f13fb3706 100644
--- a/nautobot_ssot/integrations/servicenow/jobs.py
+++ b/nautobot_ssot/integrations/servicenow/jobs.py
@@ -22,11 +22,7 @@ class ServiceNowDataTarget(DataTarget, Job):  # pylint: disable=abstract-method

     debug = BooleanVar(description="Enable for more verbose logging.")

-    # TODO: not yet implemented
-    # delete_records = BooleanVar(
-    #     description="Delete records from ServiceNow if not present in Nautobot",
-    #     default=False,
-    # )
+    delete_records = BooleanVar(description="Delete synced records from ServiceNow if not present in Nautobot")

     site_filter = ObjectVar(
         description="Only sync records belonging to a single Site.",
diff --git a/nautobot_ssot/jobs/__init__.py b/nautobot_ssot/jobs/__init__.py
index fce386eae..d6765371e 100644
--- a/nautobot_ssot/jobs/__init__.py
+++ b/nautobot_ssot/jobs/__init__.py
@@ -3,13 +3,15 @@

 from django.conf import settings
 from nautobot.core.celery import register_jobs
+from nautobot.core.settings_funcs import is_truthy
 from nautobot.extras.models import Job

 from nautobot_ssot.integrations.utils import each_enabled_integration_module
 from nautobot_ssot.jobs.base import DataSource, DataTarget
 from nautobot_ssot.jobs.examples import ExampleDataSource, ExampleDataTarget
 from nautobot_ssot.utils import logger

-if settings.PLUGINS_CONFIG["nautobot_ssot"]["hide_example_jobs"]:
+hide_jobs_setting = settings.PLUGINS_CONFIG["nautobot_ssot"].get("hide_example_jobs", False)
+if
is_truthy(hide_jobs_setting): jobs = [] else: jobs = [ExampleDataSource, ExampleDataTarget] diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index a3164fc23..3362e5900 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -5,20 +5,25 @@ from typing import Optional, Mapping, List from uuid import UUID +from django.contrib.contenttypes.models import ContentType from django.templatetags.static import static from django.urls import reverse -from nautobot.dcim.models import Location, LocationType -from nautobot.extras.jobs import StringVar +from nautobot.dcim.models import Device, DeviceType, Location, LocationType, Manufacturer, Platform +from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices +from nautobot.extras.jobs import ObjectVar, StringVar +from nautobot.extras.models import ExternalIntegration, Role from nautobot.ipam.models import Prefix from nautobot.tenancy.models import Tenant from diffsync import DiffSync from diffsync.enum import DiffSyncFlags +from diffsync.exceptions import ObjectNotFound import requests from nautobot_ssot.contrib import NautobotModel, NautobotAdapter +from nautobot_ssot.tests.contrib_base_classes import ContentTypeDict from nautobot_ssot.jobs.base import DataMapping, DataSource, DataTarget @@ -35,12 +40,14 @@ class LocationTypeModel(NautobotModel): _modelname = "locationtype" _identifiers = ("name",) # To keep this example simple, we don't include **all** attributes of a Location here. But you could! - _attributes = ("description", "nestable") + _attributes = ("content_types", "description", "nestable", "parent__name") # Data type declarations for all identifiers and attributes name: str description: str nestable: bool + parent__name: Optional[str] + content_types: List[ContentTypeDict] = [] # Not in _attributes or _identifiers, hence not included in diff calculations pk: Optional[UUID] @@ -54,31 +61,57 @@ class LocationModel(NautobotModel): _modelname = "location" _identifiers = ("name",) # To keep this example simple, we don't include **all** attributes of a Location here. But you could! - _attributes = ("location_type__name", "status__name", "parent__name", "description") + _attributes = ( + "location_type__name", + "status__name", + "parent__name", + "parent__location_type__name", + "tenant__name", + "description", + ) # Data type declarations for all identifiers and attributes name: str location_type__name: str status__name: str parent__name: Optional[str] + parent__location_type__name: Optional[str] + tenant__name: Optional[str] description: str # Not in _attributes or _identifiers, hence not included in diff calculations pk: Optional[UUID] +class RoleModel(NautobotModel): + """Shared data model representing a Role in either of the local or remote Nautobot instances.""" + + # Metadata about this model + _model = Role + _modelname = "role" + _identifiers = ("name",) + _attributes = ("content_types",) + + name: str + content_types: List[ContentTypeDict] = [] + + # Not in _attributes or _identifiers, hence not included in diff calculations + pk: Optional[UUID] + + class PrefixModel(NautobotModel): """Shared data model representing a Prefix in either of the local or remote Nautobot instances.""" # Metadata about this model _model = Prefix _modelname = "prefix" - _identifiers = ("prefix", "tenant__name") + _identifiers = ("network", "prefix_length", "tenant__name") # To keep this example simple, we don't include **all** attributes of a Prefix here. 
But you could! _attributes = ("description", "status__name") # Data type declarations for all identifiers and attributes - prefix: str + network: str + prefix_length: int tenant__name: Optional[str] status__name: str description: str @@ -94,7 +127,7 @@ class TenantModel(NautobotModel): _model = Tenant _modelname = "tenant" _identifiers = ("name",) - _children = {"prefix": "prefixes"} + _children = {} name: str prefixes: List[PrefixModel] = [] @@ -102,6 +135,92 @@ class TenantModel(NautobotModel): pk: Optional[UUID] +class DeviceTypeModel(NautobotModel): + """Shared data model representing a DeviceType in either of the local or remote Nautobot instances.""" + + _model = DeviceType + _modelname = "device_type" + _identifiers = ("model", "manufacturer__name") + _attributes = ("part_number", "u_height", "is_full_depth") + + model: str + manufacturer__name: str + part_number: str + u_height: int + is_full_depth: bool + + # Not in _attributes or _identifiers, hence not included in diff calculations + pk: Optional[UUID] + + +class ManufacturerModel(NautobotModel): + """Shared data model representing a Manufacturer in either of the local or remote Nautobot instances.""" + + _model = Manufacturer + _modelname = "manufacturer" + _identifiers = ("name",) + _attributes = ("description",) + _children = {"device_type": "device_types"} + + name: str + description: str + device_types: List[DeviceTypeModel] = [] + + # Not in _attributes or _identifiers, hence not included in diff calculations + pk: Optional[UUID] + + +class PlatformModel(NautobotModel): + """Shared data model representing a Platform in either of the local or remote Nautobot instances.""" + + _model = Platform + _modelname = "platform" + _identifiers = ("name", "manufacturer__name") + _attributes = ("description", "network_driver", "napalm_driver") + + name: str + manufacturer__name: str + description: str + network_driver: str + napalm_driver: str + + +class DeviceModel(NautobotModel): + """Shared data model representing a Device in either of the local or remote Nautobot instances.""" + + # Metadata about this model + _model = Device + _modelname = "device" + _identifiers = ("name", "location__name", "location__parent__name") + _attributes = ( + "location__location_type__name", + "location__parent__location_type__name", + "device_type__manufacturer__name", + "device_type__model", + "platform__name", + "role__name", + "serial", + "status__name", + "tenant__name", + "asset_tag", + ) + # _children = {"interface": "interfaces"} + + name: str + location__name: str + location__location_type__name: str + location__parent__name: Optional[str] + location__parent__location_type__name: Optional[str] + device_type__manufacturer__name: str + device_type__model: str + platform__name: Optional[str] + role__name: str + serial: str + status__name: str + tenant__name: Optional[str] + asset_tag: Optional[str] + + class LocationRemoteModel(LocationModel): """Implementation of Location create/update/delete methods for updating remote Nautobot data.""" @@ -135,7 +254,7 @@ def update(self, attrs): data = {} if "description" in attrs: data["description"] = attrs["description"] - if "status" in attrs: + if "status__name" in attrs: data["status"] = attrs["status__name"] if "parent__name" in attrs: if attrs["parent__name"]: @@ -190,7 +309,8 @@ def create(cls, diffsync, ids, attrs): diffsync.post( "/api/ipam/prefixes/", { - "prefix": ids["prefix"], + "network": ids["network"], + "prefix_length": ids["prefix_length"], "tenant": {"name": ids["tenant__name"]} if 
ids["tenant__name"] else None, "description": attrs["description"], "status": attrs["status__name"], @@ -207,7 +327,7 @@ def update(self, attrs): data = {} if "description" in attrs: data["description"] = attrs["description"] - if "status" in attrs: + if "status__name" in attrs: data["status"] = attrs["status__name"] self.diffsync.patch(f"/api/dcim/locations/{self.pk}/", data) return super().update(attrs) @@ -233,9 +353,14 @@ class NautobotRemote(DiffSync): location = LocationRemoteModel tenant = TenantRemoteModel prefix = PrefixRemoteModel + manufacturer = ManufacturerModel + device_type = DeviceTypeModel + platform = PlatformModel + role = RoleModel + device = DeviceModel # Top-level class labels, i.e. those classes that are handled directly rather than as children of other models - top_level = ["locationtype", "location", "tenant"] + top_level = ["tenant", "locationtype", "location", "manufacturer", "platform", "role", "device"] def __init__(self, *args, url=None, token=None, job=None, **kwargs): """Instantiate this class, but do not load data immediately from the remote system. @@ -268,34 +393,76 @@ def _get_api_data(self, url_path: str) -> Mapping: return result_data def load(self): - """Load Region and Site data from the remote Nautobot instance.""" - for lt_entry in self._get_api_data("api/dcim/location-types/"): + """Load data from the remote Nautobot instance.""" + self.load_location_types() + self.load_locations() + self.load_roles() + self.load_tenants() + self.load_prefixes() + self.load_manufacturers() + self.load_device_types() + self.load_platforms() + self.load_devices() + + def load_location_types(self): + """Load LocationType data from the remote Nautobot instance.""" + for lt_entry in self._get_api_data("api/dcim/location-types/?depth=1"): + content_types = self.get_content_types(lt_entry) location_type = self.locationtype( name=lt_entry["name"], description=lt_entry["description"], nestable=lt_entry["nestable"], + parent__name=lt_entry["parent"]["name"] if lt_entry.get("parent") else None, + content_types=content_types, pk=lt_entry["id"], ) self.add(location_type) self.job.logger.debug(f"Loaded {location_type} LocationType from remote Nautobot instance") - for loc_entry in self._get_api_data("api/dcim/locations/?depth=1"): + def load_locations(self): + """Load Locations data from the remote Nautobot instance.""" + for loc_entry in self._get_api_data("api/dcim/locations/?depth=3"): location_args = { "name": loc_entry["name"], "status__name": loc_entry["status"]["name"] if loc_entry["status"].get("name") else "Active", "location_type__name": loc_entry["location_type"]["name"], + "tenant__name": loc_entry["tenant"]["name"] if loc_entry.get("tenant") else None, "description": loc_entry["description"], "pk": loc_entry["id"], } if loc_entry["parent"]: - location_args["parent_name"] = loc_entry["parent"]["name"] + location_args["parent__name"] = loc_entry["parent"]["name"] + location_args["parent__location_type__name"] = loc_entry["parent"]["location_type"]["name"] new_location = self.location(**location_args) self.add(new_location) self.job.logger.debug(f"Loaded {new_location} Location from remote Nautobot instance") + def load_roles(self): + """Load Roles data from the remote Nautobot instance.""" + for role_entry in self._get_api_data("api/extras/roles/?depth=1"): + content_types = self.get_content_types(role_entry) + role = self.role( + name=role_entry["name"], + content_types=content_types, + pk=role_entry["id"], + ) + self.add(role) + + def load_tenants(self): + """Load 
Tenants data from the remote Nautobot instance.""" + for tenant_entry in self._get_api_data("api/tenancy/tenants/?depth=1"): + tenant = self.tenant( + name=tenant_entry["name"], + pk=tenant_entry["id"], + ) + self.add(tenant) + + def load_prefixes(self): + """Load Prefixes data from the remote Nautobot instance.""" for prefix_entry in self._get_api_data("api/ipam/prefixes/?depth=1"): prefix = self.prefix( - prefix=prefix_entry["prefix"], + network=prefix_entry["network"], + prefix_length=prefix_entry["prefix_length"], description=prefix_entry["description"], status__name=prefix_entry["status"]["name"] if prefix_entry["status"].get("name") else "Active", tenant__name=prefix_entry["tenant"]["name"] if prefix_entry["tenant"] else "", @@ -304,6 +471,91 @@ def load(self): self.add(prefix) self.job.logger.debug(f"Loaded {prefix} from remote Nautobot instance") + def load_manufacturers(self): + """Load Manufacturers data from the remote Nautobot instance.""" + for manufacturer in self._get_api_data("api/dcim/manufacturers/?depth=1"): + manufacturer = self.manufacturer( + name=manufacturer["name"], + description=manufacturer["description"], + pk=manufacturer["id"], + ) + self.add(manufacturer) + + def load_device_types(self): + """Load DeviceTypes data from the remote Nautobot instance.""" + for device_type in self._get_api_data("api/dcim/device-types/?depth=1"): + try: + manufacturer = self.get(self.manufacturer, device_type["manufacturer"]["name"]) + devicetype = self.device_type( + model=device_type["model"], + manufacturer__name=device_type["manufacturer"]["name"], + part_number=device_type["part_number"], + u_height=device_type["u_height"], + is_full_depth=device_type["is_full_depth"], + pk=device_type["id"], + ) + self.add(devicetype) + manufacturer.add_child(devicetype) + except ObjectNotFound: + self.job.logger.debug(f"Unable to find Manufacturer {device_type['manufacturer']['name']}") + + def load_platforms(self): + """Load Platforms data from the remote Nautobot instance.""" + for platform in self._get_api_data("api/dcim/platforms/?depth=1"): + platform = self.platform( + name=platform["name"], + manufacturer__name=platform["manufacturer"]["name"], + description=platform["description"], + network_driver=platform["network_driver"], + napalm_driver=platform["napalm_driver"], + pk=platform["id"], + ) + self.add(platform) + + def load_devices(self): + """Load Devices data from the remote Nautobot instance.""" + for device in self._get_api_data("api/dcim/devices/?depth=3"): + device = self.device( + name=device["name"], + location__name=device["location"]["name"], + location__parent__name=( + device["location"]["parent"]["name"] if device["location"].get("parent") else None + ), + location__parent__location_type__name=( + device["location"]["parent"]["location_type"]["name"] if device["location"].get("parent") else None + ), + location__location_type__name=device["location"]["location_type"]["name"], + device_type__manufacturer__name=device["device_type"]["manufacturer"]["name"], + device_type__model=device["device_type"]["model"], + platform__name=device["platform"]["name"] if device.get("platform") else None, + role__name=device["role"]["name"], + asset_tag=device["asset_tag"] if device.get("asset_tag") else None, + serial=device["serial"] if device.get("serial") else "", + status__name=device["status"]["name"], + tenant__name=device["tenant"]["name"] if device.get("tenant") else None, + pk=device["id"], + ) + self.add(device) + + def get_content_types(self, entry): + """Create list of 
dicts of ContentTypes. + + Args: + entry (dict): Record from Nautobot. + + Returns: + List[dict]: List of dictionaries of ContentTypes split into app_label and model. + """ + content_types = [] + for contenttype in entry["content_types"]: + app_label, model = tuple(contenttype.split(".")) + try: + ContentType.objects.get(app_label=app_label, model=model) + content_types.append({"app_label": app_label, "model": model}) + except ContentType.DoesNotExist: + pass + return content_types + def post(self, path, data): """Send an appropriately constructed HTTP POST request.""" response = requests.post(f"{self.url}{path}", headers=self.headers, json=data, timeout=60) @@ -331,44 +583,14 @@ class NautobotLocal(NautobotAdapter): location = LocationModel tenant = TenantModel prefix = PrefixModel + manufacturer = ManufacturerModel + device_type = DeviceTypeModel + platform = PlatformModel + role = RoleModel + device = DeviceModel # Top-level class labels, i.e. those classes that are handled directly rather than as children of other models - top_level = ["locationtype", "location", "prefix"] - - def load(self): - """Load LocationType and Location data from the local Nautobot instance.""" - for loc_type in LocationType.objects.all(): - new_lt = self.locationtype( - name=loc_type.name, - description=loc_type.description, - nestable=loc_type.nestable, - pk=loc_type.pk, - ) - self.add(new_lt) - self.job.logger.debug(f"Loaded {new_lt} LocationType from local Nautobot instance") - - for location in Location.objects.all(): - loc_model = self.location( - name=location.name, - status=location.status.name, - location_type=location.location_type.name, - parent__name=location.parent.name if location.parent else "", - description=location.description, - pk=location.pk, - ) - self.add(loc_model) - self.job.logger.debug(f"Loaded {loc_model} Location from local Nautobot instance") - - for prefix in Prefix.objects.all(): - prefix_model = self.prefix( - prefix=str(prefix.prefix), - description=prefix.description, - status=prefix.status.name, - tenant__name=prefix.tenant.name if prefix.tenant else "", - pk=prefix.pk, - ) - self.add(prefix_model) - self.job.logger.debug(f"Loaded {prefix_model} from local Nautobot instance") + top_level = ["tenant", "locationtype", "location", "manufacturer", "platform", "role", "device"] # The actual Data Source and Data Target Jobs are relatively simple to implement @@ -378,6 +600,12 @@ def load(self): class ExampleDataSource(DataSource): """Sync Region and Site data from a remote Nautobot instance into the local Nautobot instance.""" + source = ObjectVar( + model=ExternalIntegration, + queryset=ExternalIntegration.objects.all(), + display_field="display", + label="Nautobot Demo Instance", + ) source_url = StringVar( description="Remote Nautobot instance to load Sites and Regions from", default="https://demo.nautobot.com" ) @@ -387,7 +615,7 @@ def __init__(self): """Initialize ExampleDataSource.""" super().__init__() self.diffsync_flags = ( - self.diffsync_flags | DiffSyncFlags.SKIP_UNMATCHED_DST # pylint:disable=unsupported-binary-operation + self.diffsync_flags | DiffSyncFlags.SKIP_UNMATCHED_DST # pylint: disable=unsupported-binary-operation ) class Meta: @@ -405,16 +633,39 @@ def data_mappings(cls): DataMapping("Region (remote)", None, "Region (local)", reverse("dcim:location_list")), DataMapping("Site (remote)", None, "Site (local)", reverse("dcim:location_list")), DataMapping("Prefix (remote)", None, "Prefix (local)", reverse("ipam:prefix_list")), + DataMapping("Tenant (remote)", 
None, "Tenant (local)", reverse("tenancy:tenant_list")), ) - def run( - self, dryrun, memory_profiling, source_url, source_token, *args, **kwargs - ): # pylint:disable=arguments-differ + def run( # pylint: disable=too-many-arguments, arguments-differ + self, + dryrun, + memory_profiling, + source, + source_url, + source_token, + *args, + **kwargs, + ): """Run sync.""" self.dryrun = dryrun self.memory_profiling = memory_profiling - self.source_url = source_url - self.source_token = source_token + try: + if source: + self.logger.info(f"Using external integration '{source}'") + self.source_url = source.remote_url + secrets_group = source.secrets_group + self.source_token = secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_HTTP, + secret_type=SecretsGroupSecretTypeChoices.TYPE_TOKEN, + ) + else: + self.source_url = source_url + self.source_token = source_token + except Exception as error: + # TBD: Why are these exceptions swallowed? + self.logger.error("Error setting up job: %s", error) + raise + super().run(dryrun, memory_profiling, *args, **kwargs) def load_source_adapter(self): @@ -451,6 +702,11 @@ def lookup_object(self, model_name, unique_id): ) except Prefix.DoesNotExist: pass + elif model_name == "tenant": + try: + return Tenant.objects.get(name=unique_id) + except Tenant.DoesNotExist: + pass return None @@ -482,6 +738,7 @@ def data_mappings(cls): DataMapping("Region (local)", reverse("dcim:location_list"), "Region (remote)", None), DataMapping("Site (local)", reverse("dcim:location_list"), "Site (remote)", None), DataMapping("Prefix (local)", reverse("ipam:prefix_list"), "Prefix (remote)", None), + DataMapping("Tenant (local)", reverse("tenancy:tenant_list"), "Tenant (remote)", None), ) def load_source_adapter(self): @@ -517,4 +774,9 @@ def lookup_object(self, model_name, unique_id): ) except Prefix.DoesNotExist: pass + elif model_name == "tenant": + try: + return Tenant.objects.get(name=unique_id) + except Tenant.DoesNotExist: + pass return None diff --git a/nautobot_ssot/tests/aristacv/test_cloudvision_adapter.py b/nautobot_ssot/tests/aristacv/test_cloudvision_adapter.py index e2229e418..9c64bd92b 100644 --- a/nautobot_ssot/tests/aristacv/test_cloudvision_adapter.py +++ b/nautobot_ssot/tests/aristacv/test_cloudvision_adapter.py @@ -1,9 +1,10 @@ -"""Unit tests for the Cloudvision DiffSync adapter class.""" +"""Unit tests for the CloudVision DiffSync adapter class.""" import ipaddress from unittest.mock import MagicMock, patch -from nautobot.extras.models import JobResult from nautobot.core.testing import TransactionTestCase +from nautobot.extras.models import JobResult + from nautobot_ssot.integrations.aristacv.diffsync.adapters.cloudvision import ( CloudvisionAdapter, ) @@ -49,12 +50,10 @@ def setUp(self): ) self.cvp = CloudvisionAdapter(job=self.job, conn=self.client) - @patch.dict( - "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", - {"aristacv_create_controller": False}, - ) def test_load_devices(self): """Test the load_devices() adapter method.""" + # Update config namedtuple `create_controller` to False + self.job.app_config = self.job.app_config._replace(create_controller=False) with patch( "nautobot_ssot.integrations.aristacv.utils.cloudvision.get_devices", self.cloudvision.get_devices, diff --git a/nautobot_ssot/tests/aristacv/test_jobs.py b/nautobot_ssot/tests/aristacv/test_jobs.py index ab89b4f48..b3bb87ce3 100644 --- a/nautobot_ssot/tests/aristacv/test_jobs.py +++ b/nautobot_ssot/tests/aristacv/test_jobs.py @@ -1,6 +1,5 @@ 
-"""Test Cloudvision Jobs.""" -from unittest.mock import patch - +"""Test CloudVision Jobs.""" +from django.test import override_settings from django.urls import reverse from nautobot.core.testing import TestCase @@ -8,13 +7,13 @@ class CloudVisionDataSourceJobTest(TestCase): - """Test the Cloudvision DataSource Job.""" + """Test the CloudVision DataSource Job.""" def test_metadata(self): """Verify correctness of the Job Meta attributes.""" self.assertEqual("CloudVision ⟹ Nautobot", jobs.CloudVisionDataSource.name) self.assertEqual("CloudVision ⟹ Nautobot", jobs.CloudVisionDataSource.Meta.name) - self.assertEqual("Cloudvision", jobs.CloudVisionDataSource.data_source) + self.assertEqual("CloudVision", jobs.CloudVisionDataSource.data_source) self.assertEqual("Sync system tag data from CloudVision to Nautobot", jobs.CloudVisionDataSource.description) def test_data_mapping(self): # pylint: disable=too-many-statements @@ -101,53 +100,55 @@ def test_data_mapping(self): # pylint: disable=too-many-statements self.assertEqual("Topology Type", mappings[15].target_name) self.assertIsNone(mappings[15].target_url) - @patch.dict( - "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", - { - "aristacv_cvp_host": "https://localhost", - "aristacv_cvp_user": "admin", - "aristacv_verify": True, - "aristacv_delete_devices_on_sync": True, - "aristacv_from_cloudvision_default_site": "HQ", - "aristacv_from_cloudvision_default_device_role": "Router", - "aristacv_from_cloudvision_default_device_role_color": "ff0000", - "aristacv_apply_import_tag": True, - "aristacv_import_active": True, + @override_settings( + PLUGINS_CONFIG={ + "nautobot_ssot": { + "aristacv_cvp_host": "https://localhost", + "aristacv_cvp_user": "admin", + "aristacv_verify": True, + "aristacv_delete_devices_on_sync": True, + "aristacv_from_cloudvision_default_site": "HQ", + "aristacv_from_cloudvision_default_device_role": "Router", + "aristacv_from_cloudvision_default_device_role_color": "ff0000", + "aristacv_apply_import_tag": True, + "aristacv_import_active": True, + }, }, ) def test_config_information_on_prem(self): """Verify the config_information() API for on-prem.""" config_information = jobs.CloudVisionDataSource.config_information() - self.assertEqual(config_information["Server type"], "On prem") - self.assertEqual(config_information["CloudVision host"], "https://localhost") - self.assertEqual(config_information["Username"], "admin") - self.assertEqual(config_information["Verify"], "True") - self.assertEqual(config_information["Delete devices on sync"], True) - self.assertEqual(config_information["New device default site"], "HQ") - self.assertEqual(config_information["New device default role"], "Router") - self.assertEqual(config_information["New device default role color"], "ff0000") - self.assertEqual(config_information["Apply import tag"], "True") + self.assertEqual(config_information["Server Type"], "On prem") + self.assertEqual(config_information["CloudVision URL"], "https://localhost:443") + self.assertEqual(config_information["Verify SSL"], "True") + self.assertEqual(config_information["User Name"], "admin") + self.assertEqual(config_information["Delete Devices On Sync"], True) + self.assertEqual(config_information["New Device Default Site"], "HQ") + self.assertEqual(config_information["New Device Default Role"], "Router") + self.assertEqual(config_information["New Device Default Role Color"], "ff0000") + self.assertEqual(config_information["Apply Import Tag"], "True") self.assertEqual(config_information["Import Active"], 
"True") - @patch.dict( - "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", - { - "aristacv_cvaas_url": "https://www.arista.io", - "aristacv_cvp_user": "admin", + @override_settings( + PLUGINS_CONFIG={ + "nautobot_ssot": { + "aristacv_cvaas_url": "https://www.arista.io", + "aristacv_cvp_user": "admin", + }, }, ) def test_config_information_cvaas(self): """Verify the config_information() API for CVaaS.""" config_information = jobs.CloudVisionDataSource.config_information() - self.assertEqual(config_information["Server type"], "CVaaS") - self.assertEqual(config_information["CloudVision host"], "https://www.arista.io") - self.assertEqual(config_information["Username"], "admin") + self.assertEqual(config_information["Server Type"], "CVaaS") + self.assertEqual(config_information["CloudVision URL"], "https://www.arista.io:443") + self.assertEqual(config_information["User Name"], "admin") class CloudVisionDataTargetJobTest(TestCase): - """Test the Cloudvision DataTarget Job.""" + """Test the CloudVision DataTarget Job.""" def test_metadata(self): """Verify correctness of the Job Meta attributes.""" diff --git a/nautobot_ssot/tests/aristacv/test_utils_cloudvision.py b/nautobot_ssot/tests/aristacv/test_utils_cloudvision.py index 64dddc022..fe325032f 100644 --- a/nautobot_ssot/tests/aristacv/test_utils_cloudvision.py +++ b/nautobot_ssot/tests/aristacv/test_utils_cloudvision.py @@ -1,48 +1,62 @@ -"""Tests of Cloudvision utility methods.""" -from unittest.mock import MagicMock, patch -from parameterized import parameterized +"""Tests of CloudVision utility methods.""" + +from unittest.mock import MagicMock +from unittest.mock import patch -from nautobot.core.testing import TestCase from cloudvision.Connector.codec.custom_types import FrozenDict +from django.test import override_settings +from nautobot.core.testing import TestCase +from parameterized import parameterized from nautobot_ssot.integrations.aristacv.utils import cloudvision +from nautobot_ssot.integrations.aristacv.utils.nautobot import get_config from nautobot_ssot.tests.aristacv.fixtures import fixtures class TestCloudvisionApi(TestCase): - """Test Cloudvision Api client and methods.""" + """Test CloudVision Api client and methods.""" databases = ("default", "job_logs") + @override_settings( + PLUGINS_CONFIG={ + "nautobot_ssot": { + "aristacv_cvp_host": "localhost", + "aristacv_verify": True, + }, + }, + ) def test_auth_failure_exception(self): """Test that AuthFailure is thrown when no credentials are passed.""" + config = get_config() with self.assertRaises(cloudvision.AuthFailure): - cloudvision.CloudvisionApi(cvp_host="https://localhost", username="", password="", verify=True) # nosec - - @patch.dict( - "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", - {"aristacv_cvaas_url": "www.arista.io:443"}, + cloudvision.CloudvisionApi(config) # nosec + + @override_settings( + PLUGINS_CONFIG={ + "nautobot_ssot": { + "aristacv_cvaas_url": "www.arista.io:443", + "aristacv_cvp_token": "1234567890abcdef", + }, + }, ) def test_auth_cvass_with_token(self): """Test that authentication against CVaaS with token works.""" - client = cloudvision.CloudvisionApi(cvp_host=None, cvp_token="1234567890abcdef") # nosec - self.assertEqual(client.cvp_url, "www.arista.io:443") - self.assertEqual(client.cvp_token, "1234567890abcdef") + config = get_config() + cloudvision.CloudvisionApi(config) + self.assertEqual(config.url, "https://www.arista.io:443") + self.assertEqual(config.token, "1234567890abcdef") class TestCloudvisionUtils(TestCase): - 
"""Test Cloudvision utility methods.""" + """Test CloudVision utility methods.""" databases = ("default", "job_logs") def setUp(self): - """Setup mock Cloudvision client.""" + """Setup mock CloudVision client.""" self.client = MagicMock() - @patch.dict( - "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", - {"aristacv_import_active": False}, - ) def test_get_all_devices(self): """Test get_devices function for active and inactive devices.""" device1 = MagicMock() @@ -69,14 +83,10 @@ def test_get_all_devices(self): device_svc_stub.DeviceServiceStub.return_value.GetAll.return_value = device_list with patch("nautobot_ssot.integrations.aristacv.utils.cloudvision.services", device_svc_stub): - results = cloudvision.get_devices(client=self.client) + results = cloudvision.get_devices(client=self.client, import_active=False) expected = fixtures.DEVICE_FIXTURE self.assertEqual(results, expected) - @patch.dict( - "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", - {"aristacv_import_active": True}, - ) def test_get_active_devices(self): """Test get_devices function for active devices.""" device1 = MagicMock() @@ -94,7 +104,7 @@ def test_get_active_devices(self): device_svc_stub.DeviceServiceStub.return_value.GetAll.return_value = device_list with patch("nautobot_ssot.integrations.aristacv.utils.cloudvision.services", device_svc_stub): - results = cloudvision.get_devices(client=self.client) + results = cloudvision.get_devices(client=self.client, import_active=True) expected = [ { "device_id": "JPE12345678", diff --git a/nautobot_ssot/tests/aristacv/test_utils_nautobot.py b/nautobot_ssot/tests/aristacv/test_utils_nautobot.py index 070169d05..61cc3a0dd 100644 --- a/nautobot_ssot/tests/aristacv/test_utils_nautobot.py +++ b/nautobot_ssot/tests/aristacv/test_utils_nautobot.py @@ -1,9 +1,12 @@ -"""Tests of Cloudvision utility methods.""" +"""Tests of CloudVision utility methods.""" from unittest import skip from unittest.mock import MagicMock, patch + +from django.test import override_settings +from nautobot.core.testing import TestCase from nautobot.dcim.models import DeviceType, Location, LocationType, Manufacturer from nautobot.extras.models import Relationship, Role, Status, Tag -from nautobot.core.testing import TestCase + from nautobot_ssot.integrations.aristacv.utils import nautobot @@ -102,99 +105,53 @@ def test_get_device_version_dlc_exception(self): result = nautobot.get_device_version(mock_device) self.assertEqual(result, "1.0") - @patch.dict( - "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", - { - "aristacv_hostname_patterns": [r"(?P\w{2,3}\d+)-(?P\w+)-\d+"], - "aristacv_site_mappings": {"ams01": "Amsterdam"}, - "aristacv_role_mappings": {"leaf": "leaf"}, + @override_settings( + PLUGINS_CONFIG={ + "nautobot_ssot": { + "aristacv_hostname_patterns": [r"(?P\w{2,3}\d+)-(?P\w+)-\d+"], + "aristacv_site_mappings": {"ams01": "Amsterdam"}, + "aristacv_role_mappings": {"leaf": "leaf"}, + }, }, ) def test_parse_hostname(self): """Test the parse_hostname method.""" + config = nautobot.get_config() host = "ams01-leaf-01" - results = nautobot.parse_hostname(host) + results = nautobot.parse_hostname(host, config.hostname_patterns) expected = ("ams01", "leaf") self.assertEqual(results, expected) - @patch.dict( - "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", - { - "aristacv_hostname_patterns": [r"(?P\w{2,3}\d+)-.+-\d+"], - "aristacv_site_mappings": {"ams01": "Amsterdam"}, - "aristacv_role_mappings": {}, + @override_settings( + PLUGINS_CONFIG={ + "nautobot_ssot": { 
+ "aristacv_hostname_patterns": [r"(?P\w{2,3}\d+)-.+-\d+"], + "aristacv_site_mappings": {"ams01": "Amsterdam"}, + "aristacv_role_mappings": {}, + }, }, ) def test_parse_hostname_only_site(self): """Test the parse_hostname method with only site specified.""" + config = nautobot.get_config() host = "ams01-leaf-01" - results = nautobot.parse_hostname(host) + results = nautobot.parse_hostname(host, config.hostname_patterns) expected = ("ams01", None) self.assertEqual(results, expected) - @patch.dict( - "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", - { - "aristacv_hostname_patterns": [r".+-(?P\w+)-\d+"], - "aristacv_site_mappings": {}, - "aristacv_role_mappings": {"leaf": "leaf"}, + @override_settings( + PLUGINS_CONFIG={ + "nautobot_ssot": { + "aristacv_hostname_patterns": [r".+-(?P\w+)-\d+"], + "aristacv_site_mappings": {}, + "aristacv_role_mappings": {"leaf": "leaf"}, + }, }, ) def test_parse_hostname_only_role(self): """Test the parse_hostname method with only role specified.""" + config = nautobot.get_config() host = "ams01-leaf-01" - results = nautobot.parse_hostname(host) + results = nautobot.parse_hostname(host, config.hostname_patterns) expected = (None, "leaf") self.assertEqual(results, expected) - - @patch.dict( - "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", - { - "aristacv_hostname_patterns": [r"(?P\w{2,3}\d+)-(?P\w+)-\d+"], - "aristacv_site_mappings": {"ams01": "Amsterdam"}, - }, - ) - def test_get_site_from_map_success(self): - """Test the get_site_from_map method with response.""" - results = nautobot.get_site_from_map("ams01") - expected = "Amsterdam" - self.assertEqual(results, expected) - - @patch.dict( - "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", - { - "aristacv_hostname_patterns": [r"(?P\w{2,3}\d+)-(?P\w+)-\d+"], - "aristacv_site_mappings": {}, - }, - ) - def test_get_site_from_map_fail(self): - """Test the get_site_from_map method with failed response.""" - results = nautobot.get_site_from_map("dc01") - expected = None - self.assertEqual(results, expected) - - @patch.dict( - "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", - { - "aristacv_hostname_patterns": [r"(?P\w{2,3}\d+)-(?P\w+)-\d+"], - "aristacv_role_mappings": {"edge": "Edge Router"}, - }, - ) - def test_get_role_from_map_success(self): - """Test the get_role_from_map method with response.""" - results = nautobot.get_role_from_map("edge") - expected = "Edge Router" - self.assertEqual(results, expected) - - @patch.dict( - "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", - { - "aristacv_hostname_patterns": [r"(?P\w{2,3}\d+)-(?P\w+)-\d+"], - "aristacv_role_mappings": {}, - }, - ) - def test_get_role_from_map_fail(self): - """Test the get_role_from_map method with failed response.""" - results = nautobot.get_role_from_map("rtr") - expected = None - self.assertEqual(results, expected) diff --git a/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py b/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py new file mode 100644 index 000000000..67394b814 --- /dev/null +++ b/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py @@ -0,0 +1,193 @@ +"""Tests covering use of tags and custom fields in the plugin.""" +import datetime +from unittest.mock import Mock + +from django.contrib.contenttypes.models import ContentType +from django.test import TestCase +from nautobot.extras.models import CustomField, Status, Tag +from nautobot.ipam.models import VLAN, IPAddress, Prefix, VLANGroup + +from nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox import 
InfobloxAdapter +from nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot import NautobotAdapter + + +class TestTagging(TestCase): + """Tests ensuring tagging is applied to objects synced from and to Infoblox.""" + + def setUp(self): + "Test class set up." + self.tag_sync_from_infoblox = Tag.objects.get(name="SSoT Synced from Infoblox") + self.tag_sync_to_infoblox = Tag.objects.get(name="SSoT Synced to Infoblox") + + def test_tags_have_correct_content_types_set(self): + """Ensure tags have correct content types configured.""" + for model in (IPAddress, Prefix, VLAN): + content_type = ContentType.objects.get_for_model(model) + self.assertIn(content_type, self.tag_sync_from_infoblox.content_types.all()) + self.assertIn(content_type, self.tag_sync_to_infoblox.content_types.all()) + + def test_objects_synced_from_infoblox_are_tagged(self): + """Ensure objects synced from Infoblox have 'SSoT Synced from Infoblox' tag applied.""" + nb_diffsync = NautobotAdapter() + nb_diffsync.job = Mock() + nb_diffsync.load() + + infoblox_adapter = InfobloxAdapter(conn=Mock()) + + ds_prefix = infoblox_adapter.prefix( + network="10.0.0.0/8", + description="Test Network", + network_type="network", + status="Active", + ) + infoblox_adapter.add(ds_prefix) + ds_ipaddress = infoblox_adapter.ipaddress( + description="Test IPAddress", + address="10.0.0.1", + status="Active", + dns_name="", + prefix="10.0.0.0/8", + prefix_length=8, + ip_addr_type="host", + ) + infoblox_adapter.add(ds_ipaddress) + ds_vlangroup = infoblox_adapter.vlangroup( + name="TestVLANGroup", + description="", + ) + infoblox_adapter.add(ds_vlangroup) + ds_vlan = infoblox_adapter.vlan( + vid=750, + name="TestVLAN", + description="Test VLAN", + status="ASSIGNED", + vlangroup="TestVLANGroup", + ext_attrs={}, + ) + infoblox_adapter.add(ds_vlan) + + nb_diffsync.sync_from(infoblox_adapter) + + prefix = Prefix.objects.get(network="10.0.0.0", prefix_length="8") + self.assertEqual(prefix.tags.all()[0], self.tag_sync_from_infoblox) + + ipaddress = IPAddress.objects.get(address="10.0.0.1/8") + self.assertEqual(ipaddress.tags.all()[0], self.tag_sync_from_infoblox) + + vlan = VLAN.objects.get(vid=750) + self.assertEqual(vlan.tags.all()[0], self.tag_sync_from_infoblox) + + def test_objects_synced_to_infoblox_are_tagged(self): + """Ensure objects synced to Infoblox have 'SSoT Synced to Infoblox' tag applied.""" + nb_prefix = Prefix( + network="10.0.0.0", + prefix_length=8, + description="Test Network", + type="network", + status=Status.objects.get_for_model(Prefix).first(), + ) + nb_prefix.validated_save() + nb_ipaddress = IPAddress( + description="Test IPAddress", + address="10.0.0.1/8", + status=Status.objects.get_for_model(IPAddress).first(), + type="host", + ) + nb_ipaddress.validated_save() + nb_vlangroup = VLANGroup( + name="TestVLANGroup", + ) + nb_vlangroup.validated_save() + nb_vlan = VLAN( + vid=750, + name="VL750", + description="Test VLAN", + status=Status.objects.get_for_model(VLAN).first(), + vlan_group=nb_vlangroup, + ) + nb_vlan.validated_save() + + nautobot_adapter = NautobotAdapter() + nautobot_adapter.job = Mock() + nautobot_adapter.load() + + infoblox_adapter = InfobloxAdapter(conn=Mock()) + infoblox_adapter.job = Mock() + nautobot_adapter.sync_to(infoblox_adapter) + + prefix = Prefix.objects.get(network="10.0.0.0", prefix_length="8") + self.assertEqual(prefix.tags.all()[0], self.tag_sync_to_infoblox) + + ipaddress = IPAddress.objects.get(address="10.0.0.1/8") + self.assertEqual(ipaddress.tags.all()[0], self.tag_sync_to_infoblox) + + 
vlan = VLAN.objects.get(vid=750) + self.assertEqual(vlan.tags.all()[0], self.tag_sync_to_infoblox) + + +class TestCustomFields(TestCase): + """Tests ensuring custom fields are updated for objects synced from and to Infoblox.""" + + def setUp(self): + """Test class set up.""" + self.today = datetime.date.today().isoformat() + self.cf_synced_to_infoblox = CustomField.objects.get(key="ssot_synced_to_infoblox") + + def test_cfs_have_correct_content_types_set(self): + """Ensure cfs have correct content types configured.""" + for model in (IPAddress, Prefix, VLAN, VLANGroup): + content_type = ContentType.objects.get_for_model(model) + self.assertIn(content_type, self.cf_synced_to_infoblox.content_types.all()) + + def test_cf_updated_for_objects_synced_to_infoblox(self): + """Ensure objects synced to Infoblox have cf 'ssot_synced_to_infoblox' correctly updated.""" + nb_prefix = Prefix( + network="10.0.0.0", + prefix_length=8, + description="Test Network", + type="network", + status=Status.objects.get_for_model(Prefix).first(), + ) + nb_prefix.validated_save() + + nb_ipaddress = IPAddress( + description="Test IPAddress", + address="10.0.0.1/8", + status=Status.objects.get_for_model(IPAddress).first(), + type="host", + ) + nb_ipaddress.validated_save() + + nb_vlangroup = VLANGroup( + name="TestVLANGroup", + ) + nb_vlangroup.validated_save() + nb_vlan = VLAN( + vid=750, + name="VL750", + description="Test VLAN", + status=Status.objects.get_for_model(VLAN).first(), + vlan_group=nb_vlangroup, + ) + nb_vlan.validated_save() + + nautobot_adapter = NautobotAdapter() + nautobot_adapter.job = Mock() + nautobot_adapter.load() + + conn = Mock() + infoblox_adapter = InfobloxAdapter(conn=conn) + infoblox_adapter.job = Mock() + nautobot_adapter.sync_to(infoblox_adapter) + + prefix = Prefix.objects.get(network="10.0.0.0", prefix_length="8") + self.assertEqual(prefix.cf["ssot_synced_to_infoblox"], self.today) + + ipaddress = IPAddress.objects.get(address="10.0.0.1/8") + self.assertEqual(ipaddress.cf["ssot_synced_to_infoblox"], self.today) + + vlangroup = VLANGroup.objects.get(name="TestVLANGroup") + self.assertEqual(vlangroup.cf["ssot_synced_to_infoblox"], self.today) + + vlan = VLAN.objects.get(vid=750) + self.assertEqual(vlan.cf["ssot_synced_to_infoblox"], self.today) diff --git a/nautobot_ssot/tests/ipfabric/test_ipfabric_adapter.py b/nautobot_ssot/tests/ipfabric/test_ipfabric_adapter.py index 7992e2121..28bf30d0d 100644 --- a/nautobot_ssot/tests/ipfabric/test_ipfabric_adapter.py +++ b/nautobot_ssot/tests/ipfabric/test_ipfabric_adapter.py @@ -36,8 +36,8 @@ def setUp(self): side_effect=(lambda x: VLAN_FIXTURE if x == "tables/vlan/site-summary" else "") ) ipfabric_client.inventory.interfaces.all.return_value = INTERFACE_FIXTURE - ipfabric_client.technology.managed_networks.networks.fetch.return_value = NETWORKS_FIXTURE - ipfabric_client.technology.platforms.stacks_members.fetch.side_effect = [[] for site in SITE_FIXTURE[:-1]] + [ + ipfabric_client.technology.managed_networks.networks.all.return_value = NETWORKS_FIXTURE + ipfabric_client.technology.platforms.stacks_members.all.side_effect = [[] for site in SITE_FIXTURE[:-1]] + [ STACKS_FIXTURE ] diff --git a/nautobot_ssot/tests/ipfabric/test_utils.py b/nautobot_ssot/tests/ipfabric/test_utils.py index 1d34c3032..c0f4e8352 100644 --- a/nautobot_ssot/tests/ipfabric/test_utils.py +++ b/nautobot_ssot/tests/ipfabric/test_utils.py @@ -9,95 +9,145 @@ class TestUtils(SimpleTestCase): # pylint: disable=too-many-public-methods """Test IPFabric utilities.utils.""" def 
test_virtual_interface(self): - self.assertEqual("virtual", utils.convert_media_type("Virtual")) + self.assertEqual("virtual", utils.convert_media_type("Virtual", "VLAN1")) def test_bridge_interface(self): - self.assertEqual("bridge", utils.convert_media_type("Bridge")) + self.assertEqual("bridge", utils.convert_media_type("Bridge", "Bridge0")) def test_lag_interface(self): - self.assertEqual("lag", utils.convert_media_type("LAG")) + self.assertEqual("lag", utils.convert_media_type("LAG", "Po1")) def test_hunderd_meg_base_t_interface(self): - self.assertEqual("100base-tx", utils.convert_media_type("100Base-T")) + self.assertEqual("100base-tx", utils.convert_media_type("100Base-T", "Fa1")) def test_hundred_meg_interface(self): - self.assertEqual("100base-tx", utils.convert_media_type("100MegabitEthernet")) + self.assertEqual("100base-tx", utils.convert_media_type("100MegabitEthernet", "Fa1")) def test_gig_base_t_interface(self): - self.assertEqual("1000base-t", utils.convert_media_type("1000BaseT")) - self.assertEqual("1000base-t", utils.convert_media_type("10/100/1000BaseTX")) + self.assertEqual("1000base-t", utils.convert_media_type("1000BaseT", "Gi1")) + self.assertEqual("1000base-t", utils.convert_media_type("10/100/1000BaseTX", "Gi1")) def test_rj45_uses_gig_base_t_interface(self): - self.assertEqual("1000base-t", utils.convert_media_type("RJ45")) + self.assertEqual("1000base-t", utils.convert_media_type("RJ45", "Gi1")) def test_gig_default_uses_base_t_interface(self): - self.assertEqual("1000base-t", utils.convert_media_type("1GigThisUsesDefault")) + self.assertEqual("1000base-t", utils.convert_media_type("1GigThisUsesDefault", "Gi1")) def test_gig_sfp_interface(self): - self.assertEqual("1000base-x-sfp", utils.convert_media_type("10/100/1000BaseTX SFP")) + self.assertEqual("1000base-x-sfp", utils.convert_media_type("10/100/1000BaseTX SFP", "Gi1")) def test_gig_sfp_used_for_sfp_type_interface(self): - self.assertEqual("1000base-x-sfp", utils.convert_media_type("1000BaseLX")) - self.assertEqual("1000base-x-sfp", utils.convert_media_type("1000BaseSX")) - self.assertEqual("1000base-x-sfp", utils.convert_media_type("1000BaseLR")) - self.assertEqual("1000base-x-sfp", utils.convert_media_type("1000BaseSR")) + self.assertEqual("1000base-x-sfp", utils.convert_media_type("1000BaseLX", "Gi1")) + self.assertEqual("1000base-x-sfp", utils.convert_media_type("1000BaseSX", "Gi1")) + self.assertEqual("1000base-x-sfp", utils.convert_media_type("1000BaseLR", "Gi1")) + self.assertEqual("1000base-x-sfp", utils.convert_media_type("1000BaseSR", "Gi1")) def test_gig_gbic_interface(self): - self.assertEqual("1000base-x-gbic", utils.convert_media_type("10/100/1000BaseTX GBIC")) + self.assertEqual("1000base-x-gbic", utils.convert_media_type("10/100/1000BaseTX GBIC", "Gi1")) def test_two_and_half_gig_base_t_interface(self): - self.assertEqual("2.5gbase-t", utils.convert_media_type("100/1000/2.5GBaseTX")) + self.assertEqual("2.5gbase-t", utils.convert_media_type("100/1000/2.5GBaseTX", "TwoGi1")) def test_five_gig_base_t_interface(self): - self.assertEqual("5gbase-t", utils.convert_media_type("100/1000/2.5G/5GBaseTX")) + self.assertEqual("5gbase-t", utils.convert_media_type("100/1000/2.5G/5GBaseTX", "FiveGi1")) def test_ten_gig_xfp_interface(self): - self.assertEqual("10gbase-x-xfp", utils.convert_media_type("10GBase XFP")) + self.assertEqual("10gbase-x-xfp", utils.convert_media_type("10GBase XFP", "TenGi1")) def test_ten_gig_x2_interface(self): - self.assertEqual("10gbase-x-x2", utils.convert_media_type("10GBase 
X2")) + self.assertEqual("10gbase-x-x2", utils.convert_media_type("10GBase X2", "TenGi1")) def test_ten_gig_xenpak_interface(self): - self.assertEqual("10gbase-x-xenpak", utils.convert_media_type("10GBase XENPAK")) + self.assertEqual("10gbase-x-xenpak", utils.convert_media_type("10GBase XENPAK", "TenGi1")) def test_ten_gig_sfp_interface(self): - self.assertEqual("10gbase-x-sfpp", utils.convert_media_type("10GBase SFP")) + self.assertEqual("10gbase-x-sfpp", utils.convert_media_type("10GBase SFP", "TenGi1")) def test_ten_gig_default_uses_sfp_interface(self): - self.assertEqual("10gbase-x-sfpp", utils.convert_media_type("10G")) + self.assertEqual("10gbase-x-sfpp", utils.convert_media_type("10G", "TenGi1")) def test_twenty_five_gig_sfp_interface(self): - self.assertEqual("25gbase-x-sfp28", utils.convert_media_type("25G")) + self.assertEqual("25gbase-x-sfp28", utils.convert_media_type("25G", "TweGi1")) def test_forty_gig_sfp_interface(self): - self.assertEqual("40gbase-x-qsfpp", utils.convert_media_type("40G")) + self.assertEqual("40gbase-x-qsfpp", utils.convert_media_type("40G", "FoGi1")) def test_fifty_gig_sfp_interface(self): - self.assertEqual("50gbase-x-sfp56", utils.convert_media_type("50G")) + self.assertEqual("50gbase-x-sfp56", utils.convert_media_type("50G", "FiGi1")) def test_hundred_gig_qsfp_interface(self): - self.assertEqual("100gbase-x-qsfp28", utils.convert_media_type("100G QSFP")) + self.assertEqual("100gbase-x-qsfp28", utils.convert_media_type("100G QSFP", "HunGi1")) def test_hundred_gig_default_uses_cfp_interface(self): - self.assertEqual("100gbase-x-cfp", utils.convert_media_type("100G")) + self.assertEqual("100gbase-x-cfp", utils.convert_media_type("100G", "HunGi1")) def test_two_hundred_gig_qsfp_interface(self): - self.assertEqual("200gbase-x-qsfp56", utils.convert_media_type("200G QSFP")) + self.assertEqual("200gbase-x-qsfp56", utils.convert_media_type("200G QSFP", "TwoHunGi1")) def test_two_hundred_gig_default_uses_cfp_interface(self): - self.assertEqual("200gbase-x-cfp2", utils.convert_media_type("200G")) + self.assertEqual("200gbase-x-cfp2", utils.convert_media_type("200G", "TwoHunGi1")) def test_four_hundred_gig_qsfp_interface(self): - self.assertEqual("400gbase-x-qsfp112", utils.convert_media_type("400G QSFP")) + self.assertEqual("400gbase-x-qsfp112", utils.convert_media_type("400G QSFP", "FoHunGi1")) def test_four_hundred_gig_default_uses_osfp_interface(self): - self.assertEqual("400gbase-x-osfp", utils.convert_media_type("400G")) + self.assertEqual("400gbase-x-osfp", utils.convert_media_type("400G", "FoHunGi1")) def test_eight_hundred_gig_qsfp_interface(self): - self.assertEqual("800gbase-x-qsfpdd", utils.convert_media_type("800G QSFP")) + self.assertEqual("800gbase-x-qsfpdd", utils.convert_media_type("800G QSFP", "EiHunGi1")) def test_eight_hundred_gig_default_uses_osfp_interface(self): - self.assertEqual("800gbase-x-osfp", utils.convert_media_type("800G")) + self.assertEqual("800gbase-x-osfp", utils.convert_media_type("800G", "EiHunGi1")) def test_unknown_interface_uses_default_interface(self): - self.assertEqual(DEFAULT_INTERFACE_TYPE, utils.convert_media_type("ThisShouldGiveTheDefault")) + self.assertEqual(DEFAULT_INTERFACE_TYPE, utils.convert_media_type("ThisShouldGiveTheDefault", "")) + + def test_interface_name_lag(self): + self.assertEqual("lag", utils.convert_media_type("", "Po1")) + self.assertEqual("lag", utils.convert_media_type("", "Port-channel1")) + + def test_interface_name_vlan(self): + self.assertEqual("virtual", utils.convert_media_type("", 
"Vlan1")) + self.assertEqual("virtual", utils.convert_media_type("", "Vl1")) + + def test_interface_name_loopback(self): + self.assertEqual("virtual", utils.convert_media_type("", "Loopback1")) + self.assertEqual("virtual", utils.convert_media_type("", "Lo1")) + + def test_interface_name_tunnel(self): + self.assertEqual("virtual", utils.convert_media_type("", "Tu1")) + self.assertEqual("virtual", utils.convert_media_type("", "Tunnel1")) + + def test_interface_name_vxlan(self): + self.assertEqual("virtual", utils.convert_media_type("", "Vxlan1")) + self.assertEqual("virtual", utils.convert_media_type("", "Vx1")) + + def test_interface_name_fastethernet(self): + self.assertEqual("100base-tx", utils.convert_media_type("", "FastEthernet1")) + self.assertEqual("100base-tx", utils.convert_media_type("", "Fa1")) + + def test_interface_name_gigethernet(self): + self.assertEqual("1000base-t", utils.convert_media_type("", "GigabitEthernet1")) + self.assertEqual("1000base-t", utils.convert_media_type("", "Gi1")) + + def test_interface_name_tengigethernet(self): + self.assertEqual("10gbase-x-sfpp", utils.convert_media_type("", "TenGigabitEthernet1")) + self.assertEqual("10gbase-x-sfpp", utils.convert_media_type("", "Te1")) + + def test_interface_name_twentyfivegigethernet(self): + self.assertEqual("25gbase-x-sfp28", utils.convert_media_type("", "TwentyFiveGigabitEthernet1")) + + def test_interface_name_fortygigethernet(self): + self.assertEqual("40gbase-x-qsfpp", utils.convert_media_type("", "FortyGigabitEthernet1")) + self.assertEqual("40gbase-x-qsfpp", utils.convert_media_type("", "Fo1")) + + def test_interface_name_fiftygigethernet(self): + self.assertEqual("50gbase-x-sfp56", utils.convert_media_type("", "FiftyGigabitEthernet1")) + self.assertEqual("50gbase-x-sfp56", utils.convert_media_type("", "Fi1")) + + def test_interface_name_hundredgigethernet(self): + self.assertEqual("100gbase-x-qsfp28", utils.convert_media_type("", "HundredGigabitEthernet1")) + self.assertEqual("100gbase-x-qsfp28", utils.convert_media_type("", "Hu1")) + + def test_interface_name_twohundredgigethernet(self): + self.assertEqual("200gbase-x-qsfp56", utils.convert_media_type("", "TwoHundredGigabitEthernet1")) diff --git a/nautobot_ssot/tests/test_contrib_adapter.py b/nautobot_ssot/tests/test_contrib_adapter.py index 01b96e67d..cf4c7b05a 100644 --- a/nautobot_ssot/tests/test_contrib_adapter.py +++ b/nautobot_ssot/tests/test_contrib_adapter.py @@ -331,6 +331,7 @@ class Adapter(NautobotAdapter): top_level = ["vlan_group"] location_type = dcim_models.LocationType.objects.create(name="Building") + location_type.content_types.add(ContentType.objects.get_for_model(ipam_models.VLAN)) location = dcim_models.Location.objects.create( name="Example Building", location_type=location_type, status=extras_models.Status.objects.get(name="Active") ) diff --git a/poetry.lock b/poetry.lock index 4f7e565f8..572da8d93 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
[[package]] name = "aiodns" @@ -1982,6 +1982,17 @@ files = [ {file = "ijson-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a3a6a2fbbe7550ffe52d151cf76065e6b89cfb3e9d0463e49a7e322a25d0426"}, {file = "ijson-3.2.3-cp311-cp311-win32.whl", hash = "sha256:6a4db2f7fb9acfb855c9ae1aae602e4648dd1f88804a0d5cfb78c3639bcf156c"}, {file = "ijson-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:ccd6be56335cbb845f3d3021b1766299c056c70c4c9165fb2fbe2d62258bae3f"}, + {file = "ijson-3.2.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:055b71bbc37af5c3c5861afe789e15211d2d3d06ac51ee5a647adf4def19c0ea"}, + {file = "ijson-3.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c075a547de32f265a5dd139ab2035900fef6653951628862e5cdce0d101af557"}, + {file = "ijson-3.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:457f8a5fc559478ac6b06b6d37ebacb4811f8c5156e997f0d87d708b0d8ab2ae"}, + {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9788f0c915351f41f0e69ec2618b81ebfcf9f13d9d67c6d404c7f5afda3e4afb"}, + {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa234ab7a6a33ed51494d9d2197fb96296f9217ecae57f5551a55589091e7853"}, + {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd0dc5da4f9dc6d12ab6e8e0c57d8b41d3c8f9ceed31a99dae7b2baf9ea769a"}, + {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c6beb80df19713e39e68dc5c337b5c76d36ccf69c30b79034634e5e4c14d6904"}, + {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a2973ce57afb142d96f35a14e9cfec08308ef178a2c76b8b5e1e98f3960438bf"}, + {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:105c314fd624e81ed20f925271ec506523b8dd236589ab6c0208b8707d652a0e"}, + {file = "ijson-3.2.3-cp312-cp312-win32.whl", hash = "sha256:ac44781de5e901ce8339352bb5594fcb3b94ced315a34dbe840b4cff3450e23b"}, + {file = "ijson-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:0567e8c833825b119e74e10a7c29761dc65fcd155f5d4cb10f9d3b8916ef9912"}, {file = "ijson-3.2.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:eeb286639649fb6bed37997a5e30eefcacddac79476d24128348ec890b2a0ccb"}, {file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:396338a655fb9af4ac59dd09c189885b51fa0eefc84d35408662031023c110d1"}, {file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e0243d166d11a2a47c17c7e885debf3b19ed136be2af1f5d1c34212850236ac"}, @@ -2581,6 +2592,16 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = 
"MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -3560,6 +3581,7 @@ files = [ {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, @@ -3568,6 +3590,8 @@ files = [ {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", 
hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, @@ -4152,6 +4176,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -4159,8 +4184,16 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -4177,6 +4210,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -4184,6 +4218,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -5347,4 +5382,4 @@ servicenow = ["Jinja2", "PyYAML", "ijson", "oauthlib", "python-magic", "pytz", " [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.12" -content-hash = "30b0830286fe4ea755685be945b2f7e059f18d5d255452197d7e11609313184f" +content-hash = "cd710f5c319a47e01c925b15ded4986b7dd40575ee65813234016f2511ffbbc6" diff --git a/pyproject.toml b/pyproject.toml index 5d9035257..9c52dabf5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautobot-ssot" -version = "2.5.0" +version = "2.6.0" description = "Nautobot Single Source of Truth" authors = ["Network to Code, LLC "] license = "Apache-2.0" @@ -39,7 +39,7 @@ cvprac = { version = "^1.2.2", optional = true } dnspython = { version = "^2.1.0", optional = true } nautobot-device-lifecycle-mgmt = { version = "^2.0.0", optional = true } packaging = ">=21.3, <24" -prometheus-client = "~0.17.1" +prometheus-client = ">=0.17.1" ijson = { version = ">=2.5.1", optional = true } ipfabric = { version = "~6.0.9", optional = true } 
ipfabric-diagrams = { version = "~6.0.2", optional = true } @@ -50,7 +50,6 @@ pytz = { version = ">=2019.3", optional = true } requests = { version = ">=2.21.0", optional = true } requests-oauthlib = { version = ">=1.3.0", optional = true } six = { version = ">=1.13.0", optional = true } -drf-spectacular = "0.26.3" httpx = { version = ">=0.23.3", optional = true } [tool.poetry.group.dev.dependencies] diff --git a/tasks.py b/tasks.py index bec9260aa..c54ee1b0e 100644 --- a/tasks.py +++ b/tasks.py @@ -88,6 +88,38 @@ def _await_healthy_container(context, container_id): sleep(1) + +def _read_command_env(values) -> dict: + """Reads environment variables from the given values and returns a dictionary. + + Examples: + >>> _read_command_env('VAR1=VALUE1') + {'VAR1': 'VALUE1'} + >>> os.environ["VAR2"] = "VALUE2" + >>> _read_command_env(['VAR1=VALUE1', 'VAR2', 'VAR3']) + {'VAR1': 'VALUE1', 'VAR2': 'VALUE2', 'VAR3': ''} + >>> _read_command_env({'VAR1': 'VALUE1', 'VAR2': 'ANOTHER_VALUE'}) + {'VAR1': 'VALUE1', 'VAR2': 'ANOTHER_VALUE'} + """ + if not values: + return {} + + if isinstance(values, dict): + return values + + def read(envs): + if isinstance(envs, str): + if "=" in envs: + name, value = envs.split("=", 1) + yield name, value + else: + yield envs, os.getenv(envs, "") + else: + for env in envs: + yield from read(env) + + return dict(read(values)) + + def task(function=None, *args, **kwargs): """Task decorator to override the default Invoke task decorator and add each task to the invoke namespace.""" @@ -148,32 +180,26 @@ def docker_compose(context, command, **kwargs): def run_command(context, command, **kwargs): """Wrapper to run a command locally or inside the nautobot container.""" + env = _read_command_env(kwargs.pop("env", None)) if is_truthy(context.nautobot_ssot.local): - if "command_env" in kwargs: - kwargs["env"] = { - **kwargs.get("env", {}), - **kwargs.pop("command_env"), - } - context.run(command, **kwargs) + return context.run(command, **kwargs, env=env) + + # Check if nautobot is running; no need to start another nautobot container to run a command + docker_compose_status = "ps --services --filter status=running" + results = docker_compose(context, docker_compose_status, hide="out") + if "nautobot" in results.stdout: + compose_command = "exec" else: - # Check if nautobot is running, no need to start another nautobot container to run a command - docker_compose_status = "ps --services --filter status=running" - results = docker_compose(context, docker_compose_status, hide="out") - if "nautobot" in results.stdout: - compose_command = "exec" - else: - compose_command = "run --rm --entrypoint=''" + compose_command = "run --rm --entrypoint=''" - if "command_env" in kwargs: - command_env = kwargs.pop("command_env") - for key, value in command_env.items(): - compose_command += f' --env="{key}={value}"' + for env_name in env: + compose_command += f" --env={env_name}" - compose_command += f" -- nautobot {command}" + compose_command += f" -- nautobot {command}" - pty = kwargs.pop("pty", True) + pty = kwargs.pop("pty", True) - docker_compose(context, compose_command, pty=pty, **kwargs) + return docker_compose(context, compose_command, **kwargs, pty=pty, env=env) # ------------------------------------------------------------------------------ @@ -342,19 +368,23 @@ def logs(context, service="", follow=False, tail=0): @task( help={ "file": "Python file to execute", - "env": "Environment variables to pass to the command", + "env": "Environment variables to pass to the command, e.g. `--env VAR1=VALUE1 --env VAR2`", "plain": "Flag to run nbshell in plain mode (default: False)", }, + iterable=["env"], ) -def nbshell(context, file="", env={}, plain=False): - """Launch an interactive nbshell session.""" +def nbshell(context, file="", env=None, plain=False): + """Launch an interactive nbshell session. + + Files passed to the command can't contain unindented empty lines, as they break the nbshell interpreter. + """ command = [ "nautobot-server", "nbshell", "--plain" if plain else "", f"< '{file}'" if file else "", ] - run_command(context, " ".join(command), pty=not bool(file), command_env=env) + run_command(context, " ".join(command), pty=not bool(file), env=env) @task