From ddb25f612c085da25fc9e8729d224600d48b1b1a Mon Sep 17 00:00:00 2001 From: bile0026 Date: Mon, 9 Sep 2024 14:21:53 -0500 Subject: [PATCH 01/19] =?UTF-8?q?fix:=20=F0=9F=90=9B=20nautobot=5Fssot=20r?= =?UTF-8?q?equires=20nautobot=20>=3D=202.1.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- development/creds.example.env | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/development/creds.example.env b/development/creds.example.env index 5feb6c71c..42aa42b81 100644 --- a/development/creds.example.env +++ b/development/creds.example.env @@ -48,3 +48,7 @@ NAUTOBOT_APIC_VERIFY_DEVNET=False SERVICENOW_PASSWORD="changeme" IPFABRIC_API_TOKEN=secrettoken + +NAUTOBOT_SSOT_ENABLE_BOOTSTRAP="False" +NAUTOBOT_BOOTSTRAP_SSOT_ENVIRONMENT_BRANCH=develop +NAUTOBOT_BOOTSTRAP_SSOT_LOAD_SOURCE=file # or git From 63fffc470a684c2b1c66a67c0cae33f072aa5e7d Mon Sep 17 00:00:00 2001 From: bile0026 Date: Mon, 9 Sep 2024 14:22:45 -0500 Subject: [PATCH 02/19] =?UTF-8?q?fix:=20=F0=9F=90=9B=20nautobot=5Fssot=20r?= =?UTF-8?q?equires=20nautobot=20>=3D=202.1.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- invoke.example.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/invoke.example.yml b/invoke.example.yml index a71bc4ffa..7102a2dad 100644 --- a/invoke.example.yml +++ b/invoke.example.yml @@ -1,6 +1,6 @@ --- nautobot_ssot: - nautobot_ver: "2.0.0" + nautobot_ver: "2.1.0" python_ver: "3.11" # local: false # compose_dir: "/full/path/to/nautobot-app-ssot/development" From 2550e8638fa7b35701501d37a7af6727ad182db8 Mon Sep 17 00:00:00 2001 From: bile0026 Date: Mon, 9 Sep 2024 15:15:36 -0500 Subject: [PATCH 03/19] =?UTF-8?q?feat:=20=E2=9C=A8=20add=20bootstrap=20sso?= =?UTF-8?q?t=20code?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- changes/495.added | 1 + development/app_config_schema.py | 4 +- development/nautobot_config.py | 46 +- docs/admin/integrations/bootstrap_setup.md | 89 + docs/user/integrations/bootstrap.md | 718 +++++ docs/user/integrations/index.md | 1 + mkdocs.yml | 2 + nautobot_ssot/__init__.py | 1 + .../integrations/bootstrap/__init__.py | 1 + .../integrations/bootstrap/constants.py | 3 + .../bootstrap/diffsync/adapters/__init__.py | 1 + .../bootstrap/diffsync/adapters/bootstrap.py | 1041 +++++++ .../bootstrap/diffsync/adapters/nautobot.py | 1327 +++++++++ .../bootstrap/diffsync/models/__init__.py | 1 + .../bootstrap/diffsync/models/base.py | 764 +++++ .../bootstrap/diffsync/models/bootstrap.py | 549 ++++ .../bootstrap/diffsync/models/nautobot.py | 2471 +++++++++++++++++ .../bootstrap/fixtures/develop.yml | 2 + .../bootstrap/fixtures/global_settings.yml | 794 ++++++ .../bootstrap/fixtures/production.yml | 2 + .../bootstrap/fixtures/staging.yml | 2 + nautobot_ssot/integrations/bootstrap/jobs.py | 160 ++ .../integrations/bootstrap/signals.py | 115 + .../integrations/bootstrap/utils/__init__.py | 144 + .../integrations/bootstrap/utils/bootstrap.py | 1 + .../integrations/bootstrap/utils/nautobot.py | 29 + nautobot_ssot/tests/bootstrap/__init__.py | 1 + .../tests/bootstrap/fixtures/develop.json | 3 + .../bootstrap/fixtures/global_settings.json | 757 +++++ .../tests/bootstrap/fixtures/production.json | 3 + .../tests/bootstrap/test_bootstrap_adapter.py | 158 ++ .../tests/bootstrap/test_nautobot_adapter.py | 128 + nautobot_ssot/tests/bootstrap/test_setup.py | 959 +++++++ nautobot_ssot/tests/test_basic.py | 18 +- nautobot_ssot/utils.py | 12 +- poetry.lock 
| 780 +++--- pyproject.toml | 2 +- tasks.py | 58 +- 38 files changed, 10700 insertions(+), 448 deletions(-) create mode 100644 changes/495.added create mode 100644 docs/admin/integrations/bootstrap_setup.md create mode 100644 docs/user/integrations/bootstrap.md create mode 100644 nautobot_ssot/integrations/bootstrap/__init__.py create mode 100644 nautobot_ssot/integrations/bootstrap/constants.py create mode 100644 nautobot_ssot/integrations/bootstrap/diffsync/adapters/__init__.py create mode 100755 nautobot_ssot/integrations/bootstrap/diffsync/adapters/bootstrap.py create mode 100755 nautobot_ssot/integrations/bootstrap/diffsync/adapters/nautobot.py create mode 100644 nautobot_ssot/integrations/bootstrap/diffsync/models/__init__.py create mode 100755 nautobot_ssot/integrations/bootstrap/diffsync/models/base.py create mode 100755 nautobot_ssot/integrations/bootstrap/diffsync/models/bootstrap.py create mode 100755 nautobot_ssot/integrations/bootstrap/diffsync/models/nautobot.py create mode 100644 nautobot_ssot/integrations/bootstrap/fixtures/develop.yml create mode 100755 nautobot_ssot/integrations/bootstrap/fixtures/global_settings.yml create mode 100644 nautobot_ssot/integrations/bootstrap/fixtures/production.yml create mode 100644 nautobot_ssot/integrations/bootstrap/fixtures/staging.yml create mode 100644 nautobot_ssot/integrations/bootstrap/jobs.py create mode 100644 nautobot_ssot/integrations/bootstrap/signals.py create mode 100644 nautobot_ssot/integrations/bootstrap/utils/__init__.py create mode 100644 nautobot_ssot/integrations/bootstrap/utils/bootstrap.py create mode 100644 nautobot_ssot/integrations/bootstrap/utils/nautobot.py create mode 100644 nautobot_ssot/tests/bootstrap/__init__.py create mode 100644 nautobot_ssot/tests/bootstrap/fixtures/develop.json create mode 100644 nautobot_ssot/tests/bootstrap/fixtures/global_settings.json create mode 100644 nautobot_ssot/tests/bootstrap/fixtures/production.json create mode 100644 nautobot_ssot/tests/bootstrap/test_bootstrap_adapter.py create mode 100644 nautobot_ssot/tests/bootstrap/test_nautobot_adapter.py create mode 100644 nautobot_ssot/tests/bootstrap/test_setup.py diff --git a/changes/495.added b/changes/495.added new file mode 100644 index 000000000..d121b8028 --- /dev/null +++ b/changes/495.added @@ -0,0 +1 @@ +Add Bootstrap SSoT to Nautobot SSoT Nautobot application \ No newline at end of file diff --git a/development/app_config_schema.py b/development/app_config_schema.py index e52e24786..a779b14ef 100644 --- a/development/app_config_schema.py +++ b/development/app_config_schema.py @@ -40,9 +40,7 @@ def _main(): **SchemaBuilder().to_json_schema(app_config), # type: ignore } app_config = import_module(package_name).config - _enrich_object_schema( - schema, app_config.default_settings, app_config.required_settings - ) + _enrich_object_schema(schema, app_config.default_settings, app_config.required_settings) schema_path.write_text(json.dumps(schema, indent=4) + "\n") print(f"\n==================\nGenerated schema:\n\n{schema_path}\n") print( diff --git a/development/nautobot_config.py b/development/nautobot_config.py index 33bb80280..2069a54c8 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -18,12 +18,8 @@ if "debug_toolbar" not in INSTALLED_APPS: # noqa: F405 INSTALLED_APPS.append("debug_toolbar") # noqa: F405 - if ( - "debug_toolbar.middleware.DebugToolbarMiddleware" not in MIDDLEWARE - ): # noqa: F405 - MIDDLEWARE.insert( - 0, "debug_toolbar.middleware.DebugToolbarMiddleware" - ) # noqa: 
F405
+    if "debug_toolbar.middleware.DebugToolbarMiddleware" not in MIDDLEWARE:  # noqa: F405
+        MIDDLEWARE.insert(0, "debug_toolbar.middleware.DebugToolbarMiddleware")  # noqa: F405
 
 #
 # Misc. settings
 #
@@ -55,9 +51,7 @@
             "NAUTOBOT_DB_PORT",
             default_db_settings[nautobot_db_engine]["NAUTOBOT_DB_PORT"],
         ),  # Database port, default to postgres
-        "CONN_MAX_AGE": int(
-            os.getenv("NAUTOBOT_DB_TIMEOUT", "300")
-        ),  # Database timeout
+        "CONN_MAX_AGE": int(os.getenv("NAUTOBOT_DB_TIMEOUT", "300")),  # Database timeout
         "ENGINE": nautobot_db_engine,
     }
 }
@@ -192,8 +186,42 @@
         "atl01": "Atlanta",
     },
     "aristacv_verify": is_truthy(os.getenv("NAUTOBOT_ARISTACV_VERIFY", "true")),
+    "bootstrap_nautobot_environment_branch": os.getenv("NAUTOBOT_BOOTSTRAP_SSOT_ENVIRONMENT_BRANCH", "develop"),
+    "bootstrap_models_to_sync": {
+        "secret": True,
+        "secrets_group": True,
+        "git_repository": True,
+        "dynamic_group": True,
+        "computed_field": True,
+        "tag": True,
+        "graph_ql_query": True,
+        "software": False,
+        "software_image": False,
+        "validated_software": False,
+        "tenant_group": True,
+        "tenant": True,
+        "role": True,
+        "manufacturer": True,
+        "platform": True,
+        "location_type": True,
+        "location": True,
+        "team": True,
+        "contact": True,
+        "provider": True,
+        "provider_network": True,
+        "circuit_type": True,
+        "circuit": True,
+        "circuit_termination": True,
+        "namespace": True,
+        "rir": True,
+        "vlan_group": True,
+        "vlan": True,
+        "vrf": True,
+        "prefix": True,
+    },
     "enable_aci": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_ACI")),
     "enable_aristacv": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_ARISTACV")),
+    "enable_bootstrap": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_BOOTSTRAP", "false")),
     "enable_device42": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_DEVICE42")),
     "enable_dna_center": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_DNA_CENTER")),
     "enable_infoblox": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_INFOBLOX")),
diff --git a/docs/admin/integrations/bootstrap_setup.md b/docs/admin/integrations/bootstrap_setup.md
new file mode 100644
index 000000000..e59999a31
--- /dev/null
+++ b/docs/admin/integrations/bootstrap_setup.md
@@ -0,0 +1,89 @@
# NautobotSsotBootstrap

## Description

This plugin syncs data from yaml files into Nautobot to create baseline environments. Most items receive a custom field called "System of Record", which is set to "Bootstrap". Those items are then the only ones managed by the Bootstrap SSoT App; other items within the Nautobot instance are not affected unless there are items with overlapping names. There are currently two exceptions to this: the ComputedField and GraphQLQuery models, since they can't have a custom field associated. If you choose to manage ComputedField or GraphQLQuery objects with the Bootstrap SSoT App, make sure to define them all within the yaml file, since any "locally defined" Computed Fields and GraphQL Queries within Nautobot will end up getting deleted when the job runs. If an item exists in Nautobot by its identifiers but does not have the "System of Record" custom field on it, the item will be updated with "Bootstrap" (or the `SYSTEM_OF_RECORD` environment variable value) when the plugin runs. This way no duplicates are created, and the plugin will not delete any items that are not defined in the Bootstrap data but were manually created in Nautobot.
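Once a sync has run, you can inspect which objects Bootstrap manages by filtering on that custom field. A minimal sketch, assuming a `nautobot-server nbshell` session and the `system_of_record`/`last_synced_from_sor` custom field keys this app sets (`Tag` is just an illustrative model):

```python
# List objects currently managed by Bootstrap.
from nautobot.extras.models import Tag

# Custom field values are stored in the _custom_field_data JSON column.
for tag in Tag.objects.filter(_custom_field_data__system_of_record="Bootstrap"):
    print(tag.name, tag.custom_field_data.get("last_synced_from_sor"))
```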
## Installation

Add the app to your poetry environment with `poetry add nautobot-ssot-bootstrap`, then configure your `nautobot_config.py` to include the app and its settings.

### nautobot_config.py

The settings here are straightforward: `nautobot_environment_branch` is loaded from the environment variable `NAUTOBOT_BOOTSTRAP_SSOT_ENVIRONMENT_BRANCH` and defaults to develop. The rest of the settings define which models/objects you want the plugin to sync to Nautobot. There are a couple of caveats. For example, for DynamicGroup objects to sync, the filter criteria need to already exist in Nautobot, so if you are going to have groups that are filtered on platforms/regions/sites/etc., make sure not to include DynamicGroup objects in `models_to_sync` until those items exist. The same goes for Git Repositories when you want to sync Golden Config-related repositories: the Golden Config plugin needs to be installed for the `provided_contents` items to be found. This also applies to the Lifecycle Management app with the `Software/ValidatedSoftware` models.

```python
PLUGINS = ["nautobot_ssot", "nautobot_ssot_bootstrap"]

PLUGINS_CONFIG = {
    "nautobot_ssot_bootstrap": {
        # What to assign to the System of Record custom field.
        "nautobot_environment_branch": os.getenv("NAUTOBOT_BOOTSTRAP_SSOT_ENVIRONMENT_BRANCH", "develop"),
        # Which models to import from YAML into Nautobot
        "models_to_sync": {
            "secret": True,
            "secrets_group": True,
            "git_repository": True,
            "dynamic_group": True,
            "computed_field": True,
            "tag": True,
            "graph_ql_query": True,
            "software": True,
            "software_image": True,
            "validated_software": True,
            "tenant_group": True,
            "tenant": True,
            "role": True,
            "manufacturer": True,
            "platform": True,
            "location_type": True,
            "location": True,
            "team": True,
            "contact": True,
            "provider": True,
            "provider_network": True,
            "circuit_type": True,
            "circuit": True,
            "circuit_termination": True,
        },
    }
}
```

## Configuration

### Bootstrap data

Bootstrap data can be stored in two ways. It can live in the `nautobot_ssot_bootstrap/fixtures` directory, or you can create a Git Repository in an existing Nautobot instance that contains the word `Bootstrap` in its name and provides `config context` data. The data is stored as flat files that follow a naming scheme. The first, required file is `global_settings.yml`; it contains the main data structures for what can be loaded (`Secrets`, `SecretsGroups`, `GitRepository`, `DynamicGroup`, `Tag`, etc.). You can then create additional `.yml` files named after your CI environments, e.g. production, development, etc. This is where the environment variables described below are matched to pull in additional data from the other yaml files defined in the directory. A simple structure would look something like this:

```text
global_settings.yml
develop.yml
prod.yml
staging.yml
```

There are two environment variables that control how certain things are loaded in the app:

 1. `NAUTOBOT_BOOTSTRAP_SSOT_LOAD_SOURCE` - defines whether to load from the local `fixtures` folder or a GitRepository already present in Nautobot.
    - Acceptable options are `file` or `git`.
 2. `NAUTOBOT_BOOTSTRAP_SSOT_ENVIRONMENT_BRANCH` - defines the environment and settings you want to import, e.g. production, develop, staging.
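For local development these are the same variables added to `development/creds.example.env` by this patch series; a setup that loads the bundled fixtures might look like this (values are illustrative):

```text
NAUTOBOT_SSOT_ENABLE_BOOTSTRAP="True"
NAUTOBOT_BOOTSTRAP_SSOT_ENVIRONMENT_BRANCH=develop
NAUTOBOT_BOOTSTRAP_SSOT_LOAD_SOURCE=file # or git
```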
## Process

### Bootstrap as DataSource

Synchronization of data follows this workflow:

1. Load data from the Bootstrap yaml file(s) (limited to `models_to_sync`).
2. Load data from Nautobot (limited to `models_to_sync`, and to objects that also have the `CustomField` `system_of_record` set to "Bootstrap").
3. DiffSync determines Creates, Updates, and Deletes.
4. If an object is being created (an object loaded from Bootstrap was not loaded from Nautobot), Bootstrap will first check whether an object with the same name exists in Nautobot but does not have the `system_of_record` field set. If it finds one, it will update it with the Bootstrap values and set the `system_of_record` field to "Bootstrap".
5. If an object needs to be updated, it is updated with the values provided by the Bootstrap data.
6. If an object needs to be deleted, it is deleted.

### Bootstrap as DataTarget

NotYetImplemented

diff --git a/docs/user/integrations/bootstrap.md b/docs/user/integrations/bootstrap.md
new file mode 100644
index 000000000..669bf722a
--- /dev/null
+++ b/docs/user/integrations/bootstrap.md
@@ -0,0 +1,718 @@
## Usage

### Data structures

#### global_settings.yml (see `nautobot_ssot/integrations/bootstrap/fixtures/global_settings.yml` for examples of supported models)

```yaml
secret:
  - name: Github_Service_Acct
    provider: environment-variable # or text-file
    parameters:
      variable: GITHUB_SERVICE_ACCT
      path:
  - name: Github_Service_Token
    provider: environment-variable # or text-file
    parameters:
      variable: GITHUB_SERVICE_TOKEN
      path:
secrets_group:
  - name: Github_Service_Account
    secrets:
      - name: Github_Service_Acct
        secret_type: username
        access_type: HTTP(S)
      - name: Github_Service_Token
        secret_type: token
        access_type: HTTP(S)
git_repository:
  - name: "Backbone Config Contexts"
    url: "https://github.com/nautobot/backbone-config-contexts.git"
    branch: "main" # if branch is defined it will be used instead of the "git_branch" from the environment variable file.
    secrets_group_name: "Github_Service_Account"
    provided_data_type:
      - "config contexts"
  - name: "Datacenter Config Contexts"
    url: "https://github.com/nautobot/datacenter-config-contexts.git"
    secrets_group_name: "Github_Service_Account"
    provided_data_type:
      - "config contexts"
dynamic_group:
  - name: Backbone Domain
    content_type: dcim.device
    filter: |
      {
        "tenant": [
          "backbone"
        ]
      }
computed_field:
  - label: Compliance Change
    content_type: nautobot_golden_config.configcompliance
    template: '{{ obj | get_change_log }}'
tag:
  - name: Backbone
    color: '795548'
    content_types:
      - dcim.device
graph_ql_query:
  - name: "Backbone Devices"
    query: |
      query ($device_id: ID!) {
        device(id: $device_id) {
          config_context
          hostname: name
          device_role {
            name
          }
          tenant {
            name
          }
          primary_ip4 {
            address
          }
        }
      }
software:
  - device_platform: "arista_eos"
    version: "4.25.10M"
    alias:
    release_date: "2023-12-04"
    eos_date: "2023-12-04"
    documentation_url: "https://arista.com" # url is currently required due to a bug in the Device Lifecycle Management Plugin https://github.com/nautobot/nautobot-app-device-lifecycle-mgmt/issues/263
    lts: false
    pre_release: false
    tags: ['Backbone']
software_image:
  - software: arista_eos - 15.4.3
    platform: arista_eos
    software_version: 15.4.3
    file_name: arista15.4.3.bin
    download_url: https://arista.com
    image_file_checksum:
    default_image: false
    tags: ['Test']
validated_software:
  - software: "arista_eos - 4.25.10M"
    valid_since: 2023-08-07
    valid_until:
    preferred_version: false
    tags: []
```

#### develop.yml

```yaml
git_branch: develop
```

## Content Types

There are a couple of models, like Tags and Git Repositories, that have associated content types. These require a specific format when listed in the yaml file: `app_label.model`, though models can vary somewhat from App to App. Here is a list of some of the most common ones:

```yaml
- "circuits.circuit"
- "circuits.circuittermination"
- "circuits.provider"
- "circuits.providernetwork"
- "dcim.cable"
- "dcim.consoleport"
- "dcim.consoleserverport"
- "dcim.device"
- "dcim.devicebay"
- "dcim.devicetype"
- "dcim.frontport"
- "dcim.interface"
- "dcim.inventoryitem"
- "dcim.powerfeed"
- "dcim.poweroutlet"
- "dcim.powerpanel"
- "dcim.powerport"
- "dcim.rack"
- "dcim.rackreservation"
- "dcim.rearport"
- "dcim.site"
- "dcim.virtualchassis"
- "extras.gitrepository"
- "extras.job"
- "extras.secret"
- "ipam.aggregate"
- "ipam.ipaddress"
- "ipam.prefix"
- "ipam.routetarget"
- "ipam.service"
- "ipam.vlan"
- "ipam.vrf"
- "nautobot_device_lifecycle_mgmt.contactlcm"
- "nautobot_device_lifecycle_mgmt.contractlcm"
- "nautobot_device_lifecycle_mgmt.cvelcm"
- "nautobot_device_lifecycle_mgmt.devicesoftwarevalidationresult"
- "nautobot_device_lifecycle_mgmt.hardwarelcm"
- "nautobot_device_lifecycle_mgmt.inventoryitemsoftwarevalidationresult"
- "nautobot_device_lifecycle_mgmt.softwareimagelcm"
- "nautobot_device_lifecycle_mgmt.softwarelcm"
- "nautobot_device_lifecycle_mgmt.validatedsoftwarelcm"
- "nautobot_device_lifecycle_mgmt.vulnerabilitylcm"
- "nautobot_golden_config.compliancefeature"
- "nautobot_golden_config.compliancerule"
- "nautobot_golden_config.configcompliance"
- "nautobot_golden_config.configremove"
- "nautobot_golden_config.configreplace"
- "nautobot_golden_config.goldenconfig"
- "nautobot_golden_config.goldenconfigsetting"
- "tenancy.tenant"
- "virtualization.cluster"
- "virtualization.virtualmachine"
- "virtualization.vminterface"
```

## Object Model Notes

### Manufacturer

Create Manufacturer objects. Uses the following data structure:

```yaml
manufacturer:
  - name: # str
    description: # str
```

### Platform

Create Platform objects. Uses the following data structure:

```yaml
platform:
  - name: # str
    manufacturer: # str
    network_driver: # str
    napalm_driver: # str
    napalm_arguments: {} # dict
    description: # str
```

Ensure Manufacturer objects are created before referencing them.
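A filled-in Platform entry might look like this (values are illustrative; the platform name matches the `arista_eos` software examples above, and the manufacturer is assumed):

```yaml
platform:
  - name: "arista_eos"
    manufacturer: "Arista"
    network_driver: "arista_eos"
    napalm_driver: "eos"
    napalm_arguments: {}
    description: "Arista EOS switches"
```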
### LocationType

Create LocationType objects. Uses the following data structure:

```yaml
location_type:
  - name: # str
    parent: # str
    nestable: # bool
    description: # str
    content_types: [] # List[str]
```

### Location

Create Location objects. Uses the following data structure:

```yaml
location:
  - name: # str
    location_type: # str
    parent: # str
    status: # str
    facility: # str
    asn: # int
    time_zone: # str
    description: # str
    tenant: # str
    physical_address: # str
    shipping_address: # str
    latitude: # str
    longitude: # str
    contact_name: # str
    contact_phone: # str
    contact_email: # str
    tags: [] # List[str]
```

`location_type`, `parent`, `status`, `time_zone`, `tenant`, and `tags` are all references to objects. Ensure they exist prior to attempting to reference them here.

Ensure that the location types you reference here are first defined under `location_type`, or the locations will fail to create.

### TenantGroup

Create TenantGroup objects. Uses the following data structure:

```yaml
tenant_group:
  - name: # str
    parent: # str
    description: # str
```

### Tenant

Create Tenant objects. Uses the following data structure:

```yaml
tenant:
  - name: # str
    tenant_group: # str
    description: # str
    tags: [] # List[str]
```

Ensure that the tenant groups you reference here are first defined under `tenant_group`, or the tenants will fail to create.

### Role

Create Role objects. Uses the following data structure:

```yaml
role:
  - name: "Administrative" # str
    weight: # int
    description: "Unit plays an administrative role" # str
    color: "2196f3" # str
    content_types: # List[str]
      - "extras.contactassociation"
  - name: "Anycast"
    weight:
    description: ""
    color: "ffc107"
    content_types:
      - "ipam.ipaddress"
  - name: "Billing"
    weight:
    description: "Unit plays a billing role"
    color: "4caf50"
    content_types:
      - "extras.contactassociation"
  - name: "CARP"
    weight:
    description: ""
    color: "4caf50"
    content_types:
      - "ipam.ipaddress"
  - name: "GLBP"
    weight:
    description: ""
    color: "4caf50"
    content_types:
      - "ipam.ipaddress"
  - name: "HSRP"
    weight:
    description: ""
    color: "4caf50"
    content_types:
      - "ipam.ipaddress"
  - name: "Loopback"
    weight:
    description: ""
    color: "9e9e9e"
    content_types:
      - "ipam.ipaddress"
  - name: "On Site"
    weight:
    description: "Unit plays an on site role"
    color: "111111"
    content_types:
      - "extras.contactassociation"
  - name: "Secondary"
    weight:
    description: ""
    color: "2196f3"
    content_types:
      - "ipam.ipaddress"
  - name: "Support"
    weight:
    description: "Unit plays a support role"
    color: "ffeb3b"
    content_types:
      - "extras.contactassociation"
  - name: "VIP"
    weight:
    description: ""
    color: "4caf50"
    content_types:
      - "ipam.ipaddress"
  - name: "VRRP"
    weight:
    description: ""
    color: "4caf50"
    content_types:
      - "ipam.ipaddress"
```

This also recreates the default Roles included in Nautobot core. This is because the Role model does not support custom fields and therefore cannot be selectively synced with the SSoT framework. Any roles not included in the Bootstrap `global_settings.yml` file will be deleted. The list above is the default list of roles included in Nautobot core; to add your own role on top of the defaults, append it to the same list, as in the example below.
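For instance, a custom role appended alongside the defaults might look like this (values are illustrative):

```yaml
role:
  # ...the default roles listed above...
  - name: "Edge Router"
    weight: 100
    description: "Devices acting as internet edge routers"
    color: "ff5722"
    content_types:
      - "dcim.device"
```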
### Team

Create Team objects. Uses the following data structure:

```yaml
team:
  - name: # str
    phone: # str
    email: # str
    address: # str
    # contacts: []
```

Currently, assigning contacts to a team through the `contacts:` key is not supported due to the way that DiffSync works; assign Contacts to a Team by adding the Team to the `teams` list in the `Contact` model. In part this is because contacts need to exist before being assigned to `team.contacts`.

### Contact

Create Contact objects. Uses the following data structure:

```yaml
contact:
  - name: # str
    phone: # str
    email: # str
    address: # str
    teams: [] # List[str]
```

As noted above, a `Contact` can be assigned to a `Team` by listing the `Team` names in the `teams:` key in the `Contact` model.

### Provider

Create Provider objects. Uses the following data structure:

```yaml
provider:
  - name: # str
    asn: # int
    account_number: # str
    portal_url: # str
    noc_contact: # str
    admin_contact: # str
    tags: [] # List[str]
```

### Provider Network

Create ProviderNetwork objects. Uses the following data structure:

```yaml
provider_network:
  - name: # str
    provider: # str
    description: # str
    comments: # str
    tags: [] # List[str]
```

`provider` is a reference to a Provider object. Ensure it exists before trying to assign it.

### CircuitType

Create CircuitType objects. Uses the following data structure:

```yaml
circuit_type:
  - name: # str
    description: # str
```

### Circuit

Create Circuit objects. Uses the following data structure:

```yaml
circuit:
  - circuit_id: # str
    provider: # str
    circuit_type: # str
    status: # str
    date_installed: # date (YYYY-MM-DD)
    commit_rate_kbps: # int
    description: # str
    tenant: # str
    tags: [] # List[str]
```

`circuit_type`, `status`, `tenant`, and `tags` are references to existing objects in Nautobot. Make sure these exist before trying to assign them.

### CircuitTermination

Create CircuitTermination objects. Uses the following data structure:

```yaml
circuit_termination:
  - name: # str
    termination_type: # str
    location: # str
    provider_network: # str
    port_speed_kbps: # int
    upstream_speed_kbps: # int
    cross_connect_id: # str
    patch_panel_or_ports: # str
    description: # str
    tags: [] # List[str]
```

`termination_type` can be "Provider Network" or "Location", which are the only allowed relationships in the database for CircuitTermination objects. If you specify `termination_type` as "Provider Network" you will need to provide a valid Provider Network name in the `provider_network` field; if you specify "Location" you will specify a valid Location name in the `location` field. The `name` field is a bit special: it is used to reference the Circuit object and should be formatted as `{circuit_id}__{provider_name}__{termination_side}` (double underscores). The termination side can be "A" or "Z", and the Circuit ID and Provider Name are used to look up the correct Circuit and Provider information on creation, so make sure those exist prior to referencing them.
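For example, the A-side termination of a circuit `CID-123456` from a provider `NTT` landing at a location might look like this (all values are illustrative):

```yaml
circuit_termination:
  - name: "CID-123456__NTT__A" # {circuit_id}__{provider_name}__{termination_side}
    termination_type: "Location"
    location: "Atlanta"
    provider_network:
    port_speed_kbps: 1000000
    upstream_speed_kbps: 1000000
    cross_connect_id: "XC-0001"
    patch_panel_or_ports: "PP01 ports 1-2"
    description: "A side of CID-123456"
    tags: []
```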
### Namespace (IPAM)

Create Namespace objects. Uses the following data structure:

```yaml
namespace:
  - name: # str
    description: # str
    location: # str
```

`location` is a reference to a location name, and the app will attempt to look up this location by name and associate it with the namespace. Make sure the location exists. All uniqueness constraints are enforced by the ORM.

### RIR (IPAM)

Create RIR objects. Uses the following data structure:

```yaml
rir:
  - name: # str
    private: # bool: defaults to false
    description: # str
```

### VRF (IPAM)

Create VRF objects. Uses the following data structure:

```yaml
vrf:
  - name: # str
    namespace: # str
    route_distinguisher: # str
    description: # str
    # prefixes: # List[str]
    tenant: # str
    tags: # List[str]
```

`namespace` and `tenant` are strings which reference the namespace and tenant names respectively. Make sure these exist in Nautobot or in `global_settings.yml` so they can be associated. `tenant` defaults to None if blank or if the Nautobot Tenant can't be found. `namespace` defaults to the Global namespace if blank or if it can't be found. Currently, due to the order in which the app syncs objects, `prefixes` can't be defined on VRFs and must be assigned from the `prefix` object by specifying `vrfs` on the `prefix` definition. All uniqueness constraints are enforced by the ORM.

### VLAN Group

Create VLANGroup objects. Uses the following data structure:

```yaml
vlan_group:
  - name: # str
    location: # str
    description: # str
```

`location` is a reference to a location name, and the app will attempt to look up this location by name and associate it with the VLAN group. Make sure the location exists. All uniqueness constraints are enforced by the ORM.

### VLAN

Create VLAN objects. Uses the following data structure:

```yaml
vlan:
  - name: # str
    vid: # int between 1 and 4094
    description: # str
    status: # str
    role: # str
    locations: # List[str]
    vlan_group: # str
    tenant: # str
    tags: # List[str]
```

`locations` and `tags` are lists of strings which reference the location and tag names respectively. Make sure these exist in Nautobot or in `global_settings.yml` so they can be associated. `vlan_group` is a reference to a Nautobot VLANGroup name; this will be associated if it exists, or default to None if the Nautobot VLANGroup can't be found. `tenant`, `role`, and `status` are references to Tenant, Role, and Status objects in Nautobot. The app will attempt to look these up and associate them. `role` and `tenant` default to None if the object can't be found. `status` defaults to Active if an invalid status is defined. All uniqueness constraints are enforced by the ORM.

### Prefix

Create Prefix objects. Uses the following data structure:

```yaml
prefix:
  - network: # str (cidr notation)
    namespace: # str
    prefix_type: # str # network, container, or pool
    status: # str
    role: # str
    rir: # str
    date_allocated: # str(datetime) (YYYY-mm-dd HH:mm:ss)
    description: # str
    vrfs: # List[str]
    locations: # List[str]
    vlan: # str
    tenant: # str
    tags: # List[str]
```

`vrfs`, `locations`, and `tags` are lists of strings that reference the names of VRF, Location, and Tag objects in Nautobot. Make sure these exist, or they will default to None if they can't be found. `network` is the CIDR notation for the prefix. `namespace`, `status`, `role`, `rir`, `vlan`, and `tenant` are also references to the names of their respective objects in Nautobot; `status` defaults to Active, and the rest default to None if not found or left blank. `prefix_type` options are limited by the `PrefixTypeChoices` defined in `nautobot.ipam.choices` and are all lowercase: `network`, `container`, or `pool`. `date_allocated` should be a datetime string in the format indicated above (Year-Month-Day Hours:Minutes:Seconds, with time in 24-hour format) in order to properly set the `date_allocated` field on the prefix object, for example "1970-01-01 00:00:00". All uniqueness constraints are enforced by the ORM.
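The accepted `date_allocated` formats mirror the adapter's parsing fallbacks; this standalone sketch (not the app's actual API) shows the behavior:

```python
# Sketch of how `date_allocated` strings are interpreted: full datetime first,
# then date-only (midnight assumed), otherwise the value is rejected.
import datetime


def parse_date_allocated(value):
    """Return a datetime for a YYYY-mm-dd[ HH:MM:SS] string, or None."""
    for fmt in ("%Y-%m-%d %H:%M:%S", "%Y-%m-%d"):
        try:
            return datetime.datetime.strptime(value, fmt)
        except (TypeError, ValueError):
            continue
    return None


print(parse_date_allocated("1970-01-01 00:00:00"))  # 1970-01-01 00:00:00
print(parse_date_allocated("1970-01-01"))           # 1970-01-01 00:00:00 (midnight assumed)
print(parse_date_allocated("01/01/1970"))           # None (invalid format)
```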
### Secret

Create Secret objects. Uses the following data structure:

```yaml
secret:
  - name: # str
    provider: "environment-variable" # or text-file
    parameters: # dict
      variable: # str
      path: # str
```

`Secret` objects need to be created before `SecretsGroup` objects reference them, so make sure any `Secret` objects you want to reference in `SecretsGroup` objects are created here or already exist in Nautobot.

### SecretsGroup

Create SecretsGroup objects. Uses the following data structure:

```yaml
secrets_group:
  - name: # str
    secrets: # List[dict]
      - name: # str
        secret_type: # str
        access_type: # str
      - name: # str
        secret_type: # str
        access_type: # str
```

`Secret` objects need to be created before a SecretsGroup references them, so make sure any `Secret` objects you want to reference in `SecretsGroup` objects are created here or already exist in Nautobot.

### GitRepository

Create GitRepository objects. Uses the following data structure:

```yaml
git_repository:
  - name: # str
    url: # str
    branch: # str
    secrets_group_name: # str
    provided_data_type: [] # List[str]

# develop/staging/production.yml
git_branch: # str
```

GitRepositories are a bit unique. If you specify the `branch:` key on a repository in the `global_settings.yml` file, it will override the `git_branch:` key from the environment-specific yaml file. The `git_branch:` key in the environment-specific yaml file is the default, so you don't have to specify branches for each git repository.

### DynamicGroup

Create DynamicGroup objects. Uses the following data structure:

```yaml
dynamic_group:
  - name: # str
    content_type: # str
    description: # str
    filter: | # str
```

The `filter:` key for DynamicGroup objects takes a string representation of the JSON filter to group the devices required.

### ComputedField

Create ComputedField objects. Uses the following data structure:

```yaml
computed_field:
  - label: # str
    content_type: # str
    template: # str
```

The `template:` key for ComputedField objects takes a Jinja template string which will display the calculated information.

### Tag

Create Tag objects. Uses the following data structure:

```yaml
tag:
  - name: # str
    color: # str
    description: # str
    content_types: [] # List[str]
```

The `color` key is optional and defaults to grey if not specified. The `content_types` list is a list of `app_label.model` formatted strings for the types of objects that the tags should be able to apply to.

### GraphQLQuery

Create GraphQLQuery objects. Uses the following data structure:

```yaml
graph_ql_query:
  - name: # str
    query: | # str
```

The `query:` key takes a GraphQL-formatted string to retrieve the information required.

### Software

 - Note: Requires the Nautobot Device Lifecycle Plugin to be installed.

Create Software objects. Uses the following data structure:

```yaml
software:
  - device_platform: # str
    version: # str
    alias: # str
    release_date: # date (YYYY-MM-DD)
    eos_date: # date (YYYY-MM-DD)
    documentation_url: # str
    lts: # bool
    pre_release: # bool
    tags: [] # List[str]
```

The `device_platform` key must be a Platform that exists in Nautobot or is created by this plugin. The date fields `release_date` and `eos_date` need to be formatted YYYY-MM-DD in order to import properly.
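For example, the `arista_eos` software defined in the `global_settings.yml` example above is referenced from the image and validated-software entries described below by a `{platform} - {version}` string (values are illustrative):

```yaml
validated_software:
  - software: "arista_eos - 4.25.10M" # references the Software entry above
    valid_since: 2023-08-07
    valid_until:
    preferred_version: false
    tags: []
```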
### SoftwareImage

 - Note: Requires the Nautobot Device Lifecycle Plugin to be installed.

Create SoftwareImage objects. Uses the following data structure:

```yaml
software_image:
  - software: # str
    platform: # str
    software_version: # str
    file_name: # str
    download_url: # str
    image_file_checksum: # str
    hashing_algorithm: # str
    default_image: # bool
    tags: [] # List[str]
```

The `software`, `platform`, and `software_version` keys are linked and should be consistent. The Platform and Software must already be present in Nautobot for these models to be created. The format for the `software:` key is important and should be `{platform} - {software_version}`.

### ValidatedSoftware

 - Note: Requires the Nautobot Device Lifecycle Plugin to be installed.

Create ValidatedSoftware objects. Uses the following data structure:

```yaml
validated_software:
  - software: # str
    valid_since: # date (YYYY-MM-DD)
    valid_until: # date (YYYY-MM-DD)
    preferred_version: # bool
    devices: [] # List[str]
    device_types: [] # List[str]
    device_roles: [] # List[str]
    inventory_items: [] # List[str]
    object_tags: [] # List[str]
    tags: [] # List[str]
```

The `software:` key is a reference to the platform and software version of a Software object that already exists in Nautobot (or is created by this plugin). The `valid_since` and `valid_until` fields must be dates in YYYY-MM-DD format. The `devices`, `device_types`, `device_roles`, `inventory_items`, and `object_tags` keys are all lists of objects to apply the validated software to for validation against what is currently running on the device.
diff --git a/docs/user/integrations/index.md b/docs/user/integrations/index.md
index bb5b03b2e..ebd3a8df9 100644
--- a/docs/user/integrations/index.md
+++ b/docs/user/integrations/index.md
@@ -4,6 +4,7 @@ This Nautobot app supports the following integrations:
 
 - [Cisco ACI](./aci.md)
 - [Arista CloudVision](./aristacv.md)
+- [Bootstrap](./bootstrap.md)
 - [Device42](./device42.md)
 - [Infoblox](./infoblox.md)
 - [IPFabric](./ipfabric.md)
diff --git a/mkdocs.yml b/mkdocs.yml
index a83afa16d..53eb099e4 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -110,6 +110,7 @@ nav:
       - "user/integrations/index.md"
       - Cisco ACI: "user/integrations/aci.md"
       - Arista CloudVision: "user/integrations/aristacv.md"
+      - Bootstrap: "user/integrations/bootstrap.md"
       - Device42: "user/integrations/device42.md"
       - Infoblox: "user/integrations/infoblox.md"
       - IPFabric: "user/integrations/ipfabric.md"
@@ -125,6 +126,7 @@
       - "admin/integrations/index.md"
       - Cisco ACI: "admin/integrations/aci_setup.md"
       - Arista CloudVision: "admin/integrations/aristacv_setup.md"
+      - Bootstrap: "admin/integrations/bootstrap_setup.md"
       - Device42: "admin/integrations/device42_setup.md"
       - Infoblox: "admin/integrations/infoblox_setup.md"
       - IPFabric: "admin/integrations/ipfabric_setup.md"
diff --git a/nautobot_ssot/__init__.py b/nautobot_ssot/__init__.py
index bb805c222..7eeff3479 100644
--- a/nautobot_ssot/__init__.py
+++ b/nautobot_ssot/__init__.py
@@ -17,6 +17,7 @@
 _CONFLICTING_APP_NAMES = [
     "nautobot_ssot_aci",
     "nautobot_ssot_aristacv",
+    "nautobot_ssot_bootstrap",
     "nautobot_ssot_device42",
     "nautobot_ssot_dna_center",
     "nautobot_ssot_infoblox",
diff --git a/nautobot_ssot/integrations/bootstrap/__init__.py b/nautobot_ssot/integrations/bootstrap/__init__.py
new file mode 100644
index 000000000..85e31f0b4
--- /dev/null
+++ b/nautobot_ssot/integrations/bootstrap/__init__.py
@@ -0,0 +1 @@
+"""Plugin declaration for bootstrap."""
diff --git a/nautobot_ssot/integrations/bootstrap/constants.py
b/nautobot_ssot/integrations/bootstrap/constants.py
new file mode 100644
index 000000000..eb25bf1a7
--- /dev/null
+++ b/nautobot_ssot/integrations/bootstrap/constants.py
@@ -0,0 +1,3 @@
+"""Constants to be used with the nautobot_ssot_bootstrap plugin."""
+
+content_model_path_mapping = {}
diff --git a/nautobot_ssot/integrations/bootstrap/diffsync/adapters/__init__.py b/nautobot_ssot/integrations/bootstrap/diffsync/adapters/__init__.py
new file mode 100644
index 000000000..77e2b2577
--- /dev/null
+++ b/nautobot_ssot/integrations/bootstrap/diffsync/adapters/__init__.py
@@ -0,0 +1 @@
+"""Adapter classes for loading DiffSyncModels with data from bootstrap or Nautobot."""
diff --git a/nautobot_ssot/integrations/bootstrap/diffsync/adapters/bootstrap.py b/nautobot_ssot/integrations/bootstrap/diffsync/adapters/bootstrap.py
new file mode 100755
index 000000000..89eaa9eeb
--- /dev/null
+++ b/nautobot_ssot/integrations/bootstrap/diffsync/adapters/bootstrap.py
@@ -0,0 +1,1041 @@
+"""Nautobot Ssot Bootstrap Adapter for bootstrap SSoT plugin."""
+
+import datetime
+import json
+import os
+
+import yaml
+from diffsync import Adapter
+from diffsync.exceptions import ObjectAlreadyExists, ObjectNotFound
+from django.conf import settings
+from nautobot.extras.datasources.git import ensure_git_repository
+from nautobot.extras.models import GitRepository
+
+from nautobot_ssot.integrations.bootstrap.diffsync.models.bootstrap import (
+    BootstrapCircuit,
+    BootstrapCircuitTermination,
+    BootstrapCircuitType,
+    BootstrapComputedField,
+    BootstrapContact,
+    BootstrapDynamicGroup,
+    BootstrapGitRepository,
+    BootstrapGraphQLQuery,
+    BootstrapLocation,
+    BootstrapLocationType,
+    BootstrapManufacturer,
+    BootstrapNamespace,
+    BootstrapPlatform,
+    BootstrapPrefix,
+    BootstrapProvider,
+    BootstrapProviderNetwork,
+    BootstrapRiR,
+    BootstrapRole,
+    BootstrapSecret,
+    BootstrapSecretsGroup,
+    BootstrapTag,
+    BootstrapTeam,
+    BootstrapTenant,
+    BootstrapTenantGroup,
+    BootstrapVLAN,
+    BootstrapVLANGroup,
+    BootstrapVRF,
+)
+from nautobot_ssot.integrations.bootstrap.utils import (
+    is_running_tests,
+    lookup_content_type,
+)
+
+try:
+    import nautobot_device_lifecycle_mgmt  # noqa: F401
+
+    LIFECYCLE_MGMT = True
+except ImportError:
+    LIFECYCLE_MGMT = False
+
+if LIFECYCLE_MGMT:
+    from nautobot_ssot.integrations.bootstrap.diffsync.models.bootstrap import (  # noqa: F401
+        BootstrapSoftware,
+        BootstrapSoftwareImage,
+        BootstrapValidatedSoftware,
+    )
+
+
+class LabelMixin:
+    """Add labels onto Nautobot objects to provide information on sync status with Bootstrap."""
+
+    def label_imported_objects(self, target):
+        """Add CustomFields to all objects that were successfully synced to the target."""
+        _model_list = [
+            "tag",
+            "tenant_group",
+            "tenant",
+            "role",
+            "manufacturer",
+            "platform",
+            "location_type",
+            "location",
+            "team",
+            "contact",
+            "provider",
+            "provider_network",
+            "circuit_type",
+            "circuit",
+            "namespace",
+            "rir",
+            "vlan_group",
+            "vlan",
+            "vrf",
+            "prefix",
+            "secret",
+            "secrets_group",
+            "git_repository",
+            "dynamic_group",
+            "computed_field",
+            "graph_ql_query",
+        ]
+
+        if LIFECYCLE_MGMT:
+            # list.append() only takes a single argument; use extend() to add all three models.
+            _model_list.extend(
+                [
+                    "software",
+                    "software_image",
+                    "validated_software",
+                ]
+            )
+
+        for modelname in _model_list:
+            for local_instance in self.get_all(modelname):
+                unique_id = local_instance.get_unique_id()
+                # Verify that the object now has a counterpart in the target DiffSync
+                try:
+                    target.get(modelname, unique_id)
+                except ObjectNotFound:
+                    continue
+
+                self.label_object(modelname, unique_id)
+
+    def label_object(self, modelname, unique_id):
+        """Apply the given CustomField to the identified object."""
+        # datetime is imported as a module, so the date helper lives under datetime.date.
+        today = datetime.date.today().isoformat()
+
+        def _label_object(nautobot_object):
+            """Apply custom field to object, if applicable."""
+            nautobot_object.custom_field_data["last_synced_from_sor"] = today
+            nautobot_object.custom_field_data["system_of_record"] = os.getenv("SYSTEM_OF_RECORD", "Bootstrap")
+            nautobot_object.validated_save()
+
+
+class BootstrapAdapter(Adapter, LabelMixin):
+    """DiffSync adapter for Bootstrap."""
+
+    tenant_group = BootstrapTenantGroup
+    tenant = BootstrapTenant
+    role = BootstrapRole
+    manufacturer = BootstrapManufacturer
+    platform = BootstrapPlatform
+    location_type = BootstrapLocationType
+    location = BootstrapLocation
+    team = BootstrapTeam
+    contact = BootstrapContact
+    provider = BootstrapProvider
+    provider_network = BootstrapProviderNetwork
+    circuit_type = BootstrapCircuitType
+    circuit = BootstrapCircuit
+    circuit_termination = BootstrapCircuitTermination
+    namespace = BootstrapNamespace
+    rir = BootstrapRiR
+    vlan_group = BootstrapVLANGroup
+    vlan = BootstrapVLAN
+    vrf = BootstrapVRF
+    prefix = BootstrapPrefix
+    secret = BootstrapSecret
+    secrets_group = BootstrapSecretsGroup
+    git_repository = BootstrapGitRepository
+    dynamic_group = BootstrapDynamicGroup
+    computed_field = BootstrapComputedField
+    tag = BootstrapTag
+    graph_ql_query = BootstrapGraphQLQuery
+
+    if LIFECYCLE_MGMT:
+        software = BootstrapSoftware
+        software_image = BootstrapSoftwareImage
+        validated_software = BootstrapValidatedSoftware
+
+    top_level = [
+        "tenant_group",
+        "tenant",
+        "role",
+        "manufacturer",
+        "platform",
+        "location_type",
+        "location",
+        "team",
+        "contact",
+        "provider",
+        "provider_network",
+        "circuit_type",
+        "circuit",
+        "namespace",
+        "rir",
+        "vlan_group",
+        "vlan",
+        "vrf",
+        "prefix",
+        "secret",
+        "secrets_group",
+        "git_repository",
+        "dynamic_group",
+        "computed_field",
+        "tag",
+        "graph_ql_query",
+    ]
+
+    if LIFECYCLE_MGMT:
+        top_level.append("software")
+        top_level.append("software_image")
+        top_level.append("validated_software")
+
+    def __init__(self, *args, job=None, sync=None, client=None, **kwargs):  # noqa: D417
+        """Initialize bootstrap.
+
+        Args:
+            job (object, optional): bootstrap job. Defaults to None.
+            sync (object, optional): bootstrap DiffSync. Defaults to None.
+            client (object): bootstrap API client connection object.
+        """
+        super().__init__(*args, **kwargs)
+        self.job = job
+        self.sync = sync
+        self.conn = client
+
+    def load_tenant_group(self, bs_tenant_group, branch_vars):
+        """Load TenantGroup objects from Bootstrap into DiffSync models."""
+        self.job.logger.debug(f"Loading Bootstrap TenantGroup: {bs_tenant_group}")
+
+        try:
+            self.get(self.tenant_group, bs_tenant_group["name"])
+        except ObjectNotFound:
+            new_tenant_group = self.tenant_group(
+                name=bs_tenant_group["name"],
+                parent=bs_tenant_group["parent"] if bs_tenant_group["parent"] else None,
+                description=bs_tenant_group["description"],
+                system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"),
+            )
+            self.add(new_tenant_group)
+
+    def load_tenant(self, bs_tenant, branch_vars):
+        """Load Tenant objects from Bootstrap into DiffSync models."""
+        self.job.logger.debug(f"Loading Bootstrap Tenant: {bs_tenant}")
+
+        try:
+            self.get(self.tenant, bs_tenant["name"])
+        except ObjectNotFound:
+            new_tenant = self.tenant(
+                name=bs_tenant["name"],
+                tenant_group=bs_tenant["tenant_group"] if bs_tenant["tenant_group"] else None,
+                description=bs_tenant["description"],
+                tags=bs_tenant["tags"],
+                system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"),
+            )
+            self.add(new_tenant)
+
+    def load_role(self, bs_role, branch_vars):
+        """Load Role objects from Bootstrap into DiffSync models."""
+        self.job.logger.debug(f"Loading Bootstrap Role {bs_role}")
+
+        if len(bs_role["content_types"]) > 1:
+            _content_types = bs_role["content_types"]
+            _content_types.sort()
+        else:
+            _content_types = bs_role["content_types"]
+        try:
+            self.get(self.role, bs_role["name"])
+        except ObjectNotFound:
+            new_role = self.role(
+                name=bs_role["name"],
+                weight=bs_role["weight"],
+                description=bs_role["description"],
+                color=bs_role["color"] if bs_role["color"] else "9e9e9e",
+                content_types=_content_types,
+                system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"),
+            )
+            self.add(new_role)
+
+    def load_manufacturer(self, bs_manufacturer, branch_vars):
+        """Load Manufacturer objects from Bootstrap into DiffSync models."""
+        self.job.logger.debug(f"Loading Bootstrap Manufacturer {bs_manufacturer}")
+
+        try:
+            self.get(self.manufacturer, bs_manufacturer["name"])
+        except ObjectNotFound:
+            new_manufacturer = self.manufacturer(
+                name=bs_manufacturer["name"],
+                description=bs_manufacturer["description"],
+                system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"),
+            )
+            self.add(new_manufacturer)
+
+    def load_platform(self, bs_platform, branch_vars):
+        """Load Platform objects from Bootstrap into DiffSync models."""
+        self.job.logger.debug(f"Loading Bootstrap Platform {bs_platform}")
+
+        try:
+            self.get(self.platform, bs_platform["name"])
+        except ObjectNotFound:
+            new_platform = self.platform(
+                name=bs_platform["name"],
+                manufacturer=bs_platform["manufacturer"],
+                network_driver=bs_platform["network_driver"],
+                napalm_driver=bs_platform["napalm_driver"],
+                napalm_arguments=bs_platform["napalm_arguments"],
+                description=bs_platform["description"],
+                system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"),
+            )
+            self.add(new_platform)
+
+    def load_location_type(self, bs_location_type, branch_vars):
+        """Load LocationType objects from Bootstrap into DiffSync models."""
+        self.job.logger.debug(f"Loading Bootstrap LocationType {bs_location_type}")
+
+        try:
+            self.get(self.location_type, bs_location_type["name"])
+        except ObjectNotFound:
+            _content_types = []
+            if bs_location_type["parent"]:
+                _parent = bs_location_type["parent"]
+            else:
+                _parent = None
+            if len(bs_location_type["content_types"]) > 1:
+                _content_types = 
bs_location_type["content_types"] + _content_types.sort() + new_location_type = self.location_type( + name=bs_location_type["name"], + parent=_parent, + nestable=bs_location_type["nestable"], + description=bs_location_type["description"], + content_types=_content_types, + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_location_type) + + def load_location(self, bs_location, branch_vars): + """Load Location objects from Bootstrap into DiffSync models.""" + self.job.logger.debug(f"Loading Bootstrap Location {bs_location}") + + try: + self.get(self.location, bs_location["name"]) + except ObjectNotFound: + if bs_location["parent"]: + _parent = bs_location["parent"] + else: + _parent = None + if bs_location["tenant"]: + _tenant = bs_location["tenant"] + else: + _tenant = None + new_location = self.location( + name=bs_location["name"], + location_type=bs_location["location_type"], + parent=_parent, + status=bs_location["status"], + facility=bs_location["facility"], + asn=bs_location["asn"], + time_zone=bs_location["time_zone"], + description=bs_location["description"], + tenant=_tenant, + physical_address=bs_location["physical_address"], + shipping_address=bs_location["shipping_address"], + latitude=bs_location["latitude"], + longitude=bs_location["longitude"], + contact_name=bs_location["contact_name"], + contact_phone=bs_location["contact_phone"], + contact_email=bs_location["contact_email"], + tags=bs_location["tags"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_location) + + def load_team(self, bs_team, branch_vars): + """Load Team objects from Bootstrap into DiffSync models.""" + self.job.logger.debug(f"Loading Bootstrap Team {bs_team}") + + if "contacts" in bs_team: + _contacts = [] + for _contact in bs_team["contacts"]: + _contacts.append(_contact) + _contacts.sort() + try: + self.get(self.team, bs_team["name"]) + except ObjectNotFound: + new_team = self.team( + name=bs_team["name"], + phone=bs_team["phone"], + email=bs_team["email"], + address=bs_team["address"], + # TODO: Need to consider how to allow loading from teams or contacts models. 
+ # contacts=_contacts, + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_team) + + def load_contact(self, bs_contact, branch_vars): + """Load Contact objects from Bootstrap into DiffSync models.""" + self.job.logger.debug(f"Loading Boostrap Contact {bs_contact}") + + if "teams" in bs_contact: + _teams = [] + for _team in bs_contact["teams"]: + _teams.append(_team) + _teams.sort() + try: + self.get(self.contact, bs_contact["name"]) + except ObjectNotFound: + new_contact = self.contact( + name=bs_contact["name"], + phone=bs_contact["phone"], + email=bs_contact["email"], + address=bs_contact["address"], + teams=_teams, + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_contact) + + def load_provider(self, bs_provider, branch_vars): + """Load Provider objects from Bootstrap into DiffSync models.""" + self.job.logger.debug(f"Loading Bootstrap Provider {bs_provider}") + + try: + self.get(self.provider, bs_provider["name"]) + except ObjectNotFound: + new_provider = self.provider( + name=bs_provider["name"], + asn=bs_provider["asn"], + account_number=bs_provider["account_number"], + portal_url=bs_provider["portal_url"], + noc_contact=bs_provider["noc_contact"], + admin_contact=bs_provider["admin_contact"], + tags=bs_provider["tags"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_provider) + + def load_provider_network(self, bs_provider_network, branch_vars): + """Load ProviderNetwork objects from Bootstrap into DiffSync models.""" + self.job.logger.debug(f"Loading Bootstrap ProviderNetwork {bs_provider_network}") + + try: + self.get(self.provider_network, bs_provider_network["name"]) + except ObjectNotFound: + new_provider_network = self.provider_network( + name=bs_provider_network["name"], + provider=bs_provider_network["provider"], + description=bs_provider_network["description"], + comments=bs_provider_network["comments"], + tags=bs_provider_network["tags"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_provider_network) + + def load_circuit_type(self, bs_circuit_type, branch_vars): + """Load CircuitType objects from Bootstrap into DiffSync models.""" + self.job.logger.debug(f"Loading Bootstrap CircuitType {bs_circuit_type} into DiffSync models.") + + try: + self.get(self.circuit_type, bs_circuit_type["name"]) + except ObjectNotFound: + new_circuit_type = self.circuit_type( + name=bs_circuit_type["name"], + description=bs_circuit_type["description"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_circuit_type) + + def load_circuit(self, bs_circuit, branch_vars): + """Load Circuit objects from Bootstrap into DiffSync models.""" + self.job.logger.debug(f"Loading Bootstrap Circuit {bs_circuit} into DiffSync models.") + + try: + self.get(self.circuit, bs_circuit["circuit_id"]) + except ObjectNotFound: + new_circuit = self.circuit( + circuit_id=bs_circuit["circuit_id"], + provider=bs_circuit["provider"], + circuit_type=bs_circuit["circuit_type"], + status=bs_circuit["status"], + date_installed=bs_circuit["date_installed"], + commit_rate_kbps=bs_circuit["commit_rate_kbps"], + description=bs_circuit["description"], + tags=bs_circuit["tags"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_circuit) + + def load_circuit_termination(self, bs_circuit_termination, branch_vars): + """Load CircuitTermination objects from Bootstrap into DiffSync models.""" + self.job.logger.debug(f"Loading Bootstrap 
CircuitTermination {bs_circuit_termination} into DiffSync models.") + _parts = bs_circuit_termination["name"].split("__") + _circuit_id = _parts[0] + _provider = _parts[1] + _term_side = _parts[2] + try: + self.get(self.circuit_termination, bs_circuit_termination["name"]) + except ObjectNotFound: + new_circuit_termination = self.circuit_termination( + name=bs_circuit_termination["name"], + termination_type=bs_circuit_termination["termination_type"], + termination_side=_term_side, + circuit_id=_circuit_id, + location=(bs_circuit_termination["location"] if bs_circuit_termination["location"] != "" else None), + provider_network=( + bs_circuit_termination["provider_network"] + if bs_circuit_termination["provider_network"] != "" + else None + ), + port_speed_kbps=bs_circuit_termination["port_speed_kbps"], + upstream_speed_kbps=bs_circuit_termination["upstream_speed_kbps"], + cross_connect_id=bs_circuit_termination["cross_connect_id"], + patch_panel_or_ports=bs_circuit_termination["patch_panel_or_ports"], + description=bs_circuit_termination["description"], + tags=bs_circuit_termination["tags"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_circuit_termination) + try: + _circuit = self.get(self.circuit, {"circuit_id": _circuit_id, "provider": _provider}) + _circuit.add_child(new_circuit_termination) + except ObjectAlreadyExists as err: + self.job.logger.warning(f"CircuitTermination for {_circuit} already exists. {err}") + except ObjectNotFound as err: + self.job.logger.warning(f"Circuit {_circuit_id} not found. {err}") + + def load_namespace(self, bs_namespace, branch_vars): + """Load Namespace objects from Bootstrap into DiffSync models.""" + self.job.logger.debug(f"Loading Bootstrap Namespace {bs_namespace}.") + try: + self.get(self.namespace, bs_namespace["name"]) + except ObjectNotFound: + new_namespace = self.namespace( + name=bs_namespace["name"], + description=bs_namespace["description"], + location=bs_namespace["location"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_namespace) + + def load_rir(self, bs_rir, branch_vars): + """Load RiR objects from Bootstrap into DiffSync models.""" + self.job.logger.debug(f"Loading Bootstrap RiR {bs_rir}.") + try: + self.get(self.rir, bs_rir["name"]) + except ObjectNotFound: + new_rir = self.rir( + name=bs_rir["name"], + private=bs_rir["private"] if bs_rir["private"] else False, + description=bs_rir["description"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_rir) + + def load_vlan_group(self, bs_vlan_group, branch_vars): + """Load VLANGroup objects from Bootstrap into DiffSync models.""" + self.job.logger.debug(f"Loading Bootstrap VLANGroup {bs_vlan_group}.") + try: + self.get(self.vlan_group, bs_vlan_group["name"]) + except ObjectNotFound: + new_vlan_group = self.vlan_group( + name=bs_vlan_group["name"], + location=bs_vlan_group["location"], + description=bs_vlan_group["description"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_vlan_group) + + def load_vlan(self, bs_vlan, branch_vars): + """Load VLAN objects from Bootstrap into DiffSync models.""" + self.job.logger.debug(f"Loading Bootstrap VLAN {bs_vlan}.") + try: + self.get( + self.vlan, + { + "name": bs_vlan["name"], + "vid": bs_vlan["vid"], + "vlan_group": (bs_vlan["vlan_group"] if bs_vlan["vlan_group"] else None), + }, + ) + except ObjectNotFound: + new_vlan = self.vlan( + name=bs_vlan["name"], + vid=bs_vlan["vid"], + 
description=bs_vlan["description"],
+                status=bs_vlan["status"] if bs_vlan["status"] else "Active",
+                role=bs_vlan["role"] if bs_vlan["role"] else None,
+                locations=bs_vlan["locations"],
+                vlan_group=bs_vlan["vlan_group"] if bs_vlan["vlan_group"] else None,
+                tenant=bs_vlan["tenant"] if bs_vlan["tenant"] else None,
+                tags=bs_vlan["tags"],
+                system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"),
+            )
+            self.add(new_vlan)
+
+    def load_vrf(self, bs_vrf, branch_vars):
+        """Load VRF objects from Bootstrap into DiffSync models."""
+        self.job.logger.debug(f"Loading Bootstrap VRF {bs_vrf}.")
+        try:
+            self.get(
+                self.vrf,
+                {
+                    "name": bs_vrf["name"],
+                    "namespace": (bs_vrf["namespace"] if bs_vrf["namespace"] else "Global"),
+                },
+            )
+        except ObjectNotFound:
+            new_vrf = self.vrf(
+                name=bs_vrf["name"],
+                namespace=bs_vrf["namespace"] if bs_vrf["namespace"] else "Global",
+                route_distinguisher=bs_vrf["route_distinguisher"],
+                description=bs_vrf["description"],
+                tenant=bs_vrf["tenant"] if bs_vrf["tenant"] else None,
+                tags=bs_vrf["tags"],
+                system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"),
+            )
+            self.add(new_vrf)
+
+    def load_prefix(self, bs_prefix, branch_vars):
+        """Load Prefix objects from Bootstrap into DiffSync models."""
+        self.job.logger.debug(f"Loading Bootstrap Prefix {bs_prefix}.")
+        try:
+            self.get(
+                self.prefix,
+                {
+                    # Identifier values must be plain values, not set literals.
+                    "network": bs_prefix["network"],
+                    "namespace": bs_prefix["namespace"] if bs_prefix["namespace"] else "Global",
+                },
+            )
+        except ObjectNotFound:
+            _date_allocated = None
+            if "date_allocated" in bs_prefix and bs_prefix["date_allocated"]:
+                if isinstance(bs_prefix["date_allocated"], (datetime.date, datetime.datetime)):
+                    _date_allocated = bs_prefix["date_allocated"]
+                    if isinstance(_date_allocated, datetime.date) and not isinstance(
+                        _date_allocated, datetime.datetime
+                    ):
+                        _date_allocated = datetime.datetime.combine(_date_allocated, datetime.time.min)
+                else:
+                    try:
+                        _date_allocated = datetime.datetime.strptime(bs_prefix["date_allocated"], "%Y-%m-%d %H:%M:%S")
+                    except (TypeError, ValueError):
+                        try:
+                            _date_allocated = datetime.datetime.strptime(bs_prefix["date_allocated"], "%Y-%m-%d")
+                            _date_allocated = _date_allocated.replace(hour=0, minute=0, second=0)
+                        except (TypeError, ValueError):
+                            _date_allocated = None
+                            self.job.logger.warning(
+                                f"Invalid date format for date_allocated: {bs_prefix['date_allocated']}"
+                            )
+            new_prefix = self.prefix(
+                network=bs_prefix["network"],
+                namespace=(bs_prefix["namespace"] if bs_prefix["namespace"] else "Global"),
+                prefix_type=(bs_prefix["prefix_type"] if bs_prefix["prefix_type"] else "network"),
+                status=bs_prefix["status"] if bs_prefix["status"] else "Active",
+                role=bs_prefix["role"] if bs_prefix["role"] else None,
+                rir=bs_prefix["rir"] if bs_prefix["rir"] else None,
+                date_allocated=_date_allocated,
+                description=bs_prefix["description"],
+                vrfs=bs_prefix["vrfs"] if bs_prefix["vrfs"] else None,
+                locations=bs_prefix["locations"] if bs_prefix["locations"] else None,
+                vlan=bs_prefix["vlan"] if bs_prefix["vlan"] else None,
+                tenant=bs_prefix["tenant"] if bs_prefix["tenant"] else None,
+                tags=bs_prefix["tags"],
+                system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"),
+            )
+            self.add(new_prefix)
+
+    def load_secret(self, bs_secret, branch_vars):
+        """Load Secret objects from Bootstrap into DiffSync models."""
+        if bs_secret["provider"] == "environment-variable":
+            params = {"variable": bs_secret["parameters"]["variable"]}
+        elif bs_secret["provider"] == "text-file":
+            # Text-file secrets take a "path" parameter rather than "variable".
+            params = {"path": 
bs_secret["parameters"]["path"]} + else: + self.job.logger.warning(f"Secret: {bs_secret} is not formatted correctly in the yaml file.") + return + + self.job.logger.debug(f"Loading Bootstrap Secret: {bs_secret}, params: {params}") + + try: + self.get(self.secret, bs_secret["name"]) + except ObjectNotFound: + new_secret = self.secret( + name=bs_secret["name"], + provider=bs_secret["provider"], + parameters=params, + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_secret) + + def load_secrets_group(self, bs_sg, branch_vars): + """Load SecretsGroup objects from Bootstrap into DiffSync models.""" + _secrets = [] + self.job.logger.debug(f"Loading Bootstrap SecretsGroup: {bs_sg}") + try: + self.get(self.secrets_group, bs_sg["name"]) + except ObjectNotFound: + for _sec in bs_sg["secrets"]: + _secrets.append(_sec) + _secrets = sorted(_secrets, key=lambda x: x["name"]) + new_secrets_group = self.secrets_group( + name=bs_sg["name"], + secrets=_secrets, + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_secrets_group) + + def load_git_repository(self, git_repo, branch_vars): + """Load GitRepository objects from Bootstrap into DiffSync models.""" + self.job.logger.debug(f"Loading Bootstrap GitRepository: {git_repo}") + try: + self.get(self.git_repository, git_repo["name"]) + except ObjectNotFound: + _data_types = [] + for con_type in git_repo["provided_data_type"]: + _content_type = lookup_content_type(content_model_path="extras.gitrepository", content_type=con_type) + _data_types.append(_content_type) + if git_repo.get("branch"): + _branch = git_repo["branch"] + else: + _branch = branch_vars["git_branch"] + new_git_repository = self.git_repository( + name=git_repo["name"], + url=git_repo["url"], + branch=_branch, + provided_contents=_data_types, + secrets_group=git_repo["secrets_group_name"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_git_repository) + _data_types.clear() + + def load_dynamic_group(self, dyn_group): + """Load DynamicGroup objects from Bootstrap into DiffSync models.""" + self.job.logger.debug(f"Loading Bootstrap DynamicGroup: {dyn_group}") + try: + self.get(self.dynamic_group, dyn_group["name"]) + except ObjectNotFound: + new_dynamic_group = self.dynamic_group( + name=dyn_group["name"], + content_type=dyn_group["content_type"], + dynamic_filter=json.loads(dyn_group["filter"]), + description=dyn_group["description"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_dynamic_group) + + def load_computed_field(self, comp_field): + """Load ComputedField objects from Bootstrap into DiffSync Models.""" + self.job.logger.debug(f"Loading Bootstrap ComputedField: {comp_field}") + try: + self.get(self.computed_field, comp_field["label"]) + except ObjectNotFound: + _new_comp_field = self.computed_field( + label=comp_field["label"], + content_type=comp_field["content_type"], + template=comp_field["template"], + ) + self.add(_new_comp_field) + + def load_tag(self, tag): + """Load Tag objects from Bootstrap into DiffSync Models.""" + self.job.logger.debug(f"Loading Bootstrap Tag: {tag}") + if len(tag["content_types"]) > 1: + _content_types = tag["content_types"] + _content_types.sort() + else: + _content_types = tag["content_types"] + try: + self.get(self.tag, tag["name"]) + except ObjectNotFound: + _new_tag = self.tag( + name=tag["name"], + color=tag["color"] if not None else "9e9e9e", + content_types=_content_types, + description=tag["description"], + 
+
+    def load_graph_ql_query(self, query):
+        """Load GraphQLQuery objects from Bootstrap into DiffSync Models."""
+        self.job.logger.debug(f"Loading Bootstrap GraphQLQuery {query}")
+        try:
+            self.get(self.graph_ql_query, query["name"])
+        except ObjectNotFound:
+            _new_graphqlq = self.graph_ql_query(name=query["name"], query=query["query"])
+            self.add(_new_graphqlq)
+
+    def load_software(self, software):
+        """Load Software objects from Bootstrap into DiffSync Models."""
+        self.job.logger.debug(f"Loading Bootstrap Software {software}")
+        try:
+            self.get(
+                self.software,
+                {
+                    "version": software["version"],
+                    "platform": software["device_platform"],
+                },
+            )
+        except ObjectNotFound:
+            try:
+                _release_date = datetime.datetime.strptime(software["release_date"], "%Y-%m-%d")
+            except (TypeError, ValueError):
+                _release_date = None
+            try:
+                _eos_date = datetime.datetime.strptime(software["eos_date"], "%Y-%m-%d")
+            except (TypeError, ValueError):
+                _eos_date = None
+            if software["documentation_url"] is None:
+                _documentation_url = ""
+            else:
+                _documentation_url = software["documentation_url"]
+            _new_software = self.software(
+                version=software["version"],
+                platform=software["device_platform"],
+                alias=software["alias"] if software["alias"] is not None else "",
+                release_date=_release_date,
+                eos_date=_eos_date,
+                documentation_url=_documentation_url,
+                long_term_support=software["lts"],
+                pre_release=software["pre_release"],
+                tags=software["tags"],
+                system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"),
+            )
+            self.add(_new_software)
+
+    def load_software_image(self, software_image):
+        """Load SoftwareImage objects from Bootstrap into DiffSync Models."""
+        self.job.logger.debug(f"Loading Bootstrap SoftwareImage {software_image}")
+        try:
+            self.get(self.software_image, software_image["file_name"])
+        except ObjectNotFound:
+            _new_software_image = self.software_image(
+                software=f'{software_image["platform"]} - {software_image["software_version"]}',
+                platform=software_image["platform"],
+                software_version=software_image["software_version"],
+                file_name=software_image["file_name"],
+                download_url=software_image["download_url"],
+                image_file_checksum=software_image["image_file_checksum"],
+                hashing_algorithm=software_image["hashing_algorithm"],
+                default_image=software_image["default_image"] if software_image["default_image"] is not None else False,
+                tags=software_image["tags"],
+                system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"),
+            )
+            self.add(_new_software_image)
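For models keyed on multiple fields, `get()` takes a dict mapping each identifier field to a plain value; diffsync joins those values to build the lookup key. Wrapping a value in braces creates a one-element `set`, whose string form never matches a stored key, so the lookup always misses. A small self-contained sketch (class names are illustrative):

```python
from diffsync import Adapter, DiffSyncModel


class ExampleSoftware(DiffSyncModel):
    """Toy model with a composite identifier."""

    _modelname = "software"
    _identifiers = ("version", "platform")

    version: str
    platform: str


class ExampleAdapter(Adapter):
    software = ExampleSoftware
    top_level = ["software"]


adapter = ExampleAdapter()
adapter.add(ExampleSoftware(version="15.1", platform="cisco_ios"))

# Correct: plain values for each identifier field.
adapter.get(ExampleSoftware, {"version": "15.1", "platform": "cisco_ios"})

# Incorrect: {"15.1"} is a set literal; the generated key would be
# "{'15.1'}__{'cisco_ios'}" and get() would raise ObjectNotFound.
```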
+
+    def load_validated_software(self, validated_software):
+        """Load ValidatedSoftware objects from Bootstrap into DiffSync Models."""
+        self.job.logger.debug(f"Loading Bootstrap ValidatedSoftware {validated_software}")
+        try:
+            self.get(
+                self.validated_software,
+                {
+                    "software": validated_software["software"],
+                    "valid_since": validated_software["valid_since"],
+                    "valid_until": validated_software["valid_until"],
+                },
+            )
+        except ObjectNotFound:
+            _new_validated_software = self.validated_software(
+                software=validated_software["software"],
+                software_version=validated_software["software"].split(" - ", 1)[1],
+                platform=validated_software["software"].split(" - ", 1)[0],
+                valid_since=validated_software["valid_since"],
+                valid_until=validated_software["valid_until"],
+                preferred_version=validated_software["preferred_version"],
+                devices=sorted(validated_software["devices"]),
+                device_types=sorted(validated_software["device_types"]),
+                device_roles=sorted(validated_software["device_roles"]),
+                inventory_items=sorted(validated_software["inventory_items"]),
+                object_tags=sorted(validated_software["object_tags"]),
+                tags=sorted(validated_software["tags"]),
+                system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"),
+            )
+            self.add(_new_validated_software)
+
+    def load(self):
+        """Load data from Bootstrap into DiffSync models."""
+        environment_label = settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_nautobot_environment_branch"]
+
+        if is_running_tests():
+            load_type = "file"
+        elif self.job.load_source == "env_var":
+            load_type = os.getenv("NAUTOBOT_BOOTSTRAP_SSOT_LOAD_SOURCE", "file")
+        else:
+            load_type = self.job.load_source
+
+        global global_settings
+        global_settings = None
+
+        if load_type == "file":
+            directory_path = "nautobot_ssot/integrations/bootstrap/fixtures"
+            # Generates a variable for each file in fixtures, named the same as the file name less the extension.
+            for filename in os.listdir(directory_path):
+                if filename.endswith(".yaml") or filename.endswith(".yml"):
+                    with open(os.path.join(directory_path, filename), "r") as file:
+                        yaml_data = yaml.safe_load(file)
+                    variable_name = os.path.splitext(filename)[0]
+                    globals()[variable_name] = yaml_data
+
+            branch_vars = globals()[environment_label]
+            global_settings = globals().get("global_settings")
+
+        elif load_type == "git":
+            repo = GitRepository.objects.filter(
+                name__icontains="Bootstrap",
+                provided_contents__icontains="extras.configcontext",
+            )
+            if len(repo) == 0:
+                self.job.logger.warning(
+                    "Unable to find Bootstrap SSoT Repository configured in Nautobot, please ensure a git repository with a name containing 'Bootstrap' is present and provides 'configcontext' type."
+                )
+            else:
+                repo = repo[0]
+                self.job.logger.debug(f"Sync the {repo.name} GitRepository.")
+                ensure_git_repository(repository_record=repo)
+                self.job.logger.info(f"Parsing the {repo.name} GitRepository.")
+                os.chdir(f"{repo.filesystem_path}")
+                directory_path = "./"
+                # Generates a variable for each file in the repository, named the same as the file name less the extension.
+                for filename in os.listdir("./"):
+                    if filename.endswith(".yaml") or filename.endswith(".yml"):
+                        with open(os.path.join(directory_path, filename), "r") as file:
+                            yaml_data = yaml.safe_load(file)
+                        variable_name = os.path.splitext(filename)[0]
+                        globals()[variable_name] = yaml_data
+
+                branch_vars = globals()[environment_label]
+                global_settings = globals().get("global_settings")
+
+        # Ensure global_settings is loaded
+        if global_settings is None:
+            self.job.logger.error("global_settings not loaded. 
Check if the file exists in the correct directory.") + return + + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["tenant_group"]: + if global_settings["tenant_group"] is not None: # noqa: F821 + for bs_tenant_group in global_settings["tenant_group"]: # noqa: F821 + self.load_tenant_group(bs_tenant_group=bs_tenant_group, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["tenant"]: + if global_settings["tenant"] is not None: # noqa: F821 + for bs_tenant in global_settings["tenant"]: # noqa: F821 + self.load_tenant(bs_tenant=bs_tenant, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["role"]: + if global_settings["role"] is not None: # noqa: F821 + for bs_role in global_settings["role"]: # noqa: F821 + self.load_role(bs_role=bs_role, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["manufacturer"]: + if global_settings["manufacturer"] is not None: # noqa: F821 + for bs_manufacturer in global_settings["manufacturer"]: # noqa: F821 + self.load_manufacturer(bs_manufacturer=bs_manufacturer, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["platform"]: + if global_settings["platform"] is not None: # noqa: F821 + for bs_platform in global_settings["platform"]: # noqa: F821 + self.load_platform(bs_platform=bs_platform, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["location_type"]: + if global_settings["location_type"] is not None: # noqa: F821 + for bs_location_type in global_settings["location_type"]: # noqa: F821 + self.load_location_type(bs_location_type=bs_location_type, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["location"]: + if global_settings["location"] is not None: # noqa: F821 + for bs_location in global_settings["location"]: # noqa: F821 + self.load_location(bs_location=bs_location, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["team"]: + if global_settings["team"] is not None: # noqa: F821 + for bs_team in global_settings["team"]: # noqa: F821 + self.load_team(bs_team=bs_team, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["contact"]: + if global_settings["contact"] is not None: # noqa: F821 + for bs_contact in global_settings["contact"]: # noqa: F821 + self.load_contact(bs_contact=bs_contact, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["provider"]: + if global_settings["provider"] is not None: # noqa: F821 + for bs_provider in global_settings["provider"]: # noqa: F821 + self.load_provider(bs_provider=bs_provider, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["provider_network"]: + if global_settings["provider_network"] is not None: # noqa: F821 + for bs_provider_network in global_settings["provider_network"]: # noqa: F821 + self.load_provider_network(bs_provider_network=bs_provider_network, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["circuit_type"]: + if global_settings["circuit_type"] is not None: # noqa: F821 + for bs_circuit_type in global_settings["circuit_type"]: # noqa: F821 + self.load_circuit_type(bs_circuit_type=bs_circuit_type, branch_vars=branch_vars) + if 
settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["circuit"]: + if global_settings["circuit"] is not None: # noqa: F821 + for bs_circuit in global_settings["circuit"]: # noqa: F821 + self.load_circuit(bs_circuit=bs_circuit, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["circuit_termination"]: + if global_settings["circuit_termination"] is not None: # noqa: F821 + for bs_circuit_termination in global_settings["circuit_termination"]: # noqa: F821 + self.load_circuit_termination( + bs_circuit_termination=bs_circuit_termination, + branch_vars=branch_vars, + ) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["namespace"]: + if global_settings["namespace"] is not None: # noqa: F821 + for bs_namespace in global_settings["namespace"]: # noqa: F821 + self.load_namespace(bs_namespace=bs_namespace, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["rir"]: + if global_settings["rir"] is not None: # noqa: F821 + for bs_rir in global_settings["rir"]: # noqa: F821 + self.load_rir(bs_rir=bs_rir, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["vlan_group"]: + if global_settings["vlan_group"] is not None: # noqa: F821 + for bs_vlan_group in global_settings["vlan_group"]: # noqa: F821 + self.load_vlan_group(bs_vlan_group=bs_vlan_group, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["vlan"]: + if global_settings["vlan"] is not None: # noqa: F821 + for bs_vlan in global_settings["vlan"]: # noqa: F821 + self.load_vlan(bs_vlan=bs_vlan, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["vrf"]: + if global_settings["vrf"] is not None: # noqa: F821 + for bs_vrf in global_settings["vrf"]: # noqa: F821 + self.load_vrf(bs_vrf=bs_vrf, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["prefix"]: + if global_settings["prefix"] is not None: # noqa: F821 + for bs_prefix in global_settings["prefix"]: # noqa: F821 + self.load_prefix(bs_prefix=bs_prefix, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["secret"]: + if global_settings["secret"] is not None: # noqa: F821 + for bs_secret in global_settings["secret"]: # noqa: F821 + self.load_secret(bs_secret=bs_secret, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["secrets_group"]: + if global_settings["secrets_group"] is not None: # noqa: F821 + for bs_sg in global_settings["secrets_group"]: # noqa: F821 + self.load_secrets_group(bs_sg=bs_sg, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["git_repository"]: + if global_settings["git_repository"] is not None: # noqa: F821 + for git_repo in global_settings["git_repository"]: # noqa: F821 + self.load_git_repository(git_repo=git_repo, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["dynamic_group"]: + if global_settings["dynamic_group"] is not None: # noqa: F821 + for dyn_group in global_settings["dynamic_group"]: # noqa: F821 + self.load_dynamic_group(dyn_group=dyn_group) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["computed_field"]: + if global_settings["computed_field"] is not None: # noqa: F821 + for computed_field in global_settings["computed_field"]: # 
noqa: F821 + self.load_computed_field(comp_field=computed_field) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["tag"]: + if global_settings["tag"] is not None: # noqa: F821 + for tag in global_settings["tag"]: # noqa: F821 + self.load_tag(tag=tag) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["graph_ql_query"]: + if global_settings["graph_ql_query"] is not None: # noqa F821 + for graph_ql_query in global_settings["graph_ql_query"]: # noqa F821 + self.load_graph_ql_query(query=graph_ql_query) + if LIFECYCLE_MGMT: + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["software"]: + for software in global_settings["software"]: # noqa: F821 + self.load_software(software=software) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["software_image"]: + for software_image in global_settings["software_image"]: # noqa: F821 + self.load_software_image(software_image=software_image) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["validated_software"]: + for validated_software in global_settings["validated_software"]: # noqa: F821 + self.load_validated_software(validated_software=validated_software) diff --git a/nautobot_ssot/integrations/bootstrap/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/bootstrap/diffsync/adapters/nautobot.py new file mode 100755 index 000000000..0ab1c6e29 --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/diffsync/adapters/nautobot.py @@ -0,0 +1,1327 @@ +"""Nautobot Adapter for bootstrap SSoT plugin.""" + +from diffsync import Adapter +from diffsync.enum import DiffSyncModelFlags +from diffsync.exceptions import ObjectAlreadyExists, ObjectNotFound +from django.conf import settings +from nautobot.circuits.models import ( + Circuit, + CircuitTermination, + CircuitType, + Provider, + ProviderNetwork, +) +from nautobot.dcim.models import ( + Location, + LocationType, + Manufacturer, + Platform, +) +from nautobot.extras.models import ( + ComputedField, + Contact, + DynamicGroup, + GitRepository, + GraphQLQuery, + Role, + Secret, + SecretsGroup, + Status, + Tag, + Team, +) +from nautobot.ipam.models import ( + RIR, + VLAN, + VRF, + Namespace, + Prefix, + VLANGroup, +) +from nautobot.tenancy.models import Tenant, TenantGroup + +from nautobot_ssot.integrations.bootstrap.diffsync.models.nautobot import ( + NautobotCircuit, + NautobotCircuitTermination, + NautobotCircuitType, + NautobotComputedField, + NautobotContact, + NautobotDynamicGroup, + NautobotGitRepository, + NautobotGraphQLQuery, + NautobotLocation, + NautobotLocationType, + NautobotManufacturer, + NautobotNamespace, + NautobotPlatform, + NautobotPrefix, + NautobotProvider, + NautobotProviderNetwork, + NautobotRiR, + NautobotRole, + NautobotSecret, + NautobotSecretsGroup, + NautobotTag, + NautobotTeam, + NautobotTenant, + NautobotTenantGroup, + NautobotVLAN, + NautobotVLANGroup, + NautobotVRF, +) +from nautobot_ssot.integrations.bootstrap.utils import ( + check_sor_field, + get_sor_field_nautobot_object, + lookup_content_type_model_path, + lookup_model_for_role_id, + lookup_model_for_taggable_class_id, +) +from nautobot_ssot.integrations.bootstrap.utils.nautobot import ( + get_prefix_location_assignments, + get_vrf_prefix_assignments, +) + +try: + import nautobot_device_lifecycle_mgmt # noqa: F401 + + LIFECYCLE_MGMT = True +except ImportError: + LIFECYCLE_MGMT = False + +if LIFECYCLE_MGMT: + # noqa: F401 + from nautobot_device_lifecycle_mgmt.models import ( + SoftwareImageLCM as 
ORMSoftwareImage, + ) + from nautobot_device_lifecycle_mgmt.models import ( + SoftwareLCM as ORMSoftware, + ) + + # noqa: F401 + from nautobot_device_lifecycle_mgmt.models import ( + ValidatedSoftwareLCM as ORMValidatedSoftware, + ) + + # noqa: F401 + from nautobot_ssot.integrations.bootstrap.diffsync.models.nautobot import ( # noqa: F401 + NautobotSoftware, + NautobotSoftwareImage, + NautobotValidatedSoftware, + ) + + +class NautobotAdapter(Adapter): + """DiffSync adapter for Nautobot.""" + + tenant_group = NautobotTenantGroup + tenant = NautobotTenant + role = NautobotRole + manufacturer = NautobotManufacturer + platform = NautobotPlatform + location_type = NautobotLocationType + location = NautobotLocation + team = NautobotTeam + contact = NautobotContact + provider = NautobotProvider + provider_network = NautobotProviderNetwork + circuit_type = NautobotCircuitType + circuit = NautobotCircuit + circuit_termination = NautobotCircuitTermination + namespace = NautobotNamespace + rir = NautobotRiR + vlan_group = NautobotVLANGroup + vlan = NautobotVLAN + vrf = NautobotVRF + prefix = NautobotPrefix + secret = NautobotSecret + secrets_group = NautobotSecretsGroup + git_repository = NautobotGitRepository + dynamic_group = NautobotDynamicGroup + computed_field = NautobotComputedField + tag = NautobotTag + graph_ql_query = NautobotGraphQLQuery + + if LIFECYCLE_MGMT: + software = NautobotSoftware + software_image = NautobotSoftwareImage + validated_software = NautobotValidatedSoftware + + top_level = [ + "tag", + "tenant_group", + "tenant", + "role", + "manufacturer", + "platform", + "location_type", + "location", + "team", + "contact", + "provider", + "provider_network", + "circuit_type", + "circuit", + "namespace", + "rir", + "vlan_group", + "vlan", + "vrf", + "prefix", + "secret", + "secrets_group", + "git_repository", + "dynamic_group", + "computed_field", + "graph_ql_query", + ] + + if LIFECYCLE_MGMT: + top_level.append("software") + top_level.append("software_image") + top_level.append("validated_software") + + def __init__(self, *args, job=None, sync=None, **kwargs): # noqa: D417 + """Initialize Nautobot. + + Args: + job (object, optional): Nautobot job. Defaults to None. + sync (object, optional): Nautobot DiffSync. Defaults to None. 
+ """ + super().__init__(*args, **kwargs) + self.job = job + self.sync = sync + + def load_tenant_group(self): + """Method to load TenantGroup objects from Nautobot into NautobotTenantGroup DiffSync models.""" + for nb_tenant_group in TenantGroup.objects.all(): + self.job.logger.debug(f"Loading Nautobot TenantGroup: {nb_tenant_group}, with ID: {nb_tenant_group.id}") + try: + self.get(self.tenant_group, nb_tenant_group.name) + except ObjectNotFound: + try: + _parent = nb_tenant_group.parent.name + except AttributeError: + _parent = "" + _sor = "" + if "system_of_record" in nb_tenant_group.custom_field_data: + _sor = ( + nb_tenant_group.custom_field_data["system_of_record"] + if nb_tenant_group.custom_field_data["system_of_record"] is not None + else "" + ) + new_tenant_group = self.tenant_group( + name=nb_tenant_group.name, + parent=_parent, + description=nb_tenant_group.description, + system_of_record=_sor, + uuid=nb_tenant_group.id, + ) + self.job.logger.info(f"Loading Nautobot Tenant Group - {nb_tenant_group.name}") + + if not check_sor_field(nb_tenant_group): + new_tenant_group.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_tenant_group) + + def load_tenant(self): + """Method to load Tenant objects from Nautobot into NautobotTenant DiffSync models.""" + for nb_tenant in Tenant.objects.all(): + self.job.logger.debug(f"Loading Nautobot Tenant: {nb_tenant}, with ID: {nb_tenant.id}") + _tags = sorted(list(nb_tenant.tags.all().values_list("name", flat=True))) + try: + self.get(self.tenant, nb_tenant.name) + except ObjectNotFound: + try: + _tenant_group = nb_tenant.tenant_group.name + except AttributeError: + _tenant_group = None + _sor = "" + if "system_of_record" in nb_tenant.custom_field_data: + _sor = ( + nb_tenant.custom_field_data["system_of_record"] + if nb_tenant.custom_field_data["system_of_record"] is not None + else "" + ) + new_tenant = self.tenant( + name=nb_tenant.name, + tenant_group=_tenant_group, + description=nb_tenant.description, + tags=_tags, + system_of_record=_sor, + uuid=nb_tenant.id, + ) + self.job.logger.info(f"Loading Nautobot Tenant - {nb_tenant.name}") + + if not check_sor_field(nb_tenant): + new_tenant.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_tenant) + + def load_role(self): + """Method to load Role objects from Nautobot into NautobotRole DiffSync models.""" + for nb_role in Role.objects.all(): + self.job.logger.debug(f"Loading Nautobot Role: {nb_role}, with ID {nb_role.id}") + try: + self.get(self.role, nb_role.name) + except ObjectNotFound: + _content_types = [] + _content_uuids = nb_role.content_types.values_list("model", "id") + for _uuid in _content_uuids: + _content_types.append(lookup_model_for_role_id(_uuid[1])) + _content_types.sort() + _sor = "" + if "system_of_record" in nb_role.custom_field_data: + _sor = ( + nb_role.custom_field_data["system_of_record"] + if nb_role.custom_field_data["system_of_record"] is not None + else "" + ) + new_role = self.role( + name=nb_role.name, + weight=nb_role.weight, + description=nb_role.description, + color=nb_role.color, + content_types=_content_types, + system_of_record=_sor, + ) + + if not check_sor_field(nb_role): + new_role.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_role) + _content_types.clear() + + def load_manufacturer(self): + """Method to load Manufacturer objects from Nautobot into NautobotManufacturer DiffSync models.""" + for nb_manufacturer in Manufacturer.objects.all(): + self.job.logger.debug(f"Loading Nautobot Manufacturer: 
{nb_manufacturer}, with ID {nb_manufacturer.id}") + try: + self.get(self.manufacturer, nb_manufacturer.name) + except ObjectNotFound: + _sor = "" + if "system_of_record" in nb_manufacturer.custom_field_data: + _sor = ( + nb_manufacturer.custom_field_data["system_of_record"] + if nb_manufacturer.custom_field_data["system_of_record"] is not None + else "" + ) + new_manufacturer = self.manufacturer( + name=nb_manufacturer.name, + description=nb_manufacturer.description, + uuid=nb_manufacturer.id, + system_of_record=_sor, + ) + + if not check_sor_field(nb_manufacturer): + new_manufacturer.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_manufacturer) + + def load_platform(self): + """Method to load Platform objects from Nautobot into NautobotPlatform DiffSync models.""" + for nb_platform in Platform.objects.all(): + self.job.logger.debug(f"Loading Nautobot Platform: {nb_platform}, with ID {nb_platform.id}") + try: + self.get(self.platform, nb_platform.name) + except ObjectNotFound: + if isinstance(nb_platform.napalm_args, str): + _napalm_args = {} + else: + _napalm_args = nb_platform.napalm_args + _sor = "" + if "system_of_record" in nb_platform.custom_field_data: + _sor = ( + nb_platform.custom_field_data["system_of_record"] + if nb_platform.custom_field_data["system_of_record"] is not None + else "" + ) + new_platform = self.platform( + name=nb_platform.name, + manufacturer=nb_platform.manufacturer.name, + network_driver=nb_platform.network_driver, + napalm_driver=nb_platform.napalm_driver, + napalm_arguments=_napalm_args, + description=nb_platform.description, + system_of_record=_sor, + uuid=nb_platform.id, + ) + + if not check_sor_field(nb_platform): + new_platform.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_platform) + + def load_location_type(self): + """Method to load LocationType objects from Nautobot into NautobotLocationType DiffSync models.""" + for nb_location_type in LocationType.objects.all(): + self.job.logger.debug(f"Loading Nautobot LocationType: {nb_location_type}, with ID {nb_location_type.id}") + try: + self.get(self.location_type, nb_location_type.name) + except ObjectNotFound: + _content_types = [] + _content_uuids = nb_location_type.content_types.values_list("id", flat=True) + if nb_location_type.parent is not None: + _parent = nb_location_type.parent.name + else: + _parent = None + for _uuid in _content_uuids: + _content_types.append(lookup_content_type_model_path(nb_model="locations", content_id=_uuid)) + if len(_content_types) > 1: + try: + _content_types.sort() + except TypeError: + self.job.logger.warning( + f"One of your content types is not able to be associated with LocationType {nb_location_type}. Please check and try again. 
{_content_types}" + ) + _sor = "" + if "system_of_record" in nb_location_type.custom_field_data: + _sor = ( + nb_location_type.custom_field_data["system_of_record"] + if nb_location_type.custom_field_data["system_of_record"] is not None + else "" + ) + new_location_type = self.location_type( + name=nb_location_type.name, + parent=_parent, + nestable=nb_location_type.nestable if not None else False, + description=nb_location_type.description, + content_types=_content_types, + system_of_record=_sor, + uuid=nb_location_type.id, + ) + + if not check_sor_field(nb_location_type): + new_location_type.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_location_type) + _content_types.clear() + + def load_location(self): + """Method to load Location objects from Nautobot into NautobotLocation DiffSync models.""" + for nb_location in Location.objects.all(): + self.job.logger.debug(f"Loading Nautobot Location: {nb_location}, with ID {nb_location.id}") + try: + self.get(self.location, nb_location.name) + except ObjectNotFound: + _tags = [] + if nb_location.parent is not None: + _parent = nb_location.parent.name + else: + _parent = None + if nb_location.time_zone is not None: + try: + _time_zone = nb_location.time_zone.zone + except AttributeError: + _time_zone = nb_location.time_zone + else: + _time_zone = None + if nb_location.tenant is not None: + _tenant = nb_location.tenant.name + else: + _tenant = None + if nb_location.tags is not None: + for _tag in nb_location.tags.values_list("name", flat=True): + _tags.append(_tag) + _sor = "" + if "system_of_record" in nb_location.custom_field_data: + _sor = ( + nb_location.custom_field_data["system_of_record"] + if nb_location.custom_field_data["system_of_record"] is not None + else "" + ) + new_location = self.location( + name=nb_location.name, + location_type=nb_location.location_type.name, + parent=_parent, + status=nb_location.status.name, + facility=nb_location.facility, + asn=nb_location.asn, + time_zone=_time_zone, + description=nb_location.description, + tenant=_tenant, + physical_address=nb_location.physical_address, + shipping_address=nb_location.shipping_address, + latitude=nb_location.latitude, + longitude=nb_location.longitude, + contact_name=nb_location.contact_name, + contact_phone=nb_location.contact_phone, + contact_email=nb_location.contact_email, + tags=_tags, + system_of_record=_sor, + uuid=nb_location.id, + ) + + if not check_sor_field(nb_location): + new_location.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_location) + + def load_team(self): + """Method to load Team objects from Nautobot into NautobotTeam DiffSync models.""" + for nb_team in Team.objects.all(): + self.job.logger.debug(f"Loading Nautobot Team: {nb_team}, with ID: {nb_team.id}") + try: + self.get(self.team, nb_team.name) + except ObjectNotFound: + if nb_team.contacts is not None: + _contacts = [] + for _contact in nb_team.contacts.values_list("name", flat=True): + _contacts.append(_contact) + _contacts.sort() + _sor = "" + if "system_of_record" in nb_team.custom_field_data: + _sor = ( + nb_team.custom_field_data["system_of_record"] + if nb_team.custom_field_data["system_of_record"] is not None + else "" + ) + new_team = self.team( + name=nb_team.name, + phone=nb_team.phone, + email=nb_team.email, + address=nb_team.address, + # TODO: Need to consider how to allow loading from teams or contacts models. 
+ # contacts=_contacts, + system_of_record=_sor, + uuid=nb_team.id, + ) + + if not check_sor_field(nb_team): + new_team.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_team) + + def load_contact(self): + """Method to load Contact Objects from Nautobot into NautobotContact DiffSync models.""" + for nb_contact in Contact.objects.all(): + self.job.logger.debug(f"Loading Nautobot contact: {nb_contact}, with ID: {nb_contact.id}") + try: + self.get(self.contact, nb_contact.name) + except ObjectNotFound: + if nb_contact.teams is not None: + _teams = [] + for _team in nb_contact.teams.values_list("name", flat=True): + _teams.append(_team) + _teams.sort() + _sor = "" + if "system_of_record" in nb_contact.custom_field_data: + _sor = ( + nb_contact.custom_field_data["system_of_record"] + if nb_contact.custom_field_data["system_of_record"] is not None + else "" + ) + new_contact = self.contact( + name=nb_contact.name, + phone=nb_contact.phone, + email=nb_contact.email, + address=nb_contact.address, + teams=_teams, + system_of_record=_sor, + uuid=nb_contact.id, + ) + + if not check_sor_field(nb_contact): + new_contact.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_contact) + + def load_provider(self): + """Method to load Provider objects from Nautobot into NautobotProvider DiffSync models.""" + for nb_provider in Provider.objects.all(): + self.job.logger.debug(f"Loading Nautobot Provider: {nb_provider}, with ID {nb_provider.id}") + try: + self.get(self.provider, nb_provider.name) + except ObjectNotFound: + if nb_provider.tags is not None: + _tags = [] + for _tag in nb_provider.tags.values_list("name", flat=True): + _tags.append(_tag) + _tags.sort() + else: + _tags = None + _sor = "" + if "system_of_record" in nb_provider.custom_field_data: + _sor = ( + nb_provider.custom_field_data["system_of_record"] + if nb_provider.custom_field_data["system_of_record"] is not None + else "" + ) + new_provider = self.provider( + name=nb_provider.name, + asn=nb_provider.asn, + account_number=nb_provider.account, + portal_url=nb_provider.portal_url, + noc_contact=nb_provider.noc_contact, + admin_contact=nb_provider.admin_contact, + tags=_tags, + system_of_record=_sor, + uuid=nb_provider.id, + ) + + if not check_sor_field(nb_provider): + new_provider.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_provider) + + def load_provider_network(self): + """Method to load ProviderNetwork objects from Nautobot into NautobotProviderNetwork DiffSync models.""" + for nb_provider_network in ProviderNetwork.objects.all(): + self.job.logger.debug( + f"Loading Nautobot ProviderNetwork: {nb_provider_network}, with ID {nb_provider_network.id}" + ) + try: + self.get(self.provider_network, nb_provider_network.name) + except ObjectNotFound: + if nb_provider_network.tags is not None: + _tags = [] + for _tag in nb_provider_network.tags.values_list("name", flat=True): + _tags.append(_tag) + _tags.sort() + else: + _tags = None + _sor = "" + if "system_of_record" in nb_provider_network.custom_field_data: + _sor = ( + nb_provider_network.custom_field_data["system_of_record"] + if nb_provider_network.custom_field_data["system_of_record"] is not None + else "" + ) + new_provider_network = self.provider_network( + name=nb_provider_network.name, + provider=nb_provider_network.provider.name, + description=nb_provider_network.description, + comments=nb_provider_network.comments, + tags=_tags, + system_of_record=_sor, + uuid=nb_provider_network.id, + ) + + if not 
check_sor_field(nb_provider_network): + new_provider_network.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_provider_network) + + def load_circuit_type(self): + """Method to load CircuitType objects from Nautobot into NautobotCircuitType DiffSync models.""" + for nb_circuit_type in CircuitType.objects.all(): + self.job.logger.debug(f"Loading Nautobot CircuitType: {nb_circuit_type}, with ID {nb_circuit_type.id}") + try: + self.get(self.circuit_type, nb_circuit_type.name) + except ObjectNotFound: + _sor = "" + if "system_of_record" in nb_circuit_type.custom_field_data: + _sor = ( + nb_circuit_type.custom_field_data["system_of_record"] + if nb_circuit_type.custom_field_data["system_of_record"] is not None + else "" + ) + new_circuit_type = self.circuit_type( + name=nb_circuit_type.name, + description=nb_circuit_type.description, + system_of_record=_sor, + uuid=nb_circuit_type.id, + ) + + if not check_sor_field(nb_circuit_type): + new_circuit_type.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_circuit_type) + + def load_circuit(self): + """Method to load Circuit objects from Nautobot into NautobotCircuit DiffSync models.""" + for nb_circuit in Circuit.objects.all(): + self.job.logger.debug(f"Loading Nautobot Circuit: {nb_circuit}, with ID {nb_circuit.id}") + try: + self.get(self.circuit, nb_circuit.cid) + except ObjectNotFound: + if nb_circuit.tags is not None: + _tags = [] + for _tag in nb_circuit.tags.values_list("name", flat=True): + _tags.append(_tag) + _tags.sort() + else: + _tags = None + _sor = "" + if "system_of_record" in nb_circuit.custom_field_data: + _sor = ( + nb_circuit.custom_field_data["system_of_record"] + if nb_circuit.custom_field_data["system_of_record"] is not None + else "" + ) + new_circuit = self.circuit( + circuit_id=nb_circuit.cid, + provider=nb_circuit.provider.name, + circuit_type=nb_circuit.circuit_type.name, + status=nb_circuit.status.name, + date_installed=nb_circuit.install_date, + commit_rate_kbps=nb_circuit.commit_rate, + description=nb_circuit.description, + tenant=(nb_circuit.tenant.name if nb_circuit.tenant is not None else None), + tags=_tags, + system_of_record=_sor, + uuid=nb_circuit.id, + ) + + if not check_sor_field(nb_circuit): + new_circuit.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_circuit) + + def load_circuit_termination(self): + """Method to load CircuitTermination objects from Nautobot into NautobotCircuitTermination DiffSync models.""" + for nb_circuit_termination in CircuitTermination.objects.all(): + self.job.logger.debug( + f"Loading Nautobot CircuitTermination {nb_circuit_termination}, with ID: {nb_circuit_termination.id}" + ) + _term_name = f"{nb_circuit_termination.circuit.cid}__{nb_circuit_termination.circuit.provider.name}__{nb_circuit_termination.term_side}" + try: + self.get(self.circuit_termination, _term_name) + except ObjectNotFound: + if nb_circuit_termination.tags is not None: + _tags = [] + for _tag in nb_circuit_termination.tags.values_list("name", flat=True): + _tags.append(_tag) + _tags.sort() + else: + _tags = None + _sor = "" + if "system_of_record" in nb_circuit_termination.custom_field_data: + _sor = ( + nb_circuit_termination.custom_field_data["system_of_record"] + if nb_circuit_termination.custom_field_data["system_of_record"] is not None + else "" + ) + if nb_circuit_termination.provider_network: + _termination_type = "Provider Network" + if nb_circuit_termination.location: + _termination_type = "Location" + new_circuit_termination = 
self.circuit_termination( + name=_term_name, + termination_type=_termination_type, + termination_side=nb_circuit_termination.term_side, + circuit_id=nb_circuit_termination.circuit.cid, + provider_network=( + nb_circuit_termination.provider_network.name + if nb_circuit_termination.provider_network is not None + else None + ), + location=( + nb_circuit_termination.location.name if nb_circuit_termination.location is not None else None + ), + port_speed_kbps=nb_circuit_termination.port_speed, + upstream_speed_kbps=nb_circuit_termination.upstream_speed, + cross_connect_id=nb_circuit_termination.xconnect_id, + patch_panel_or_ports=nb_circuit_termination.pp_info, + description=nb_circuit_termination.description, + tags=_tags, + system_of_record=_sor, + uuid=nb_circuit_termination.id, + ) + + if not check_sor_field(nb_circuit_termination): + new_circuit_termination.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_circuit_termination) + try: + _circuit = self.get( + self.circuit, + { + "circuit_id": nb_circuit_termination.circuit.cid, + "provider": nb_circuit_termination.circuit.provider.name, + }, + ) + _circuit.add_child(new_circuit_termination) + except ObjectAlreadyExists as err: + self.job.logger.warning(f"CircuitTermination for {_circuit} already exists. {err}") + + def load_namespace(self): + """Method to load Namespace objects from Nautobot into NautobotNamespace DiffSync models.""" + for nb_namespace in Namespace.objects.all(): + self.job.logger.debug(f"Loading Nautobot Namespace {nb_namespace}, with ID: {nb_namespace.id}") + try: + self.get(self.namespace, nb_namespace.name) + except ObjectNotFound: + _sor = get_sor_field_nautobot_object(nb_namespace) + try: + _location = Location.objects.get(id=nb_namespace.location_id).name + except Location.DoesNotExist: + _location = "" + new_namespace = self.namespace( + name=nb_namespace.name, + description=nb_namespace.description, + location=_location, + system_of_record=_sor, + uuid=nb_namespace.id, + ) + if not check_sor_field(nb_namespace): + new_namespace.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(new_namespace) + + def load_rir(self): + """Method to load RiR objects from Nautobot into NautobotRiR DiffSync models.""" + for nb_rir in RIR.objects.all(): + self.job.logger.debug(f"Loading Nautobot RiR {nb_rir}, with ID {nb_rir.id}") + try: + self.get(self.rir, nb_rir.name) + except ObjectNotFound: + _sor = get_sor_field_nautobot_object(nb_rir) + new_rir = self.rir( + name=nb_rir.name, + private=nb_rir.is_private, + description=nb_rir.description, + system_of_record=_sor, + uuid=nb_rir.id, + ) + if not check_sor_field(nb_rir): + new_rir.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(new_rir) + + def load_vlan_group(self): + """Method to load VLANGroup objects from Nautobot into NautobotVLANGroup DiffSync models.""" + for nb_vlan_group in VLANGroup.objects.all(): + self.job.logger.debug(f"Loading Nautobot VLANGroup {nb_vlan_group}, with ID {nb_vlan_group.id}") + try: + self.get(self.vlan_group, nb_vlan_group.name) + except ObjectNotFound: + _sor = get_sor_field_nautobot_object(nb_vlan_group) + if nb_vlan_group.location: + _location = nb_vlan_group.location.name + else: + _location = "" + new_vlan_group = self.vlan_group( + name=nb_vlan_group.name, + description=nb_vlan_group.description, + location=_location, + system_of_record=_sor, + uuid=nb_vlan_group.id, + ) + if not check_sor_field(nb_vlan_group): + new_vlan_group.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + 
self.add(new_vlan_group)
+
+    def load_vlan(self):
+        """Method to load VLAN objects from Nautobot into NautobotVLAN DiffSync models."""
+        for nb_vlan in VLAN.objects.all():
+            self.job.logger.debug(f"Loading Nautobot VLAN {nb_vlan}, with ID {nb_vlan.id}")
+            try:
+                self.get(
+                    self.vlan,
+                    {
+                        "name": nb_vlan.name,
+                        "vid": nb_vlan.vid,
+                        "vlan_group": (nb_vlan.vlan_group.name if nb_vlan.vlan_group else ""),
+                    },
+                )
+            except ObjectNotFound:
+                _locations = []
+                _tags = []
+                _sor = get_sor_field_nautobot_object(nb_vlan)
+                if nb_vlan.locations:
+                    for _location in nb_vlan.locations.values_list("name", flat=True):
+                        _locations.append(_location)
+                if nb_vlan.tags:
+                    for _tag in nb_vlan.tags.values_list("name", flat=True):
+                        _tags.append(_tag)
+                new_vlan = self.vlan(
+                    name=nb_vlan.name,
+                    vid=nb_vlan.vid,
+                    vlan_group=nb_vlan.vlan_group.name if nb_vlan.vlan_group else None,
+                    role=nb_vlan.role.name if nb_vlan.role else None,
+                    description=nb_vlan.description,
+                    status=nb_vlan.status.name,
+                    locations=_locations,
+                    tenant=nb_vlan.tenant.name if nb_vlan.tenant else None,
+                    tags=_tags,
+                    system_of_record=_sor,
+                    uuid=nb_vlan.id,
+                )
+                if not check_sor_field(nb_vlan):
+                    new_vlan.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST
+                self.add(new_vlan)
+
+    def load_vrf(self):
+        """Method to load VRF objects from Nautobot into NautobotVRF DiffSync models."""
+        for nb_vrf in VRF.objects.all():
+            self.job.logger.debug(f"Loading Nautobot VRF {nb_vrf}, with ID {nb_vrf.id}")
+            try:
+                self.get(
+                    self.vrf,
+                    {"name": nb_vrf.name, "namespace": nb_vrf.namespace.name},
+                )
+            except ObjectNotFound:
+                _tags = []
+                _sor = get_sor_field_nautobot_object(nb_vrf)
+                if nb_vrf.tags:
+                    for _tag in nb_vrf.tags.values_list("name", flat=True):
+                        _tags.append(_tag)
+                new_vrf = self.vrf(
+                    name=nb_vrf.name,
+                    namespace=Namespace.objects.get(id=nb_vrf.namespace_id).name,
+                    route_distinguisher=nb_vrf.rd,
+                    description=nb_vrf.description,
+                    tenant=(Tenant.objects.get(id=nb_vrf.tenant_id).name if nb_vrf.tenant_id else None),
+                    tags=_tags,
+                    system_of_record=_sor,
+                    uuid=nb_vrf.id,
+                )
+                if not check_sor_field(nb_vrf):
+                    new_vrf.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST
+                self.add(new_vrf)
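Throughout this adapter, records that fail `check_sor_field()` are flagged `DiffSyncModelFlags.SKIP_UNMATCHED_DST`, so objects Nautobot already owns are never deleted just because the Bootstrap YAML does not mention them. The helper lives in `nautobot_ssot/integrations/bootstrap/utils`; a plausible minimal reading of its contract, not the verbatim source, is:

```python
import os


def check_sor_field(model):
    """Sketch: True when the object is stamped as owned by this system of record."""
    return (
        "system_of_record" in model.custom_field_data
        and model.custom_field_data["system_of_record"] is not None
        and model.custom_field_data["system_of_record"] == os.getenv("SYSTEM_OF_RECORD", "Bootstrap")
    )
```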
+
+    def load_prefix(self):
+        """Method to load Prefix objects from Nautobot into NautobotPrefix DiffSync models."""
+        for nb_prefix in Prefix.objects.all():
+            self.job.logger.debug(f"Loading Nautobot Prefix {nb_prefix}, with ID {nb_prefix.id}")
+            try:
+                self.get(
+                    self.prefix,
+                    {
+                        "network": nb_prefix.network,
+                        "namespace": nb_prefix.namespace.name,
+                    },
+                )
+            except ObjectNotFound:
+                _tags = []
+                _vlan = None
+                _sor = get_sor_field_nautobot_object(nb_prefix)
+                if nb_prefix.tags:
+                    for _tag in nb_prefix.tags.values_list("name", flat=True):
+                        _tags.append(_tag)
+                if nb_prefix.vlan:
+                    if nb_prefix.vlan.vlan_group:
+                        _group = nb_prefix.vlan.vlan_group.name
+                    else:
+                        _group = "None"
+                    _vlan = f"{nb_prefix.vlan.name}__{nb_prefix.vlan.vid}__{_group}"
+                _vrfs = get_vrf_prefix_assignments(prefix=nb_prefix)
+                _locations = get_prefix_location_assignments(prefix=nb_prefix)
+                new_prefix = self.prefix(
+                    network=f"{nb_prefix.network}/{nb_prefix.prefix_length}",
+                    namespace=Namespace.objects.get(id=nb_prefix.namespace_id).name,
+                    prefix_type=nb_prefix.type,
+                    status=Status.objects.get(id=nb_prefix.status_id).name,
+                    role=nb_prefix.role.name if nb_prefix.role else None,
+                    rir=(RIR.objects.get(id=nb_prefix.rir_id).name if nb_prefix.rir_id else None),
+                    date_allocated=(
+                        nb_prefix.date_allocated.replace(tzinfo=None) if nb_prefix.date_allocated else None
+                    ),
+                    description=nb_prefix.description,
+                    vrfs=_vrfs,
+                    locations=_locations,
+                    vlan=_vlan,
+                    tenant=(Tenant.objects.get(id=nb_prefix.tenant_id).name if nb_prefix.tenant_id else None),
+                    tags=_tags,
+                    system_of_record=_sor,
+                    uuid=nb_prefix.id,
+                )
+                if not check_sor_field(nb_prefix):
+                    new_prefix.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST
+                self.add(new_prefix)
+
+    def load_secret(self):
+        """Method to load Secret objects from Nautobot into NautobotSecret DiffSync models."""
+        for nb_secret in Secret.objects.all():
+            self.job.logger.debug(f"Loading Nautobot Secret: {nb_secret}, with ID: {nb_secret.id}")
+            try:
+                self.get(self.secret, nb_secret.name)
+            except ObjectNotFound:
+                _sor = ""
+                if "system_of_record" in nb_secret.custom_field_data:
+                    _sor = (
+                        nb_secret.custom_field_data["system_of_record"]
+                        if nb_secret.custom_field_data["system_of_record"] is not None
+                        else ""
+                    )
+                new_secret = self.secret(
+                    name=nb_secret.name,
+                    provider=nb_secret.provider,
+                    parameters=nb_secret.parameters,
+                    system_of_record=_sor,
+                    uuid=nb_secret.id,
+                )
+                self.job.logger.info(f"Loading Nautobot secret - {nb_secret.name}")
+
+                if not check_sor_field(nb_secret):
+                    new_secret.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST
+
+                self.add(new_secret)
+
+    def load_secrets_group(self):
+        """Method to load SecretsGroup objects from Nautobot into NautobotSecretsGroup DiffSync models."""
+        for nb_sg in SecretsGroup.objects.all():
+            _secrets = []
+            for nb_secret in nb_sg.secrets_group_associations.all():
+                _secrets.append(
+                    {
+                        "name": nb_secret.secret.name,
+                        "secret_type": nb_secret.secret_type,
+                        "access_type": nb_secret.access_type,
+                    }
+                )
+            _secrets = sorted(_secrets, key=lambda x: x["name"])
+            self.job.logger.debug(f"Loading Nautobot SecretsGroup: {nb_sg}")
+            try:
+                self.get(self.secrets_group, nb_sg.name)
+            except ObjectNotFound:
+                _sor = ""
+                if "system_of_record" in nb_sg.custom_field_data:
+                    _sor = (
+                        nb_sg.custom_field_data["system_of_record"]
+                        if nb_sg.custom_field_data["system_of_record"] is not None
+                        else ""
+                    )
+                new_sg = self.secrets_group(
+                    name=nb_sg.name,
+                    secrets=_secrets,
+                    system_of_record=_sor,
+                    uuid=nb_sg.id,
+                )
+                self.job.logger.info(f"Loading Nautobot SecretsGroup - {nb_sg.name}")
+
+                if not check_sor_field(nb_sg):
+                    new_sg.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST
+
+                self.add(new_sg)
+
+    def load_git_repository(self):
+        """Method to load GitRepository objects from Nautobot into NautobotGitRepository DiffSync models."""
+        for nb_gr in GitRepository.objects.all():
+            self.job.logger.debug(f"Loading Nautobot GitRepository: {nb_gr}")
+            try:
+                self.get(self.git_repository, nb_gr.name)
+            except ObjectNotFound:
+                try:
+                    _secrets_group = nb_gr.secrets_group.name
+                except AttributeError:
+                    _secrets_group = None
+                _sor = ""
+                if "system_of_record" in nb_gr.custom_field_data:
+                    _sor = (
+                        nb_gr.custom_field_data["system_of_record"]
+                        if nb_gr.custom_field_data["system_of_record"] is not None
+                        else ""
+                    )
+                new_gr = self.git_repository(
+                    name=nb_gr.name,
+                    url=nb_gr.remote_url,
+                    branch=nb_gr.branch,
+                    secrets_group=_secrets_group,
+                    provided_contents=nb_gr.provided_contents,
+                    system_of_record=_sor,
+                    uuid=nb_gr.id,
+                )
+
+                if not check_sor_field(nb_gr):
+                    new_gr.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST
+
+                self.add(new_gr)
+
+    def load_dynamic_group(self):
+        """Method to load DynamicGroup objects from Nautobot into NautobotDynamicGroup DiffSync models."""
+        for nb_dyn_group in DynamicGroup.objects.all():
self.job.logger.debug(f"Loading Nautobot DynamicGroup {nb_dyn_group}") + try: + self.get(self.dynamic_group, nb_dyn_group.name) + except ObjectNotFound: + _content_type = lookup_content_type_model_path( + nb_model="dynamic_groups", content_id=nb_dyn_group.content_type.id + ) + if _content_type is None: + self.job.logger.warning( + f"Could not find ContentType for {nb_dyn_group.name} with ContentType ID {nb_dyn_group.content_type.id}" + ) + _sor = "" + if "system_of_record" in nb_dyn_group.custom_field_data: + _sor = ( + nb_dyn_group.custom_field_data["system_of_record"] + if nb_dyn_group.custom_field_data["system_of_record"] is not None + else "" + ) + new_dyn_group = self.dynamic_group( + name=nb_dyn_group.name, + content_type=_content_type, + dynamic_filter=nb_dyn_group.filter, + description=nb_dyn_group.description, + system_of_record=_sor, + uuid=nb_dyn_group.id, + ) + + if not check_sor_field(nb_dyn_group): + new_dyn_group.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_dyn_group) + + def load_computed_field(self): + """Method to load ComputedField objects from Nautobot into NautobotComputedField DiffSync models.""" + for nb_comp_field in ComputedField.objects.all(): + self.job.logger.debug(f"Loading Nautobot ComputedField {nb_comp_field}") + try: + self.get(self.computed_field, nb_comp_field.label) + except ObjectNotFound: + _content_type = lookup_content_type_model_path( + nb_model="custom_fields", content_id=nb_comp_field.content_type.id + ) + if _content_type is None: + self.job.logger.warning( + f"Could not find ContentType for {nb_comp_field.label} with ContentType {nb_comp_field.content_type}, and ContentType ID {nb_comp_field.content_type.id}" + ) + new_computed_field = self.computed_field( + label=nb_comp_field.label, + content_type=_content_type, + template=nb_comp_field.template, + ) + new_computed_field.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(new_computed_field) + + def load_tag(self): + """Method to load Tag objects from Nautobot into NautobotTag DiffSync Models.""" + for nb_tag in Tag.objects.all(): + self.job.logger.debug(f"Loading Nautobot Tag {nb_tag}") + try: + self.get(self.tag, nb_tag.name) + except ObjectNotFound: + _content_types = [] + _content_uuids = nb_tag.content_types.values_list("model", "id") + for _uuid in _content_uuids: + _content_types.append(lookup_model_for_taggable_class_id(_uuid[1])) + _content_types.sort() + _sor = "" + if "system_of_record" in nb_tag.custom_field_data: + _sor = ( + nb_tag.custom_field_data["system_of_record"] + if nb_tag.custom_field_data["system_of_record"] is not None + else "" + ) + new_tag = self.tag( + name=nb_tag.name, + color=nb_tag.color, + content_types=_content_types, + description=nb_tag.description, + system_of_record=_sor, + uuid=nb_tag.id, + ) + + if not check_sor_field(nb_tag): + new_tag.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_tag) + _content_types.clear() + + def load_graph_ql_query(self): + """Method to load GraphQLQuery objects from Nautobot into NautobotGraphQLQuery Models.""" + for query in GraphQLQuery.objects.all(): + self.job.logger.debug(f"Loading Nautobot GraphQLQuery {query}") + try: + self.get(self.graph_ql_query, query.name) + except ObjectNotFound: + new_query = self.graph_ql_query(name=query.name, query=query.query) + new_query.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(new_query) + + def load_software(self): + """Method to load Software objects from Nautobot into NautobotSoftware Models.""" + for 
nb_software in ORMSoftware.objects.all():
+            self.job.logger.debug(f"Loading Nautobot SoftwareLCM {nb_software}")
+            try:
+                self.get(
+                    self.software,
+                    {
+                        "version": nb_software.version,
+                        "platform": nb_software.device_platform.name,
+                    },
+                )
+            except ObjectNotFound:
+                _tags = list(
+                    ORMSoftware.objects.get(
+                        version=nb_software.version,
+                        device_platform=nb_software.device_platform.id,
+                    )
+                    .tags.all()
+                    .values_list("name", flat=True)
+                )
+                _sor = ""
+                if "system_of_record" in nb_software.custom_field_data:
+                    _sor = (
+                        nb_software.custom_field_data["system_of_record"]
+                        if nb_software.custom_field_data["system_of_record"] is not None
+                        else ""
+                    )
+                new_software = self.software(
+                    version=nb_software.version,
+                    platform=nb_software.device_platform.name,
+                    alias=nb_software.alias,
+                    release_date=nb_software.release_date,
+                    eos_date=nb_software.end_of_support,
+                    documentation_url=nb_software.documentation_url,
+                    long_term_support=nb_software.long_term_support,
+                    pre_release=nb_software.pre_release,
+                    tags=_tags,
+                    system_of_record=_sor,
+                    uuid=nb_software.id,
+                )
+
+                if not check_sor_field(nb_software):
+                    new_software.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST
+
+                self.add(new_software)
+
+    def load_software_image(self):
+        """Method to load SoftwareImage objects from Nautobot into NautobotSoftwareImage Models."""
+        for nb_software_image in ORMSoftwareImage.objects.all():
+            self.job.logger.debug(f"Loading Nautobot SoftwareImageLCM {nb_software_image}")
+            try:
+                self.get(self.software_image, nb_software_image.image_file_name)
+            except ObjectNotFound:
+                _tags = list(
+                    ORMSoftwareImage.objects.get(image_file_name=nb_software_image.image_file_name)
+                    .tags.all()
+                    .values_list("name", flat=True)
+                )
+                _sor = ""
+                if "system_of_record" in nb_software_image.custom_field_data:
+                    _sor = (
+                        nb_software_image.custom_field_data["system_of_record"]
+                        if nb_software_image.custom_field_data["system_of_record"] is not None
+                        else ""
+                    )
+                new_software_image = self.software_image(
+                    file_name=nb_software_image.image_file_name,
+                    software=f"{nb_software_image.software.device_platform} - {nb_software_image.software.version}",
+                    platform=nb_software_image.software.device_platform.name,
+                    software_version=nb_software_image.software.version,
+                    download_url=nb_software_image.download_url,
+                    image_file_checksum=nb_software_image.image_file_checksum,
+                    hashing_algorithm=nb_software_image.hashing_algorithm,
+                    default_image=nb_software_image.default_image,
+                    tags=_tags,
+                    system_of_record=_sor,
+                    uuid=nb_software_image.id,
+                )
+
+                if not check_sor_field(nb_software_image):
+                    new_software_image.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST
+
+                self.add(new_software_image)
+
+    def load_validated_software(self):
+        """Method to load ValidatedSoftware objects from Nautobot into NautobotValidatedSoftware Models."""
+        for nb_validated_software in ORMValidatedSoftware.objects.all():
+            self.job.logger.debug(f"Loading Nautobot ValidatedSoftwareLCM {nb_validated_software}")
+            try:
+                _software = ORMSoftware.objects.get(
+                    version=nb_validated_software.software.version,
+                    device_platform=nb_validated_software.software.device_platform.id,
+                )
+                self.get(
+                    self.validated_software,
+                    {
+                        "software": f"{nb_validated_software.software.device_platform} - {nb_validated_software.software.version}",
+                        "valid_since": nb_validated_software.start,
+                        "valid_until": nb_validated_software.end,
+                    },
+                )
+            except ObjectNotFound:
+                _val_software = ORMValidatedSoftware.objects.get(
+                    software=_software,
+                    start=nb_validated_software.start,
+                    end=nb_validated_software.end,
+                )
_tags = sorted(list(_val_software.tags.all().values_list("name", flat=True))) + _devices = sorted(list(_val_software.devices.all().values_list("name", flat=True))) + _device_types = sorted(list(_val_software.device_types.all().values_list("model", flat=True))) + _device_roles = sorted(list(_val_software.device_roles.all().values_list("name", flat=True))) + _inventory_items = sorted(list(_val_software.inventory_items.all().values_list("name", flat=True))) + _object_tags = sorted(list(_val_software.object_tags.all().values_list("name", flat=True))) + _sor = "" + if "system_of_record" in nb_validated_software.custom_field_data: + _sor = ( + nb_validated_software.custom_field_data["system_of_record"] + if nb_validated_software.custom_field_data["system_of_record"] is not None + else "" + ) + new_validated_software = self.validated_software( + software=f"{nb_validated_software.software.device_platform} - {nb_validated_software.software.version}", + software_version=nb_validated_software.software.version, + platform=nb_validated_software.software.device_platform.name, + valid_since=nb_validated_software.start, + valid_until=nb_validated_software.end, + preferred_version=nb_validated_software.preferred, + devices=_devices, + device_types=_device_types, + device_roles=_device_roles, + inventory_items=_inventory_items, + object_tags=_object_tags, + tags=_tags, + system_of_record=_sor, + uuid=nb_validated_software.id, + ) + + if not check_sor_field(nb_validated_software): + new_validated_software.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_validated_software) + + def load(self): + """Load data from Nautobot into DiffSync models.""" + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["tenant_group"]: + self.load_tenant_group() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["tenant"]: + self.load_tenant() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["role"]: + self.load_role() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["manufacturer"]: + self.load_manufacturer() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["platform"]: + self.load_platform() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["location_type"]: + self.load_location_type() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["location"]: + self.load_location() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["team"]: + self.load_team() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["contact"]: + self.load_contact() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["provider"]: + self.load_provider() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["provider_network"]: + self.load_provider_network() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["circuit_type"]: + self.load_circuit_type() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["circuit"]: + self.load_circuit() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["circuit_termination"]: + self.load_circuit_termination() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["namespace"]: + self.load_namespace() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["rir"]: + self.load_rir() + if 
settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["vlan_group"]: + self.load_vlan_group() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["vlan"]: + self.load_vlan() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["vrf"]: + self.load_vrf() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["prefix"]: + self.load_prefix() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["secret"]: + self.load_secret() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["secrets_group"]: + self.load_secrets_group() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["git_repository"]: + self.load_git_repository() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["dynamic_group"]: + self.load_dynamic_group() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["computed_field"]: + self.load_computed_field() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["tag"]: + self.load_tag() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["graph_ql_query"]: + self.load_graph_ql_query() + if LIFECYCLE_MGMT: + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["software"]: + self.load_software() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["software_image"]: + self.load_software_image() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["validated_software"]: + self.load_validated_software() diff --git a/nautobot_ssot/integrations/bootstrap/diffsync/models/__init__.py b/nautobot_ssot/integrations/bootstrap/diffsync/models/__init__.py new file mode 100644 index 000000000..f5d2b91b3 --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/diffsync/models/__init__.py @@ -0,0 +1 @@ +"""DiffSync models and adapters for the bootstrap SSoT plugin.""" diff --git a/nautobot_ssot/integrations/bootstrap/diffsync/models/base.py b/nautobot_ssot/integrations/bootstrap/diffsync/models/base.py new file mode 100755 index 000000000..f702008c3 --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/diffsync/models/base.py @@ -0,0 +1,764 @@ +"""DiffSyncModel subclasses for Nautobot-to-bootstrap data sync.""" + +import datetime +from typing import List, Optional +from uuid import UUID + +from diffsync import DiffSyncModel + + +class Secret(DiffSyncModel): + """DiffSync model for Bootstrap Secrets.""" + + _modelname = "secret" + _identifiers = ("name",) + _attributes = ("provider", "parameters", "system_of_record") + _children = {} + + name: str + provider: str + parameters: dict + system_of_record: str + + uuid: Optional[UUID] = None + + +class SecretsGroup(DiffSyncModel): + """DiffSync model for Bootstrap SecretsGroups.""" + + _modelname = "secrets_group" + _identifiers = ("name",) + _attributes = ("secrets", "system_of_record") + _children = {} + + name: str + secrets: List["dict"] = [] + system_of_record: str + + uuid: Optional[UUID] = None + + +class GitRepository(DiffSyncModel): + """DiffSync model for Bootstrap GitRepositories.""" + + _modelname = "git_repository" + _identifiers = ("name",) + _attributes = ( + "url", + "branch", + "secrets_group", + "provided_contents", + "system_of_record", + ) + _children = {} + + name: str + url: str + branch: str + secrets_group: Optional[str] = None + provided_contents: List[str] = [] + system_of_record: str + + uuid: Optional[UUID] = None + + +class 
DynamicGroup(DiffSyncModel): + """DiffSync model for Bootstrap DynamicGroups.""" + + _modelname = "dynamic_group" + _identifiers = ("name", "content_type") + _attributes = ("dynamic_filter", "description", "system_of_record") + _children = {} + + name: str + content_type: str + dynamic_filter: dict + description: str + system_of_record: str + + uuid: Optional[UUID] = None + + +class ComputedField(DiffSyncModel): + """DiffSync model for Bootstrap ComputedFields.""" + + _modelname = "computed_field" + _identifiers = ("label",) + _attributes = ( + "content_type", + "template", + ) + _children = {} + + label: str + content_type: str + template: str + + uuid: Optional[UUID] = None + + +class Tag(DiffSyncModel): + """DiffSync model for Bootstrap Tags.""" + + _modelname = "tag" + _identifiers = ("name",) + _attributes = ("color", "content_types", "description", "system_of_record") + _children = {} + + name: str + color: str + content_types: List[str] = [] + description: str + system_of_record: str + + uuid: Optional[UUID] = None + + +class GraphQLQuery(DiffSyncModel): + """DiffSync Model for Bootstrap GraphQLQueries.""" + + _modelname = "graph_ql_query" + _identifiers = ("name",) + _attributes = ("query",) + _children = {} + + name: str + query: str + + uuid: Optional[UUID] = None + + +class Software(DiffSyncModel): + """DiffSync Model for Bootstrap Software.""" + + _modelname = "software" + _identifiers = ( + "version", + "platform", + ) + _attributes = ( + "alias", + "release_date", + "eos_date", + "long_term_support", + "pre_release", + "documentation_url", + "tags", + "system_of_record", + ) + _children = {} + + version: str + platform: str + alias: Optional[str] = None + release_date: Optional[datetime.date] = None + eos_date: Optional[datetime.date] = None + documentation_url: Optional[str] = None + long_term_support: bool + pre_release: bool + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class SoftwareImage(DiffSyncModel): + """DiffSync Model for Bootstrap SoftwareImage.""" + + _modelname = "software_image" + _identifiers = ("software",) + _attributes = ( + "platform", + "software_version", + "file_name", + "download_url", + "image_file_checksum", + "hashing_algorithm", + "default_image", + "tags", + "system_of_record", + ) + _children = {} + + software: str + platform: str + software_version: str + file_name: str + download_url: Optional[str] = None + image_file_checksum: Optional[str] = None + hashing_algorithm: str + default_image: bool + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class ValidatedSoftware(DiffSyncModel): + """DiffSync Model for Bootstrap ValidatedSoftware.""" + + _modelname = "validated_software" + _identifiers = ("software", "valid_since", "valid_until") + _attributes = ( + "devices", + "device_types", + "device_roles", + "inventory_items", + "object_tags", + "preferred_version", + "tags", + "platform", + "software_version", + "system_of_record", + ) + _children = {} + + devices: Optional[List[str]] = None + device_types: Optional[List[str]] = None + device_roles: Optional[List[str]] = None + inventory_items: Optional[List[str]] = None + object_tags: Optional[List[str]] = None + software: str + platform: str + software_version: str + valid_since: Optional[datetime.date] = None + valid_until: Optional[datetime.date] = None + preferred_version: bool + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class 
TenantGroup(DiffSyncModel): + """DiffSync Model for Bootstrap TenantGroup.""" + + _modelname = "tenant_group" + _identifiers = ("name", "parent") + _attributes = ( + "description", + "system_of_record", + ) + _children = {} + + name: str + parent: Optional[str] = None + description: Optional[str] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class Tenant(DiffSyncModel): + """DiffSync Model for Bootstrap Tenant.""" + + _modelname = "tenant" + _identifiers = ("name",) + _attributes = ("description", "tenant_group", "tags", "system_of_record") + _children = {} + + name: str + tenant_group: Optional[str] = None + description: Optional[str] = None + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class Role(DiffSyncModel): + """DiffSync Model for Bootstrap Role.""" + + _modelname = "role" + _identifiers = ("name",) + _attributes = ( + "weight", + "description", + "color", + "content_types", + "system_of_record", + ) + _children = {} + + name: str + weight: Optional[int] = None + description: Optional[str] = None + color: Optional[str] = None + content_types: List[str] = [] + system_of_record: str + + uuid: Optional[UUID] = None + + +class Team(DiffSyncModel): + """DiffSync Model for Bootstrap Team.""" + + _modelname = "team" + _identifiers = ("name",) + _attributes = ("phone", "email", "address", "contacts", "system_of_record") + _children = {} + + name: str + phone: Optional[str] = None + email: Optional[str] = None + address: Optional[str] = None + contacts: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class Contact(DiffSyncModel): + """DiffSync Model for Bootstrap Contact.""" + + _modelname = "contact" + _identifiers = ("name",) + _attributes = ("phone", "email", "address", "teams", "system_of_record") + _children = {} + + name: str + phone: Optional[str] = None + email: Optional[str] = None + address: Optional[str] = None + teams: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class Manufacturer(DiffSyncModel): + """DiffSync Model for Bootstrap Manufacturer.""" + + _modelname = "manufacturer" + _identifiers = ("name",) + _attributes = ( + "description", + "system_of_record", + ) + _children = {} + + name: str + description: Optional[str] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class Platform(DiffSyncModel): + """DiffSync Model for Bootstrap Platform.""" + + _modelname = "platform" + _identifiers = ( + "name", + "manufacturer", + ) + _attributes = ( + "network_driver", + "napalm_driver", + "napalm_arguments", + "description", + "system_of_record", + ) + _children = {} + + name: str + manufacturer: str + network_driver: Optional[str] = None + napalm_driver: Optional[str] = None + napalm_arguments: Optional[dict] = None + description: Optional[str] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class LocationType(DiffSyncModel): + """DiffSync Model for Bootstrap LocationType.""" + + _modelname = "location_type" + _identifiers = ("name",) + _attributes = ( + "parent", + "nestable", + "description", + "content_types", + "system_of_record", + ) + _children = {} + + name: str + parent: Optional[str] = None + nestable: Optional[bool] = None + description: Optional[str] = None + content_types: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class Location(DiffSyncModel): + """DiffSync Model for Bootstrap Location.""" + + _modelname = "location" + _identifiers = ( 
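+        # A Location is identified by its name together with its location_type.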
+ "name", + "location_type", + ) + _attributes = ( + "parent", + "status", + "facility", + "asn", + "time_zone", + "description", + "tenant", + "physical_address", + "shipping_address", + "latitude", + "longitude", + "contact_name", + "contact_phone", + "contact_email", + "tags", + "system_of_record", + ) + _children = {} + + name: str + location_type: str + parent: Optional[str] = None + status: Optional[str] = None + facility: Optional[str] = None + asn: Optional[int] = None + time_zone: Optional[str] = None + description: Optional[str] = None + tenant: Optional[str] = None + physical_address: Optional[str] = None + shipping_address: Optional[str] = None + latitude: Optional[float] = None + longitude: Optional[float] = None + contact_name: Optional[str] = None + contact_phone: Optional[str] = None + contact_email: Optional[str] = None + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class Provider(DiffSyncModel): + """DiffSync model for Bootstrap Provider.""" + + _modelname = "provider" + _identifiers = ("name",) + _attributes = ( + "asn", + "account_number", + "portal_url", + "noc_contact", + "admin_contact", + "tags", + "system_of_record", + ) + _children = {} + + name: str + asn: Optional[int] = None + account_number: Optional[str] = None + portal_url: Optional[str] = None + noc_contact: Optional[str] = None + admin_contact: Optional[str] = None + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class ProviderNetwork(DiffSyncModel): + """DiffSync model for Bootstrap ProviderNetwork.""" + + _modelname = "provider_network" + _identifiers = ( + "name", + "provider", + ) + _attributes = ("description", "comments", "tags", "system_of_record") + _children = {} + + name: str + provider: str + description: Optional[str] = None + comments: Optional[str] = None + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class CircuitType(DiffSyncModel): + """DiffSync model for Bootstrap CircuitType.""" + + _modelname = "circuit_type" + _identifiers = ("name",) + _attributes = ("description", "system_of_record") + _children = {} + + name: str + description: Optional[str] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class Circuit(DiffSyncModel): + """DiffSync model for Bootstrap Circuit.""" + + _modelname = "circuit" + _identifiers = ( + "circuit_id", + "provider", + ) + _attributes = ( + "circuit_type", + "status", + "date_installed", + "commit_rate_kbps", + "description", + "tenant", + "tags", + "system_of_record", + ) + _children = {"circuit_termination": "terminations"} + + circuit_id: str + provider: str + circuit_type: str + status: str + date_installed: Optional[datetime.date] = None + commit_rate_kbps: Optional[int] = None + description: Optional[str] = None + tenant: Optional[str] = None + tags: Optional[List[str]] = None + terminations: Optional[List["Circuit"]] = [] + system_of_record: Optional[str] = None + + uuid: Optional[UUID] = None + + +class CircuitTermination(DiffSyncModel): + """DiffSync model for Bootstrap CircuitTermination.""" + + _modelname = "circuit_termination" + _identifiers = ( + "name", + "termination_side", + "circuit_id", + ) + _attributes = ( + "termination_type", + "location", + "provider_network", + "port_speed_kbps", + "upstream_speed_kbps", + "cross_connect_id", + "patch_panel_or_ports", + "description", + "tags", + "system_of_record", + ) + _children = {} + + name: str + termination_type: str + 
termination_side: str + circuit_id: str + location: Optional[str] = None + provider_network: Optional[str] = None + port_speed_kbps: Optional[int] = None + upstream_speed_kbps: Optional[int] = None + cross_connect_id: Optional[str] = None + patch_panel_or_ports: Optional[str] = None + description: Optional[str] = None + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class Namespace(DiffSyncModel): + """DiffSync model for Bootstrap Namespace.""" + + _modelname = "namespace" + _identifiers = ("name",) + _attributes = ("description", "location", "system_of_record") + _children = {} + + name: str + description: Optional[str] = None + location: Optional[str] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class RiR(DiffSyncModel): + """DiffSync model for Bootstrap RiR.""" + + _modelname = "rir" + _identifiers = [ + "name", + ] + _attributes = [ + "private", + "description", + "system_of_record", + ] + _children = {} + + name: str + private: bool + description: Optional[str] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class VLANGroup(DiffSyncModel): + """DiffSync model for Bootstrap VLANGroup.""" + + _modelname = "vlan_group" + _identifiers = ("name",) + _attributes = ("location", "description", "system_of_record") + _children = {} + + name: str + location: Optional[str] = None + description: Optional[str] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class VLAN(DiffSyncModel): + """DiffSync model for Bootstrap VLAN.""" + + _modelname = "vlan" + _identifiers = ( + "name", + "vid", + "vlan_group", + ) + _attributes = ( + "description", + "status", + "role", + "locations", + "tenant", + "tags", + "system_of_record", + ) + _children = {} + + name: str + vid: int + vlan_group: Optional[str] = None + description: Optional[str] = None + status: Optional[str] = None + role: Optional[str] = None + locations: Optional[List[str]] = None + tenant: Optional[str] = None + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class VRF(DiffSyncModel): + """DiffSync model for Bootstrap VRF.""" + + _modelname = "vrf" + _identifiers = ( + "name", + "namespace", + ) + _attributes = ( + "route_distinguisher", + "description", + "tenant", + "tags", + "system_of_record", + ) + _children = {} + + name: str + namespace: Optional[str] = None + route_distinguisher: Optional[str] = None + description: Optional[str] = None + tenant: Optional[str] = None + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class Prefix(DiffSyncModel): + """DiffSync model for Bootstrap Prefix.""" + + _modelname = "prefix" + _identifiers = ( + "network", + "namespace", + ) + _attributes = ( + "prefix_type", + "status", + "role", + "rir", + "date_allocated", + "description", + "vrfs", + "locations", + "vlan", + "tenant", + "tags", + "system_of_record", + ) + _children = {} + + network: str + namespace: str + prefix_type: Optional[str] = None + status: Optional[str] = None + role: Optional[str] = None + rir: Optional[str] = None + date_allocated: Optional[datetime.datetime] = None + description: Optional[str] = None + vrfs: Optional[List[str]] = None + locations: Optional[List[str]] = None + vlan: Optional[str] = None + tenant: Optional[str] = None + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class SSoTJob(DiffSyncModel): + """DiffSync model for Bootstrap SSoTJobs.""" + + _modelname = "ssot-job" + 
_identifiers = ( + "name", + "schedule", + ) + _attributes = () + _children = {} + + name: str + schedule: str + + uuid: Optional[UUID] = None + + +Circuit.model_rebuild() +CircuitTermination.model_rebuild() diff --git a/nautobot_ssot/integrations/bootstrap/diffsync/models/bootstrap.py b/nautobot_ssot/integrations/bootstrap/diffsync/models/bootstrap.py new file mode 100755 index 000000000..86729e066 --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/diffsync/models/bootstrap.py @@ -0,0 +1,549 @@ +"""Nautobot Ssot Bootstrap DiffSync models for Nautobot Ssot Bootstrap SSoT.""" + +from nautobot_ssot.integrations.bootstrap.diffsync.models.base import ( + VLAN, + VRF, + Circuit, + CircuitTermination, + CircuitType, + ComputedField, + Contact, + DynamicGroup, + GitRepository, + GraphQLQuery, + Location, + LocationType, + Manufacturer, + Namespace, + Platform, + Prefix, + Provider, + ProviderNetwork, + RiR, + Role, + Secret, + SecretsGroup, + Tag, + Team, + Tenant, + TenantGroup, + VLANGroup, +) + +try: + import nautobot_device_lifecycle_mgmt # noqa: F401 + + LIFECYCLE_MGMT = True +except ImportError: + LIFECYCLE_MGMT = False + +if LIFECYCLE_MGMT: + from nautobot_ssot.integrations.bootstrap.diffsync.models.base import ( + Software, + SoftwareImage, + ValidatedSoftware, + ) + + +class BootstrapTenantGroup(TenantGroup): + """Bootstrap implementation of TenantGroup DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create TenantGroup in Bootstrap from BootstrapTenantGroup object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update TenantGroup in Bootstrap from BootstrapTenantGroup object.""" + return super().update(attrs) + + def delete(self): + """Delete TenantGroup in Bootstrap from BootstrapTenantGroup object.""" + return self + + +class BootstrapTenant(Tenant): + """Bootstrap implementation of TenantGroup DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Tenant in Bootstrap from BootstrapTenant object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Tenant in Bootstrap from BootstrapTenant object.""" + return super().update(attrs) + + def delete(self): + """Delete Tenant in Bootstrap from BootstrapTenant object.""" + return self + + +class BootstrapRole(Role): + """Bootstrap implementation of Role DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Role in Bootstrap from BootstrapRole object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Role in Bootstrap from BootstrapRole object.""" + return super().update(attrs) + + def delete(self): + """Delete Role in Bootstrap from BootstrapRole object.""" + return self + + +class BootstrapManufacturer(Manufacturer): + """Bootstrap implementation of Manufacturer DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Manufacturer in Bootstrap from BootstrapManufacturer object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Manufacturer in Bootstrap from BootstrapManufacturer object.""" + return super().update(attrs) + + def delete(self): + """Delete Manufacturer in Bootstrap from BootstrapManufacturer object.""" + return self + + +class BootstrapPlatform(Platform): + """Bootstrap implementation of Platform DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, 
attrs): + """Create Platform in Bootstrap from BootstrapPlatform object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Platform in Bootstrap from BootstrapPlatform object.""" + return super().update(attrs) + + def delete(self): + """Delete Platform in Bootstrap from BootstrapPlatform object.""" + return self + + +class BootstrapLocationType(LocationType): + """Bootstrap implementation of LocationType DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create LocationType in Bootstrap from BootstrapLocationType object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update LocationType in Bootstrap from BootstrapLocationType object.""" + return super().update(attrs) + + def delete(self): + """Delete LocationType in Bootstrap from BootstrapLocationType object.""" + return self + + +class BootstrapLocation(Location): + """Bootstrap implementation of Location DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Location in Bootstrap from BootstrapLocation object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Location in Bootstrap from BootstrapLocation object.""" + return super().update(attrs) + + def delete(self): + """Delete Location in Bootstrap from BootstrapLocation object.""" + return self + + +class BootstrapTeam(Team): + """Bootstrap implementation of Team DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Team in Bootstrap from BootstrapTeam object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Team in Bootstrap from BootstrapTeam object.""" + return super().update(attrs) + + def delete(self): + """Delete Team in Bootstrap from BootstrapTeam object.""" + return self + + +class BootstrapContact(Contact): + """Bootstrap implementation of Contact DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Contact in Bootstrap from BootstrapContact object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Contact in Bootstrap from BootstrapContact object.""" + return super().update(attrs) + + def delete(self): + """Delete Contact in Bootstrap from BootstrapContact object.""" + return self + + +class BootstrapProvider(Provider): + """Bootstrap implementation of Provider DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Provider in Bootstrap from BootstrapProvider object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Provider in Bootstrap from BootstrapProvider object.""" + return super().update(attrs) + + def delete(self): + """Delete Provider in Bootstrap from BootstrapProvider object.""" + return self + + +class BootstrapProviderNetwork(ProviderNetwork): + """Bootstrap implementation of ProviderNetwork DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create ProviderNetwork in Bootstrap from BootstrapProviderNetwork object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update ProviderNetwork in Bootstrap from BootstrapProviderNetwork object.""" + return super().update(attrs) + + def delete(self): + """Delete ProviderNetwork in Bootstrap from BootstrapProviderNetwork 
object.""" + return self + + +class BootstrapCircuitType(CircuitType): + """Bootstrap implementation of CircuitType DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create CircuitType in Bootstrap from BootstrapCircuitType object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update CircuitType in Bootstrap from BootstrapCircuitType object.""" + return super().update(attrs) + + def delete(self): + """Delete CircuitType in Bootstrap from BootstrapCircuitType object.""" + return self + + +class BootstrapCircuit(Circuit): + """Bootstrap implementation of Circuit DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Circuit in Bootstrap from BootstrapCircuit object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Circuit in Bootstrap from BootstrapCircuit object.""" + return super().update(attrs) + + def delete(self): + """Delete Circuit in Bootstrap from BootstrapCircuit object.""" + return self + + +class BootstrapCircuitTermination(CircuitTermination): + """Bootstrap implementation of CircuitTermination DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create CircuitTermination in Bootstrap from BootstrapCircuitTermination object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update CircuitTermination in Bootstrap from BootstrapCircuitTermination object.""" + return super().update(attrs) + + def delete(self): + """Delete CircuitTermination in Bootstrap from BootstrapCircuitTermination object.""" + return self + + +class BootstrapSecret(Secret): + """Bootstrap implementation of Secret DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Secret in Bootstrap from BootstrapSecret object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Secret in Bootstrap from BootstrapSecret object.""" + return super().update(attrs) + + def delete(self): + """Delete Secret in Bootstrap from BootstrapSecret object.""" + return self + + +class BootstrapSecretsGroup(SecretsGroup): + """Bootstrap implementation of Secret DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Secret in Bootstrap from BootstrapDevice object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Secret in Bootstrap from BootstrapSecret object.""" + return super().update(attrs) + + def delete(self): + """Delete Secret in Bootstrap from BootstrapSecret object.""" + return self + + +class BootstrapGitRepository(GitRepository): + """Bootstrap implementation of GitRepository DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create GitRepository in Bootstrap from BootstrapGitRepository object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update GitRepository in Bootstrap from BootstrapGitRepository object.""" + return super().update(attrs) + + def delete(self): + """Delete GitRepository in Bootstrap from BootstrapGitRepository object.""" + return self + + +class BootstrapDynamicGroup(DynamicGroup): + """Bootstrap implementation of DynamicGroup DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create DynamicGroup in Bootstrap from BootstrapDynamicGroup object.""" + return 
super().create(diffsync=diffsync, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update DynamicGroup in Bootstrap from BootstrapDynamicGroup object."""
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete DynamicGroup in Bootstrap from BootstrapDynamicGroup object."""
+        return self
+
+
+class BootstrapComputedField(ComputedField):
+    """Bootstrap implementation of ComputedField DiffSync model."""
+
+    @classmethod
+    def create(cls, diffsync, ids, attrs):
+        """Create ComputedField in Bootstrap from BootstrapComputedField object."""
+        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update ComputedField in Bootstrap from BootstrapComputedField object."""
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete ComputedField in Bootstrap from BootstrapComputedField object."""
+        return self
+
+
+class BootstrapTag(Tag):
+    """Bootstrap implementation of Bootstrap Tag model."""
+
+    @classmethod
+    def create(cls, diffsync, ids, attrs):
+        """Create Tag in Bootstrap from BootstrapTag object."""
+        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update Tag in Bootstrap from BootstrapTag object."""
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete Tag in Bootstrap from BootstrapTag object."""
+        return self
+
+
+class BootstrapGraphQLQuery(GraphQLQuery):
+    """Bootstrap implementation of Bootstrap GraphQLQuery model."""
+
+    @classmethod
+    def create(cls, diffsync, ids, attrs):
+        """Create GraphQLQuery in Bootstrap from BootstrapGraphQLQuery object."""
+        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update GraphQLQuery in Bootstrap from BootstrapGraphQLQuery object."""
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete GraphQLQuery in Bootstrap from BootstrapGraphQLQuery object."""
+        return self
+
+
+if LIFECYCLE_MGMT:
+
+    class BootstrapSoftware(Software):
+        """Bootstrap implementation of Bootstrap Software model."""
+
+        @classmethod
+        def create(cls, diffsync, ids, attrs):
+            """Create Software in Bootstrap from BootstrapSoftware object."""
+            return super().create(diffsync=diffsync, ids=ids, attrs=attrs)
+
+        def update(self, attrs):
+            """Update Software in Bootstrap from BootstrapSoftware object."""
+            return super().update(attrs)
+
+        def delete(self):
+            """Delete Software in Bootstrap from BootstrapSoftware object."""
+            return self
+
+    class BootstrapSoftwareImage(SoftwareImage):
+        """Bootstrap implementation of Bootstrap SoftwareImage model."""
+
+        @classmethod
+        def create(cls, diffsync, ids, attrs):
+            """Create SoftwareImage in Bootstrap from BootstrapSoftwareImage object."""
+            return super().create(diffsync=diffsync, ids=ids, attrs=attrs)
+
+        def update(self, attrs):
+            """Update SoftwareImage in Bootstrap from BootstrapSoftwareImage object."""
+            return super().update(attrs)
+
+        def delete(self):
+            """Delete SoftwareImage in Bootstrap from BootstrapSoftwareImage object."""
+            return self
+
+    class BootstrapValidatedSoftware(ValidatedSoftware):
+        """Bootstrap implementation of Bootstrap ValidatedSoftware model."""
+
+        @classmethod
+        def create(cls, diffsync, ids, attrs):
+            """Create ValidatedSoftware in Bootstrap from BootstrapValidatedSoftware object."""
+            return super().create(diffsync=diffsync, ids=ids, attrs=attrs)
+
+        def update(self, attrs):
+            """Update ValidatedSoftware in Bootstrap from BootstrapValidatedSoftware object."""
+            return super().update(attrs)
+
+        def delete(self):
+            """Delete ValidatedSoftware in Bootstrap from BootstrapValidatedSoftware object."""
+            return self
+
+
+class BootstrapNamespace(Namespace):
+    """Bootstrap implementation of Bootstrap Namespace model."""
+
+    @classmethod
+    def create(cls, diffsync, ids, attrs):
+        """Create Namespace in Bootstrap from BootstrapNamespace object."""
+        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update Namespace in Bootstrap from BootstrapNamespace object."""
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete Namespace in Bootstrap from BootstrapNamespace object."""
+        return self
+
+
+class BootstrapRiR(RiR):
+    """Bootstrap implementation of Bootstrap RiR model."""
+
+    @classmethod
+    def create(cls, diffsync, ids, attrs):
+        """Create RiR in Bootstrap from BootstrapRiR object."""
+        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update RiR in Bootstrap from BootstrapRiR object."""
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete RiR in Bootstrap from BootstrapRiR object."""
+        return self
+
+
+class BootstrapVLANGroup(VLANGroup):
+    """Bootstrap implementation of Bootstrap VLANGroup model."""
+
+    @classmethod
+    def create(cls, diffsync, ids, attrs):
+        """Create VLANGroup in Bootstrap from BootstrapVLANGroup object."""
+        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update VLANGroup in Bootstrap from BootstrapVLANGroup object."""
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete VLANGroup in Bootstrap from BootstrapVLANGroup object."""
+        return self
+
+
+class BootstrapVLAN(VLAN):
+    """Bootstrap implementation of Bootstrap VLAN model."""
+
+    @classmethod
+    def create(cls, diffsync, ids, attrs):
+        """Create VLAN in Bootstrap from BootstrapVLAN object."""
+        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update VLAN in Bootstrap from BootstrapVLAN object."""
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete VLAN in Bootstrap from BootstrapVLAN object."""
+        return self
+
+
+class BootstrapVRF(VRF):
+    """Bootstrap implementation of Bootstrap VRF model."""
+
+    @classmethod
+    def create(cls, diffsync, ids, attrs):
+        """Create VRF in Bootstrap from BootstrapVRF object."""
+        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update VRF in Bootstrap from BootstrapVRF object."""
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete VRF in Bootstrap from BootstrapVRF object."""
+        return self
+
+
+class BootstrapPrefix(Prefix):
+    """Bootstrap implementation of Bootstrap Prefix model."""
+
+    @classmethod
+    def create(cls, diffsync, ids, attrs):
+        """Create Prefix in Bootstrap from BootstrapPrefix object."""
+        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update Prefix in Bootstrap from BootstrapPrefix object."""
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete Prefix in Bootstrap from BootstrapPrefix object."""
+        return self
diff --git a/nautobot_ssot/integrations/bootstrap/diffsync/models/nautobot.py b/nautobot_ssot/integrations/bootstrap/diffsync/models/nautobot.py
new file mode 100755
index 000000000..95aa88156
--- /dev/null
+++ b/nautobot_ssot/integrations/bootstrap/diffsync/models/nautobot.py
@@ -0,0 +1,2471 @@
+"""Nautobot DiffSync models for bootstrap SSoT."""
+
+import os
+from datetime import datetime
+
+import pytz
+from django.contrib.contenttypes.models import ContentType
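+# NOTE: ORM classes are imported below under ORM-prefixed aliases so they remain
+# distinct from the same-named DiffSync models imported later in this module.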
+from django.core.exceptions import ValidationError +from django.db.models.deletion import ProtectedError +from nautobot.circuits.models import Circuit as ORMCircuit +from nautobot.circuits.models import CircuitTermination as ORMCircuitTermination +from nautobot.circuits.models import CircuitType as ORMCircuitType +from nautobot.circuits.models import Provider as ORMProvider +from nautobot.circuits.models import ProviderNetwork as ORMProviderNetwork +from nautobot.dcim.models import Device as ORMDevice +from nautobot.dcim.models import DeviceType as ORMDeviceType +from nautobot.dcim.models import InventoryItem as ORMInventoryItem +from nautobot.dcim.models import Location as ORMLocation +from nautobot.dcim.models import LocationType as ORMLocationType +from nautobot.dcim.models import Manufacturer as ORMManufacturer +from nautobot.dcim.models import Platform as ORMPlatform +from nautobot.extras.models import ComputedField as ORMComputedField +from nautobot.extras.models import Contact as ORMContact +from nautobot.extras.models import DynamicGroup as ORMDynamicGroup +from nautobot.extras.models import GitRepository as ORMGitRepository +from nautobot.extras.models import GraphQLQuery as ORMGraphQLQuery +from nautobot.extras.models import Role as ORMRole +from nautobot.extras.models import Secret as ORMSecret +from nautobot.extras.models import SecretsGroup as ORMSecretsGroup +from nautobot.extras.models import SecretsGroupAssociation as ORMSecretsGroupAssociation +from nautobot.extras.models import Status as ORMStatus +from nautobot.extras.models import Tag as ORMTag +from nautobot.extras.models import Team as ORMTeam +from nautobot.ipam.models import RIR as ORMRiR +from nautobot.ipam.models import VLAN as ORMVLAN +from nautobot.ipam.models import VRF as ORMVRF +from nautobot.ipam.models import Namespace as ORMNamespace +from nautobot.ipam.models import Prefix as ORMPrefix +from nautobot.ipam.models import VLANGroup as ORMVLANGroup +from nautobot.tenancy.models import Tenant as ORMTenant +from nautobot.tenancy.models import TenantGroup as ORMTenantGroup + +from nautobot_ssot.integrations.bootstrap.diffsync.models.base import ( + VLAN, + VRF, + Circuit, + CircuitTermination, + CircuitType, + ComputedField, + Contact, + DynamicGroup, + GitRepository, + GraphQLQuery, + Location, + LocationType, + Manufacturer, + Namespace, + Platform, + Prefix, + Provider, + ProviderNetwork, + RiR, + Role, + Secret, + SecretsGroup, + Tag, + Team, + Tenant, + TenantGroup, + VLANGroup, +) +from nautobot_ssot.integrations.bootstrap.utils import ( + # lookup_contact_for_team, + check_sor_field, + lookup_content_type_for_taggable_model_path, + lookup_content_type_id, + lookup_team_for_contact, +) + +try: + import nautobot_device_lifecycle_mgmt # noqa: F401 + + LIFECYCLE_MGMT = True +except ImportError: + LIFECYCLE_MGMT = False + +if LIFECYCLE_MGMT: + # noqa: F401 + from nautobot_device_lifecycle_mgmt.models import ( + SoftwareImageLCM as ORMSoftwareImage, + ) + + # noqa: F401 + from nautobot_device_lifecycle_mgmt.models import ( + SoftwareLCM as ORMSoftware, + ) + + # noqa: F401 + from nautobot_device_lifecycle_mgmt.models import ( + ValidatedSoftwareLCM as ORMValidatedSoftware, + ) + + from nautobot_ssot.integrations.bootstrap.diffsync.models.base import ( + Software, + SoftwareImage, + ValidatedSoftware, + ) + + +class NautobotTenantGroup(TenantGroup): + """Nautobot implementation of Bootstrap TenantGroup model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create TenantGroup in Nautobot from 
NautobotTenantGroup object.""" + _parent = None + if ids["parent"]: + _parent = ORMTenantGroup.objects.get(name=ids["parent"]) + adapter.job.logger.info(f'Creating Nautobot TenantGroup: {ids["name"]}') + if _parent is not None: + new_tenant_group = ORMTenantGroup(name=ids["name"], parent=_parent, description=attrs["description"]) + else: + new_tenant_group = ORMTenantGroup(name=ids["name"], description=attrs["description"]) + new_tenant_group.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + new_tenant_group.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + new_tenant_group.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update TenantGroup in Nautobot from NautobotTenantGroup object.""" + self.adapter.job.logger.debug(f"Updating TenantGroup {self.name} with {attrs}") + _update_tenant_group = ORMTenantGroup.objects.get(name=self.name) + if "description" in attrs: + _update_tenant_group.description = attrs["description"] + if not check_sor_field(_update_tenant_group): + _update_tenant_group.custom_field_data.update( + {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")} + ) + _update_tenant_group.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _update_tenant_group.validated_save() + return super().update(attrs) + + def delete(self): + """Delete TenantGroup in Nautobot from NautobotTenantGroup object.""" + try: + self.adapter.job.logger.debug(f"Attempting to delete TenantGroup: {self} - {self.uuid}") + _nb_tenant_group = ORMTenantGroup.objects.get(id=self.uuid) + super().delete() + _nb_tenant_group.delete() + return self + except ORMTenantGroup.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find TenantGroup {self.uuid} for deletion. 
{err}") + + +class NautobotTenant(Tenant): + """Nautobot implementation of Bootstrap Tenant model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create Tenant in Nautobot from NautobotTenant object.""" + _tags = [] + _tenant_group = None + _description = "" + if "tags" in attrs: + for _tag in attrs["tags"]: + _tags.append(ORMTag.objects.get(name=_tag)) + if "tenant_group" in attrs: + try: + _tenant_group = ORMTenantGroup.objects.get(name=attrs["tenant_group"]) + except ORMTenantGroup.DoesNotExist: + adapter.job.logger.warning( + f'Could not find TenantGroup {attrs["tenant_group"]} to assign to {ids["name"]}' + ) + if "description" in attrs: + _description = attrs["description"] + adapter.job.logger.info(f'Creating Nautobot Tenant: {ids["name"]}') + new_tenant = ORMTenant( + name=ids["name"], + tenant_group=_tenant_group, + tags=_tags, + description=_description, + ) + new_tenant.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + new_tenant.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + if "tags" in attrs: + for _tag in attrs["tags"]: + new_tenant.tags.add(_tag) + new_tenant.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Tenant in Nautobot from NautobotTenant object.""" + _update_tenant = ORMTenant.objects.get(name=self.name) + if "description" in attrs: + _update_tenant.description = attrs["description"] + if "tenant_group" in attrs: + try: + _update_tenant.tenant_group = ORMTenantGroup.objects.get(name=attrs["tenant_group"]) + except ORMTenantGroup.DoesNotExist: + self.adapter.job.logger.warning( + f'Could not find TenantGroup {attrs["tenant_group"]} to assign to {self.name}' + ) + if "tags" in attrs: + # FIXME: There might be a better way to handle this that's easier on the database. + _update_tenant.tags.clear() + for _tag in attrs["tags"]: + _update_tenant.tags.add(_tag) + if not check_sor_field(_update_tenant): + _update_tenant.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _update_tenant.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _update_tenant.validated_save() + return super().update(attrs) + + def delete(self): + """Delete Tenant in Nautobot from NautobotTenant object.""" + try: + self.adapter.job.logger.debug(f"Attempting to delete Tenant: {self} - {self.uuid}") + _nb_tenant = ORMTenant.objects.get(id=self.uuid) + super().delete() + _nb_tenant.delete() + return self + except ORMTenant.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find Tenant {self.uuid} for deletion. 
{err}") + + +class NautobotRole(Role): + """Nautobot implementation of Bootstrap Role model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create Role in Nautobot from NautobotRole object.""" + _content_types = [] + adapter.job.logger.info(f'Creating Nautobot Role: {ids["name"]}') + for _model in attrs["content_types"]: + _content_types.append(lookup_content_type_for_taggable_model_path(_model)) + _new_role = ORMRole( + name=ids["name"], + weight=attrs["weight"], + description=attrs["description"], + color=attrs["color"], + ) + _new_role.validated_save() + _new_role.content_types.set(_content_types) + _new_role.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_role.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_role.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Role in Nautobot from NautobotRole object.""" + _content_types = [] + self.adapter.job.logger.info(f"Updating Role {self.name}") + _update_role = ORMRole.objects.get(name=self.name) + if "weight" in attrs: + _update_role.weight = attrs["weight"] + if "description" in attrs: + _update_role.description = attrs["description"] + if "color" in attrs: + _update_role.color = attrs["color"] + if "content_types" in attrs: + for _model in attrs["content_types"]: + self.adapter.job.logger.debug(f"Looking up {_model} in content types.") + _content_types.append(lookup_content_type_for_taggable_model_path(_model)) + _update_role.content_types.set(_content_types) + if not check_sor_field(_update_role): + _update_role.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _update_role.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _update_role.validated_save() + return super().update(attrs) + + def delete(self): + """Delete Role in Nautobot from NautobotRole object.""" + try: + self.adapter.job.logger.debug(f"Attempting to delete Role: {self} - {self.uuid}") + _role = ORMRole.objects.get(id=self.uuid) + _role.delete() + super().delete() + return self + except ORMTenant.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find Role {self.uuid} for deletion. 
{err}") + + +class NautobotManufacturer(Manufacturer): + """Nautobot implementation of Bootstrap Manufacturer model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create Manufacturer in Nautobot from NautobotManufacturer object.""" + adapter.job.logger.debug(f'Creating Nautobot Manufacturer {ids["name"]}') + _new_manufacturer = ORMManufacturer(name=ids["name"], description=attrs["description"]) + _new_manufacturer.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_manufacturer.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_manufacturer.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Manufacturer in Nautobot from NautobotManufacturer object.""" + _update_manufacturer = ORMManufacturer.objects.get(name=self.name) + self.adapter.job.logger.info(f"Updating Manufacturer {self.name}") + if "description" in attrs: + _update_manufacturer.description = attrs["description"] + if not check_sor_field(_update_manufacturer): + _update_manufacturer.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _update_manufacturer.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _update_manufacturer.validated_save() + return super().update(attrs) + + def delete(self): + """Delete Manufacturer in Nautobot from NautobotManufacturer object.""" + try: + self.adapter.job.logger.debug(f"Attempting to delete Manufacturer: {self} - {self.uuid}") + _manufacturer = ORMManufacturer.objects.get(id=self.uuid) + _manufacturer.delete() + super().delete() + return self + except ORMManufacturer.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find Manufacturer {self.uuid} for deletion. {err}") + except ProtectedError as err: + self.adapter.job.logger.warning( + f"Unable to delete Manufacturer {self.name}, as it is referenced by another object. {err}" + ) + + +class NautobotPlatform(Platform): + """Nautobot implementation of Bootstrap Platform model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create Platform in Nautobot from NautobotPlatform object.""" + adapter.job.logger.info(f'Creating Nautobot Platform {ids["name"]}') + try: + _manufacturer = ORMManufacturer.objects.get(name=ids["manufacturer"]) + _new_platform = ORMPlatform( + name=ids["name"], + manufacturer=_manufacturer, + network_driver=attrs["network_driver"], + napalm_driver=attrs["napalm_driver"], + napalm_args=attrs["napalm_arguments"], + description=attrs["description"], + ) + _new_platform.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_platform.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_platform.validated_save() + except ORMManufacturer.DoesNotExist: + adapter.job.logger.warning( + f'Manufacturer {ids["manufacturer"]} does not exist in Nautobot, be sure to create it.' 
+            )
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update Platform in Nautobot from NautobotPlatform object."""
+        _update_platform = ORMPlatform.objects.get(name=self.name)
+        if "network_driver" in attrs:
+            _update_platform.network_driver = attrs["network_driver"]
+        if "napalm_driver" in attrs:
+            _update_platform.napalm_driver = attrs["napalm_driver"]
+        if "napalm_arguments" in attrs:
+            _update_platform.napalm_args = attrs["napalm_arguments"]
+        if "description" in attrs:
+            _update_platform.description = attrs["description"]
+        _update_platform.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        if not check_sor_field(_update_platform):
+            _update_platform.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        _update_platform.validated_save()
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete Platform in Nautobot from NautobotPlatform object."""
+        try:
+            self.adapter.job.logger.debug(f"Attempting to delete Platform: {self} - {self.uuid}")
+            _platform = ORMPlatform.objects.get(id=self.uuid)
+            _platform.delete()
+            super().delete()
+            return self
+        except ORMPlatform.DoesNotExist as err:
+            self.adapter.job.logger.warning(f"Unable to find Platform {self.uuid} for deletion. {err}")
+        except ProtectedError as err:
+            self.adapter.job.logger.warning(
+                f"Unable to delete Platform {self.name}, as it is referenced by another object. {err}"
+            )
+
+
+class NautobotLocationType(LocationType):
+    """Nautobot implementation of Bootstrap LocationType model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create LocationType in Nautobot from NautobotLocationType object."""
+        _content_types = []
+        adapter.job.logger.info(f'Creating Nautobot LocationType: {ids["name"]}')
+        _parent = None
+        if "parent" in attrs:
+            try:
+                _parent = ORMLocationType.objects.get(name=attrs["parent"])
+            except ORMLocationType.DoesNotExist:
+                adapter.job.logger.warning(
+                    f'Could not find LocationType {attrs["parent"]} in Nautobot, ensure it exists.'
+                )
+        _new_location_type = ORMLocationType(
+            name=ids["name"],
+            parent=_parent,
+            nestable=attrs["nestable"] if attrs["nestable"] is not None else False,
+            description=attrs["description"],
+        )
+        _new_location_type.validated_save()
+        for _model in attrs["content_types"]:
+            adapter.job.logger.debug(f"Looking up {_model} in content types.")
+            _content_types.append(lookup_content_type_for_taggable_model_path(_model))
+        _new_location_type.content_types.set(_content_types)
+        _new_location_type.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        _new_location_type.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        _new_location_type.validated_save()
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update LocationType in Nautobot from NautobotLocationType object."""
+        _content_types = []
+        self.adapter.job.logger.info(f"Updating LocationType {self.name}")
+        _update_location_type = ORMLocationType.objects.get(id=self.uuid)
+        if "parent" in attrs:
+            try:
+                _parent = ORMLocationType.objects.get(name=attrs["parent"])
+                _update_location_type.parent = _parent
+            except ORMLocationType.DoesNotExist:
+                self.adapter.job.logger.warning(
+                    f'Parent LocationType {attrs["parent"]} does not exist, ensure it exists first.'
+                )
+        if "nestable" in attrs:
+            _update_location_type.nestable = attrs["nestable"]
+        if "description" in attrs:
+            _update_location_type.description = attrs["description"]
+        if "content_types" in attrs:
+            for _model in attrs["content_types"]:
+                self.adapter.job.logger.debug(f"Looking up {_model} in content types.")
+                _content_types.append(lookup_content_type_for_taggable_model_path(_model))
+            _update_location_type.content_types.set(_content_types)
+        if not check_sor_field(_update_location_type):
+            _update_location_type.custom_field_data.update(
+                {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}
+            )
+        _update_location_type.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        _update_location_type.validated_save()
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete LocationType in Nautobot from NautobotLocationType object."""
+        try:
+            self.adapter.job.logger.debug(f"Attempting to delete LocationType: {self} - {self.uuid}")
+            _location_type = ORMLocationType.objects.get(id=self.uuid)
+            _location_type.delete()
+            super().delete()
+            return self
+        except ORMLocationType.DoesNotExist as err:
+            self.adapter.job.logger.warning(f"Unable to find LocationType {self.uuid} for deletion. {err}")
+        except ProtectedError as err:
+            self.adapter.job.logger.warning(
+                f"Unable to delete LocationType {self.name}, as it is referenced by another object. {err}"
+            )
+
+
+class NautobotLocation(Location):
+    """Nautobot implementation of Bootstrap Location model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create Location in Nautobot from NautobotLocation object."""
+        adapter.job.logger.info(f'Creating Nautobot Location {ids["name"]}')
+
+        try:
+            _parent = None
+            _tenant = None
+            _timezone = None
+            _tags = []
+            _location_type = ORMLocationType.objects.get(name=ids["location_type"])
+            _status = ORMStatus.objects.get(name=attrs["status"])
+            if "parent" in attrs:
+                if attrs["parent"]:
+                    _parent = ORMLocation.objects.get(name=attrs["parent"])
+            if "tenant" in attrs:
+                if attrs["tenant"]:
+                    _tenant = ORMTenant.objects.get(name=attrs["tenant"])
+            if "time_zone" in attrs:
+                if attrs["time_zone"]:
+                    _timezone = pytz.timezone(attrs["time_zone"])
+            if "tags" in attrs:
+                for _tag in attrs["tags"]:
+                    _tags.append(ORMTag.objects.get(name=_tag))
+            _new_location = ORMLocation(
+                name=ids["name"],
+                location_type=_location_type,
+                parent=_parent,
+                status=_status,
+                facility=attrs["facility"],
+                asn=attrs["asn"],
+                time_zone=_timezone,
+                description=attrs["description"],
+                tenant=_tenant,
+                physical_address=attrs["physical_address"],
+                shipping_address=attrs["shipping_address"],
+                latitude=attrs["latitude"],
+                longitude=attrs["longitude"],
+                contact_name=attrs["contact_name"],
+                contact_phone=attrs["contact_phone"],
+                contact_email=attrs["contact_email"],
+                tags=_tags,
+            )
+            _new_location.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+            _new_location.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+            _new_location.validated_save()
+        except ORMStatus.DoesNotExist:
+            adapter.job.logger.warning(f'Status {attrs["status"]} could not be found. Make sure it exists.')
+        except ORMLocationType.DoesNotExist:
+            adapter.job.logger.warning(
+                f'LocationType {ids["location_type"]} could not be found. Make sure it exists.'
+ )
+ except ORMTenant.DoesNotExist:
+ adapter.job.logger.warning(f'Tenant {attrs["tenant"]} does not exist, verify it is created.')
+ except pytz.UnknownTimeZoneError:
+ adapter.job.logger.warning(
+ f'Timezone {attrs["time_zone"]} could not be found. Verify the timezone is a valid timezone.'
+ )
+ except ORMLocation.DoesNotExist:
+ adapter.job.logger.warning(f'Parent Location {attrs["parent"]} does not exist, ensure it exists first.')
+ return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+ def update(self, attrs):
+ """Update Location in Nautobot from NautobotLocation object."""
+ self.adapter.job.logger.info(f"Updating Location {self.name}.")
+ _parent = None
+ _tenant = None
+ _timezone = None
+ _location_type = ORMLocationType.objects.get(name=self.location_type)
+ _update_location = ORMLocation.objects.get(name=self.name, location_type=_location_type)
+ if "parent" in attrs:
+ if attrs["parent"]:
+ _parent = ORMLocation.objects.get(name=attrs["parent"])
+ _update_location.parent = _parent
+ if "status" in attrs:
+ _status = ORMStatus.objects.get(name=attrs["status"])
+ _update_location.status = _status
+ if "facility" in attrs:
+ _update_location.facility = attrs["facility"]
+ if "asn" in attrs:
+ _update_location.asn = attrs["asn"]
+ if "time_zone" in attrs:
+ if attrs["time_zone"]:
+ _timezone = pytz.timezone(attrs["time_zone"])
+ _update_location.time_zone = _timezone
+ if "description" in attrs:
+ _update_location.description = attrs["description"]
+ if "tenant" in attrs:
+ _tenant = Tenant.objects.get(name=attrs["tenant"])
+ _update_location.tenant = _tenant
+ if "physical_address" in attrs:
+ _update_location.physical_address = attrs["physical_address"]
+ if "shipping_address" in attrs:
+ _update_location.shipping_address = attrs["shipping_address"]
+ if "latitude" in attrs:
+ _update_location.latitude = attrs["latitude"]
+ if "longitude" in attrs:
+ _update_location.longitude = attrs["longitude"]
+ if "contact_name" in attrs:
+ _update_location.contact_name = attrs["contact_name"]
+ if "contact_phone" in attrs:
+ _update_location.contact_phone = attrs["contact_phone"]
+ if "contact_email" in attrs:
+ _update_location.contact_email = attrs["contact_email"]
+ if "tags" in attrs:
+ _tags = []
+ for _tag in attrs["tags"]:
+ _tags.append(ORMTag.objects.get(name=_tag))
+ _update_location.tags.clear()
+ for _tag in _tags:
+ _update_location.tags.add(_tag)
+ if not check_sor_field(_update_location):
+ _update_location.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+ _update_location.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+ _update_location.validated_save()
+ return super().update(attrs)
+
+ def delete(self):
+ """Delete Location in Nautobot from NautobotLocation object."""
+ try:
+ self.adapter.job.logger.debug(f"Attempting to delete Location: {self} - {self.uuid}")
+ _location = ORMLocation.objects.get(id=self.uuid)
+ _location.delete()
+ super().delete()
+ return self
+ except ORMLocation.DoesNotExist as err:
+ self.adapter.job.logger.warning(f"Unable to find Location {self.uuid} for deletion. {err}")
+ except ProtectedError as err:
+ self.adapter.job.logger.warning(
+ f"Unable to delete Location {self.name}, as it is referenced by another object. 
{err}" + ) + + +class NautobotTeam(Team): + """Nautobot implementation of Team DiffSync model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create Team in Nautobot from NautobotTeam object.""" + adapter.job.logger.debug(f'Creating Nautobot Team {ids["name"]}') + _new_team = ORMTeam( + name=ids["name"], + phone=attrs["phone"], + email=attrs["email"], + address=attrs["address"], + ) + _new_team.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_team.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_team.validated_save() + # TODO: Need to consider how to allow loading from teams or contacts models. + # if "contacts" in attrs: + # # FIXME: There might be a better way to handle this that's easier on the database. + # _new_team.contacts.clear() + # for _contact in attrs["contacts"]: + # adapter.job.logger.debug(f'Looking up Contact: {_contact} for Team: {ids["name"]}.') + # _new_team.contact.add(lookup_contact_for_team(contact=_contact)) + # _new_team.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Team in Nautobot from NautobotTeam object.""" + _update_team = ORMTeam.objects.get(name=self.name) + self.adapter.job.logger.info(f"Updating Team {self.name}") + if "phone" in attrs: + _update_team.phone = attrs["phone"] + if "email" in attrs: + _update_team.email = attrs["email"] + if "address" in attrs: + _update_team.address = attrs["address"] + # TODO: Need to consider how to allow loading from teams or contacts models. + # if "contacts" in attrs: + # # FIXME: There might be a better way to handle this that's easier on the database. + # _update_team.contacts.clear() + # for _contact in attrs["contacts"]: + # self.adapter.job.logger.debug(f"Looking up Contact: {_contact} for Team: {self.name}.") + # _update_team.contacts.add(lookup_contact_for_team(contact=_contact)) + if not check_sor_field(_update_team): + _update_team.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _update_team.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _update_team.validated_save() + return super().update(attrs) + + def delete(self): + """Delete Team in Nautobot from NautobotTeam object.""" + try: + self.adapter.job.logger.debug(f"Attempting to delete Team: {self} - {self.uuid}") + _team = ORMTeam.objects.get(id=self.uuid) + _team.delete() + super().delete() + return self + except ORMTeam.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find Team {self.uuid} for deletion. 
{err}") + + +class NautobotContact(Contact): + """Nautobot implementation of Contact DiffSync model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create Contact in Nautobot from NautobotContact object.""" + adapter.job.logger.debug(f'Creating Nautobot Contact {ids["name"]}') + _new_contact = ORMContact( + name=ids["name"], + phone=attrs["phone"], + email=attrs["email"], + address=attrs["address"], + ) + _new_contact.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_contact.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_contact.validated_save() + if "teams" in attrs: + for _team in attrs["teams"]: + adapter.job.logger.debug(f'Looking up Team: {_team} for Contact: {ids["name"]}.') + _new_contact.teams.add(lookup_team_for_contact(team=_team)) + _new_contact.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Contact in Nautobot from NautobotContact object.""" + _update_contact = ORMContact.objects.get(name=self.name) + self.adapter.job.logger.info(f"Updating Contact {self.name}") + if "phone" in attrs: + _update_contact.phone = attrs["phone"] + if "email" in attrs: + _update_contact.email = attrs["email"] + if "address" in attrs: + _update_contact.address = attrs["address"] + if "teams" in attrs: + # FIXME: There might be a better way to handle this that's easier on the database. + _update_contact.teams.clear() + for _team in attrs["teams"]: + self.adapter.job.logger.debug(f"Looking up Team: {_team} for Contact: {self.name}.") + _update_contact.teams.add(lookup_team_for_contact(team=_team)) + if not check_sor_field(_update_contact): + _update_contact.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _update_contact.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _update_contact.validated_save() + return super().update(attrs) + + def delete(self): + """Delete Contact in Nautobot from NautobotContact object.""" + try: + self.adapter.job.logger.debug(f"Attempting to delete Team: {self} - {self.uuid}") + _contact = ORMContact.objects.get(id=self.uuid) + _contact.delete() + super().delete() + return self + except ORMTenant.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find Contact {self.uuid} for deletion. 
{err}") + + +class NautobotProvider(Provider): + """Nautobot implementation of Provider DiffSync model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create Provider in Nautobot from NautobotProvider object.""" + adapter.job.logger.info(f'Creating Nautobot Provider: {ids["name"]}') + if "tags" in attrs: + _tags = [] + for _tag in attrs["tags"]: + _tags.append(ORMTag.get(name=_tag)) + _new_provider = ORMProvider( + name=ids["name"], + asn=attrs["asn"], + account=attrs["account_number"], + portal_url=attrs["portal_url"], + noc_contact=attrs["noc_contact"], + admin_contact=attrs["admin_contact"], + ) + for _tag in attrs["tags"]: + try: + _new_provider.tags.add(ORMTag.objects.get(name=_tag)) + except ORMTag.DoesNotExist: + adapter.job.logger.warning(f"Tag {_tag} does not exist in Nautobot.") + _new_provider.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_provider.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_provider.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Provider in Nautobot from NautobotProvider object.""" + self.adapter.job.logger.debug(f"Updating Nautobot Provider {self.name}") + _update_provider = ORMProvider.objects.get(id=self.uuid) + if "asn" in attrs: + _update_provider.asn = attrs["asn"] + if "account_number" in attrs: + _update_provider.account = attrs["account_number"] + if "portal_url" in attrs: + _update_provider.portal_url = attrs["portal_url"] + if "noc_contact" in attrs: + _update_provider.noc_contact = attrs["noc_contact"] + if "admin_contact" in attrs: + _update_provider.admin_contact = attrs["admin_contact"] + if "tags" in attrs: + # FIXME: There might be a better way to handle this that's easier on the database. + _update_provider.tags.clear() + for _tag in attrs["tags"]: + try: + _update_provider.tags.add(ORMTag.objects.get(name=_tag)) + except ORMTag.DoesNotExist: + self.adapter.job.logger.warning(f"Tag {_tag} does not exist in Nautobot.") + if not check_sor_field(_update_provider): + _update_provider.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _update_provider.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _update_provider.validated_save() + return super().update(attrs) + + def delete(self): + """Delete Provider in Nautobot from NautobotProvider object.""" + try: + self.adapter.job.logger.debug(f"Attempting to delete Provider: {self} - {self.uuid}") + _nb_provider = ORMProvider.objects.get(id=self.uuid) + _nb_provider.delete() + super().delete() + return self + except ORMProvider.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find Provider {self.uuid} for deletion. 
{err}") + + +class NautobotProviderNetwork(ProviderNetwork): + """Nautobot implementation of ProviderNetwork DiffSync model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create ProviderNetwork in Nautobot from NautobotProviderNetwork object.""" + adapter.job.logger.info(f'Creating Nautobot ProviderNetwork: {ids["name"]}') + if "tags" in attrs: + _tags = [] + for _tag in attrs["tags"]: + _tags.append(ORMTag.get(name=_tag)) + _new_provider_network = ORMProviderNetwork( + name=ids["name"], + provider=ORMProvider.objects.get(name=ids["provider"]), + description=attrs["description"], + comments=attrs["comments"], + ) + for _tag in attrs["tags"]: + try: + _new_provider_network.tags.add(ORMTag.objects.get(name=_tag)) + except ORMTag.DoesNotExist: + adapter.job.logger.warning(f"Tag {_tag} does not exist in Nautobot.") + _new_provider_network.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_provider_network.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_provider_network.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update ProviderNetwork in Nautobot from NautobotProviderNetwork object.""" + self.adapter.job.logger.debug(f"Updating Nautobot ProviderNetwork {self.name}") + _update_provider_network = ORMProviderNetwork.objects.get(id=self.uuid) + if "description" in attrs: + _update_provider_network.description = attrs["description"] + if "comments" in attrs: + _update_provider_network.comments = attrs["comments"] + if "tags" in attrs: + # FIXME: There might be a better way to handle this that's easier on the database. + _update_provider_network.tags.clear() + for _tag in attrs["tags"]: + try: + _update_provider_network.tags.add(ORMTag.objects.get(name=_tag)) + except ORMTag.DoesNotExist: + self.adapter.job.logger.warning(f"Tag {_tag} does not exist in Nautobot.") + _update_provider_network.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + if not check_sor_field(_update_provider_network): + _update_provider_network.custom_field_data.update( + {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")} + ) + _update_provider_network.validated_save() + return super().update(attrs) + + def delete(self): + """Delete ProviderNetwork in Nautobot from NautobotProviderNetwork object.""" + try: + self.adapter.job.logger.debug(f"Attempting to delete ProviderNetwork: {self} - {self.uuid}") + _nb_provider_network = ORMProviderNetwork.objects.get(id=self.uuid) + _nb_provider_network.delete() + super().delete() + return self + except ORMProviderNetwork.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find ProviderNetwork {self.uuid} for deletion. 
{err}") + + +class NautobotCircuitType(CircuitType): + """Nautobot implementation of CircuitType DiffSync model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create CircuitType in Nautobot from NautobotCircuitType object.""" + adapter.job.logger.info(f'Creating Nautobot CircuitType: {ids["name"]}') + _new_circuit_type = ORMCircuitType( + name=ids["name"], + description=attrs["description"], + ) + _new_circuit_type.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_circuit_type.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_circuit_type.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update CircuitType in Nautobot from NautobotCircuitType object.""" + self.adapter.job.logger.debug(f"Updating Nautobot CircuitType {self.name}") + _update_circuit_type = ORMCircuitType.objects.get(id=self.uuid) + if "description" in attrs: + _update_circuit_type.description = attrs["description"] + _update_circuit_type.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + if not check_sor_field(_update_circuit_type): + _update_circuit_type.custom_field_data.update( + {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")} + ) + _update_circuit_type.validated_save() + return super().update(attrs) + + def delete(self): + """Delete CircuitType in Nautobot from NautobotCircuitType object.""" + try: + self.adapter.job.logger.debug(f"Attempting to delete Circuittype: {self} - {self.uuid}") + _nb_circuit_type = ORMCircuitType.objects.get(id=self.uuid) + _nb_circuit_type.delete() + super().delete() + return self + except ORMCircuitType.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find CircuitType {self.uuid} for deletion. 
{err}") + + +class NautobotCircuit(Circuit): + """Nautobot implementation of Circuit DiffSync model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create Circuit in Nautobot from NautobotCircuit object.""" + adapter.job.logger.info(f'Creating Nautobot Circuit: {ids["circuit_id"]}') + if "tags" in attrs: + _tags = [] + for _tag in attrs["tags"]: + _tags.append(ORMTag.get(name=_tag)) + _provider = ORMProvider.objects.get(name=ids["provider"]) + _circuit_type = ORMCircuitType.objects.get(name=attrs["circuit_type"]) + _status = ORMStatus.objects.get(name=attrs["status"]) + _tenant = None + if "tenant" in attrs: + if attrs["tenant"] is not None: + _tenant = ORMTenant.objects.get(name=attrs["tenant"]) + _new_circuit = ORMCircuit( + cid=ids["circuit_id"], + provider=_provider, + circuit_type=_circuit_type, + status=_status, + install_date=(attrs["date_installed"] if attrs["date_installed"] is not None else None), + commit_rate=attrs["commit_rate_kbps"], + description=attrs["description"], + tenant=_tenant, + ) + for _tag in attrs["tags"]: + try: + _new_circuit.tags.add(ORMTag.objects.get(name=_tag)) + except ORMTag.DoesNotExist: + adapter.job.logger.warning(f"Tag {_tag} does not exist in Nautobot.") + _new_circuit.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_circuit.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_circuit.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Circuit in Nautobot from NautobotCircuit object.""" + self.adapter.job.logger.debug(f"Updating Nautobot Circuit {self.circuit_id}") + _update_circuit = ORMCircuit.objects.get(id=self.uuid) + if "circuit_type" in attrs: + _circuit_type = ORMCircuitType.objects.get(name=attrs["circuit_type"]) + _update_circuit.circuit_type = _circuit_type + if "status" in attrs: + _status = ORMStatus.objects.get(name=attrs["status"]) + _update_circuit.status = _status + if "date_installed" in attrs: + _update_circuit.install_date = attrs["date_installed"] + if "commit_rate_kbps" in attrs: + _update_circuit.commit_rate = attrs["commit_rate_kbps"] + if "description" in attrs: + _update_circuit.description = attrs["description"] + if "tenant" in attrs: + _tenant = ORMTenant.objects.get(name=attrs["tenant"]) + _update_circuit.tenant = _tenant + if "tags" in attrs: + # FIXME: There might be a better way to handle this that's easier on the database. 
+ _update_circuit.tags.clear()
+ for _tag in attrs["tags"]:
+ try:
+ _update_circuit.tags.add(ORMTag.objects.get(name=_tag))
+ except ORMTag.DoesNotExist:
+ self.adapter.job.logger.warning(f"Tag {_tag} does not exist in Nautobot.")
+ if "terminations" in attrs:
+ # TODO: Implement circuit terminations
+ pass
+ if not check_sor_field(_update_circuit):
+ _update_circuit.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+ _update_circuit.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+ _update_circuit.validated_save()
+ return super().update(attrs)
+
+ def delete(self):
+ """Delete Circuit in Nautobot from NautobotCircuit object."""
+ try:
+ self.adapter.job.logger.debug(f"Attempting to delete Circuit: {self} - {self.uuid}")
+ _circuit = ORMCircuit.objects.get(id=self.uuid)
+ _circuit.delete()
+ super().delete()
+ return self
+ except ORMCircuit.DoesNotExist as err:
+ self.adapter.job.logger.warning(f"Unable to find Circuit {self.uuid} for deletion. {err}")
+
+
+class NautobotCircuitTermination(CircuitTermination):
+ """Nautobot implementation of CircuitTermination DiffSync model."""
+
+ @classmethod
+ def create(cls, adapter, ids, attrs):
+ """Create CircuitTermination in Nautobot from NautobotCircuitTermination object."""
+ adapter.job.logger.info(f'Creating Nautobot CircuitTermination {ids["name"]}')
+ _name_parts = ids["name"].split("__", 2)
+ _circuit_id = _name_parts[0]
+ _provider_name = _name_parts[1]
+ _term_side = _name_parts[2]
+ try:
+ _provider = ORMProvider.objects.get(name=_provider_name)
+ except ORMProvider.DoesNotExist:
+ adapter.job.logger.warning(f"Provider {_provider_name} does not exist in Nautobot. Please create it.")
+ try:
+ _circuit = ORMCircuit.objects.get(cid=_circuit_id, provider=_provider)
+ except ORMCircuit.DoesNotExist:
+ adapter.job.logger.warning(f"Circuit {_circuit_id} does not exist in Nautobot. Please create it.")
+ _tags = attrs["tags"] if "tags" in attrs else []
+ if attrs["termination_type"] == "Provider Network":
+ try:
+ _provider_network = ORMProviderNetwork.objects.get(name=attrs["provider_network"])
+ except ORMProviderNetwork.DoesNotExist:
+ adapter.job.logger.warning(
+ f'ProviderNetwork {attrs["provider_network"]} does not exist in Nautobot. Please create it.'
+ )
+ _new_circuit_termination = ORMCircuitTermination(
+ provider_network=_provider_network,
+ circuit=_circuit,
+ term_side=_term_side,
+ xconnect_id=attrs["cross_connect_id"],
+ pp_info=attrs["patch_panel_or_ports"],
+ description=attrs["description"],
+ upstream_speed=attrs["upstream_speed_kbps"],
+ port_speed=attrs["port_speed_kbps"],
+ )
+ if attrs["termination_type"] == "Location":
+ try:
+ _location = ORMLocation.objects.get(name=attrs["location"])
+ except ORMLocation.DoesNotExist:
+ adapter.job.logger.warning(
+ f'Location {attrs["location"]} does not exist in Nautobot. Please create it.' 
+ )
+ _new_circuit_termination = ORMCircuitTermination(
+ location=_location,
+ circuit=_circuit,
+ term_side=_term_side,
+ xconnect_id=attrs["cross_connect_id"],
+ pp_info=attrs["patch_panel_or_ports"],
+ description=attrs["description"],
+ upstream_speed=attrs["upstream_speed_kbps"],
+ port_speed=attrs["port_speed_kbps"],
+ )
+ for _tag in _tags:
+ try:
+ _new_circuit_termination.tags.add(ORMTag.objects.get(name=_tag))
+ except ORMTag.DoesNotExist:
+ adapter.job.logger.warning(f"Tag {_tag} does not exist in Nautobot.")
+ _new_circuit_termination.custom_field_data.update(
+ {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}
+ )
+ _new_circuit_termination.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+ _new_circuit_termination.validated_save()
+ return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+ def update(self, attrs):
+ """Update CircuitTermination in Nautobot from NautobotCircuitTermination object."""
+ self.adapter.job.logger.debug(f"Updating Nautobot CircuitTermination {self.name}")
+ _update_circuit_termination = ORMCircuitTermination.objects.get(id=self.uuid)
+ if "location" in attrs:
+ try:
+ _location = ORMLocation.objects.get(name=attrs["location"])
+ if _update_circuit_termination.provider_network:
+ _update_circuit_termination.provider_network = None
+ _update_circuit_termination.location = _location
+ except ORMLocation.DoesNotExist:
+ self.adapter.job.logger.warning(
+ f'Location {attrs["location"]} does not exist in Nautobot. Please create it.'
+ )
+ if "provider_network" in attrs:
+ try:
+ _provider_network = ORMProviderNetwork.objects.get(name=attrs["provider_network"])
+ if _update_circuit_termination.location:
+ _update_circuit_termination.location = None
+ _update_circuit_termination.provider_network = _provider_network
+ except ORMProviderNetwork.DoesNotExist:
+ self.adapter.job.logger.warning(
+ f'ProviderNetwork {attrs["provider_network"]} does not exist in Nautobot. Please create it.'
+ )
+ if "port_speed_kbps" in attrs:
+ _update_circuit_termination.port_speed = attrs["port_speed_kbps"]
+ if "upstream_speed_kbps" in attrs:
+ _update_circuit_termination.upstream_speed = attrs["upstream_speed_kbps"]
+ if "cross_connect_id" in attrs:
+ _update_circuit_termination.xconnect_id = attrs["cross_connect_id"]
+ if "patch_panel_or_ports" in attrs:
+ _update_circuit_termination.pp_info = attrs["patch_panel_or_ports"]
+ if "description" in attrs:
+ _update_circuit_termination.description = attrs["description"]
+ if "tags" in attrs:
+ # FIXME: There might be a better way to handle this that's easier on the database. 
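+ # As with Circuit.update() above, a single call such as
+ # _update_circuit_termination.tags.set(ORMTag.objects.filter(name__in=attrs["tags"]))
+ # is a possible lighter-weight alternative to the clear()/add() loop below (untested sketch).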
+ _update_circuit_termination.tags.clear()
+ for _tag in attrs["tags"]:
+ try:
+ _update_circuit_termination.tags.add(ORMTag.objects.get(name=_tag))
+ except ORMTag.DoesNotExist:
+ self.adapter.job.logger.warning(f"Tag {_tag} does not exist in Nautobot.")
+ _update_circuit_termination.custom_field_data.update(
+ {"last_synced_from_sor": datetime.today().date().isoformat()}
+ )
+ if not check_sor_field(_update_circuit_termination):
+ _update_circuit_termination.custom_field_data.update(
+ {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}
+ )
+ _update_circuit_termination.validated_save()
+ return super().update(attrs)
+
+ def delete(self):
+ """Delete CircuitTermination in Nautobot from NautobotCircuitTermination object."""
+ try:
+ self.adapter.job.logger.debug(f"Attempting to delete CircuitTermination: {self} - {self.uuid}")
+ _nb_circuit_termination = ORMCircuitTermination.objects.get(id=self.uuid)
+ _nb_circuit_termination.delete()
+ super().delete()
+ return self
+ except ORMCircuitTermination.DoesNotExist as err:
+ self.adapter.job.logger.warning(f"Unable to find CircuitTermination {self.uuid} for deletion. {err}")
+
+
+class NautobotNamespace(Namespace):
+ """Nautobot implementation of Nautobot Namespace model."""
+
+ @classmethod
+ def create(cls, adapter, ids, attrs):
+ """Create Namespace in Nautobot from NautobotNamespace object."""
+ adapter.job.logger.info(f'Creating Nautobot Namespace {ids["name"]}')
+ new_namespace = ORMNamespace(
+ name=ids["name"],
+ description=attrs["description"],
+ )
+ new_namespace.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+ new_namespace.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+ new_namespace.validated_save()
+ if "location" in attrs:
+ try:
+ _location = ORMLocation.objects.get(name=attrs["location"])
+ new_namespace.location = _location
+ new_namespace.validated_save()
+ except ORMLocation.DoesNotExist:
+ adapter.job.logger.warning(
+ f'Nautobot Location {attrs["location"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+
+ return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+ def update(self, attrs):
+ """Update Namespace in Nautobot from NautobotNamespace object."""
+ self.adapter.job.logger.debug(f"Updating Nautobot Namespace {self.name}.")
+ _update_namespace = ORMNamespace.objects.get(id=self.uuid)
+ if "description" in attrs:
+ _update_namespace.description = attrs["description"]
+ if "location" in attrs:
+ try:
+ _location = ORMLocation.objects.get(name=attrs["location"])
+ _update_namespace.location = _location
+ except ORMLocation.DoesNotExist:
+ self.adapter.job.logger.warning(
+ f'Nautobot Location {attrs["location"]} does not exist. 
Make sure it is created manually or defined in global_settings.yaml'
+ )
+ if not check_sor_field(_update_namespace):
+ _update_namespace.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+ _update_namespace.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+ _update_namespace.validated_save()
+
+ return super().update(attrs)
+
+ def delete(self):
+ """Delete Namespace in Nautobot from NautobotNamespace object."""
+ self.adapter.job.logger.debug(f"Delete Nautobot Namespace {self.uuid}")
+ try:
+ _namespace = ORMNamespace.objects.get(id=self.uuid)
+ super().delete()
+ _namespace.delete()
+ return self
+ except ORMNamespace.DoesNotExist as err:
+ self.adapter.job.logger.warning(f"Unable to find Namespace {self.uuid} for deletion. {err}")
+ except ProtectedError as err:
+ self.adapter.job.logger.warning(
+ f"Unable to delete Namespace {self.name} due to existing references. Error: {err}."
+ )
+
+
+class NautobotRiR(RiR):
+ """Nautobot implementation of Nautobot RiR model."""
+
+ @classmethod
+ def create(cls, adapter, ids, attrs):
+ """Create RiR in Nautobot from NautobotRiR object."""
+ adapter.job.logger.info(f'Creating Nautobot RiR: {ids["name"]}')
+ new_rir = ORMRiR(
+ name=ids["name"],
+ is_private=attrs["private"],
+ description=attrs["description"],
+ )
+ new_rir.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+ new_rir.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+ new_rir.validated_save()
+
+ return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+ def update(self, attrs):
+ """Update RiR in Nautobot from NautobotRiR object."""
+ self.adapter.job.logger.info(f"Updating Nautobot RiR {self.name}")
+ _update_rir = ORMRiR.objects.get(id=self.uuid)
+ if "private" in attrs:
+ _update_rir.is_private = attrs["private"]
+ if "description" in attrs:
+ _update_rir.description = attrs["description"]
+ if not check_sor_field(_update_rir):
+ _update_rir.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+ _update_rir.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+ _update_rir.validated_save()
+
+ return super().update(attrs)
+
+ def delete(self):
+ """Delete RiR in Nautobot from NautobotRiR object."""
+ self.adapter.job.logger.debug(f"Delete Nautobot RiR {self.uuid}")
+ try:
+ _rir = ORMRiR.objects.get(id=self.uuid)
+ super().delete()
+ _rir.delete()
+ return self
+ except ORMRiR.DoesNotExist as err:
+ self.adapter.job.logger.warning(f"Unable to find RiR {self.uuid} for deletion. {err}")
+ except ProtectedError as err:
+ self.adapter.job.logger.warning(
+ f"Unable to delete RiR {self.name} due to existing references. Error: {err}."
+ )
+
+
+class NautobotVLANGroup(VLANGroup):
+ """Nautobot implementation of Nautobot VLANGroup model."""
+
+ @classmethod
+ def create(cls, adapter, ids, attrs):
+ """Create VLANGroup in Nautobot from NautobotVLANGroup object."""
+ adapter.job.logger.info(f'Creating Nautobot VLANGroup: {ids["name"]}')
+ try:
+ _location = ORMLocation.objects.get(name=attrs["location"])
+ except ORMLocation.DoesNotExist:
+ _location = None
+ adapter.job.logger.warning(
+ f'Nautobot Location {attrs["location"]} does not exist. 
Make sure it is created manually or defined in global_settings.yaml'
+ )
+ new_vlan_group = ORMVLANGroup(
+ name=ids["name"],
+ location=_location,
+ description=attrs["description"],
+ )
+ new_vlan_group.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+ new_vlan_group.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+ new_vlan_group.validated_save()
+
+ return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+ def update(self, attrs):
+ """Update VLANGroup in Nautobot from NautobotVLANGroup object."""
+ self.adapter.job.logger.info(f"Updating Nautobot VLANGroup {self.name}")
+ _update_vlan_group = ORMVLANGroup.objects.get(id=self.uuid)
+ if "location" in attrs:
+ try:
+ _location = ORMLocation.objects.get(name=attrs["location"])
+ except ORMLocation.DoesNotExist:
+ _location = None
+ self.adapter.job.logger.warning(
+ f'Nautobot Location {attrs["location"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ _update_vlan_group.location = _location
+ if "description" in attrs:
+ _update_vlan_group.description = attrs["description"]
+ if not check_sor_field(_update_vlan_group):
+ _update_vlan_group.custom_field_data.update(
+ {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}
+ )
+ _update_vlan_group.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+ _update_vlan_group.validated_save()
+
+ return super().update(attrs)
+
+ def delete(self):
+ """Delete VLANGroup in Nautobot from NautobotVLANGroup object."""
+ self.adapter.job.logger.debug(f"Delete Nautobot VLANGroup {self.uuid}")
+ try:
+ _vlan_group = ORMVLANGroup.objects.get(id=self.uuid)
+ super().delete()
+ _vlan_group.delete()
+ return self
+ except ORMVLANGroup.DoesNotExist as err:
+ self.adapter.job.logger.warning(f"Unable to find VLANGroup {self.uuid} for deletion. {err}")
+ except ProtectedError as err:
+ self.adapter.job.logger.warning(
+ f"Unable to delete VLANGroup {self.name} due to existing references. Error: {err}."
+ )
+
+
+class NautobotVLAN(VLAN):
+ """Nautobot implementation of Nautobot VLAN model."""
+
+ @classmethod
+ def create(cls, adapter, ids, attrs):
+ """Create VLAN in Nautobot from NautobotVLAN object."""
+ adapter.job.logger.info(f'Creating Nautobot VLAN: {ids["name"]}')
+ try:
+ _vlan_group = ORMVLANGroup.objects.get(name=ids["vlan_group"])
+ except ORMVLANGroup.DoesNotExist:
+ _vlan_group = None
+ if ids["vlan_group"]:
+ adapter.job.logger.warning(
+ f'Nautobot VLANGroup {ids["vlan_group"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ try:
+ _status = ORMStatus.objects.get(name=attrs["status"])
+ except ORMStatus.DoesNotExist:
+ _status = ORMStatus.objects.get(name="Active")
+ adapter.job.logger.warning(
+ f'Nautobot Status {attrs["status"]} does not exist. Make sure it is created manually or defined in global_settings.yaml. Defaulting to Status Active.'
+ )
+ try:
+ _role = ORMRole.objects.get(name=attrs["role"])
+ except ORMRole.DoesNotExist:
+ _role = None
+ if attrs["role"]:
+ adapter.job.logger.warning(
+ f'Nautobot Role {attrs["role"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ try:
+ _tenant = ORMTenant.objects.get(name=attrs["tenant"])
+ except ORMTenant.DoesNotExist:
+ _tenant = None
+ if attrs["tenant"]:
+ adapter.job.logger.warning(
+ f'Nautobot Tenant {attrs["tenant"]} does not exist. 
Make sure it is created manually or defined in global_settings.yaml'
+ )
+ try:
+ if "tags" in attrs:
+ _tags = []
+ for tag in attrs["tags"]:
+ _tags.append(ORMTag.objects.get(name=tag))
+ except ORMTag.DoesNotExist:
+ adapter.job.logger.warning(
+ f'Nautobot Tag {attrs["tags"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ new_vlan = ORMVLAN(
+ name=ids["name"],
+ vid=ids["vid"],
+ vlan_group=_vlan_group,
+ status=_status,
+ role=_role,
+ tenant=_tenant,
+ description=attrs["description"],
+ )
+ if attrs.get("tags"):
+ new_vlan.validated_save()
+ new_vlan.tags.clear()
+ for _tag in attrs["tags"]:
+ new_vlan.tags.add(ORMTag.objects.get(name=_tag))
+ new_vlan.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+ new_vlan.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+ new_vlan.validated_save()
+ _locations = []
+ try:
+ if "locations" in attrs:
+ for _location in attrs["locations"]:
+ _locations.append(ORMLocation.objects.get(name=_location))
+ except ORMLocation.DoesNotExist:
+ adapter.job.logger.warning(
+ f'Nautobot Location {attrs["locations"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ if _locations:
+ for _location in _locations:
+ new_vlan.locations.add(_location)
+
+ return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+ def update(self, attrs):
+ """Update VLAN in Nautobot from NautobotVLAN object."""
+ self.adapter.job.logger.info(f"Updating Nautobot VLAN: {self.name}__{self.vid}")
+ _update_vlan = ORMVLAN.objects.get(id=self.uuid)
+ if "description" in attrs:
+ _update_vlan.description = attrs["description"]
+ if "status" in attrs:
+ try:
+ _status = ORMStatus.objects.get(name=attrs["status"])
+ except ORMStatus.DoesNotExist:
+ _status = ORMStatus.objects.get(name="Active")
+ self.adapter.job.logger.warning(
+ f'Nautobot Status {attrs["status"]} does not exist. Make sure it is created manually or defined in global_settings.yaml. Defaulting to Status Active.'
+ )
+ _update_vlan.status = _status
+ if "role" in attrs:
+ try:
+ _role = ORMRole.objects.get(name=attrs["role"])
+ except ORMRole.DoesNotExist:
+ _role = None
+ if attrs["role"]:
+ self.adapter.job.logger.warning(
+ f'Nautobot Role {attrs["role"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ _update_vlan.role = _role
+ if "tenant" in attrs:
+ try:
+ _tenant = ORMTenant.objects.get(name=attrs["tenant"])
+ except ORMTenant.DoesNotExist:
+ _tenant = None
+ if attrs["tenant"]:
+ self.adapter.job.logger.warning(
+ f'Nautobot Tenant {attrs["tenant"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ _update_vlan.tenant = _tenant
+ if "tags" in attrs:
+ try:
+ _tags = []
+ for tag in attrs["tags"]:
+ _tags.append(ORMTag.objects.get(name=tag))
+ except ORMTag.DoesNotExist:
+ self.adapter.job.logger.warning(
+ f'Nautobot Tag {attrs["tags"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ if attrs.get("tags"):
+ _update_vlan.validated_save()
+ # TODO: Probably a better way to handle this that's easier on the database.
+ _update_vlan.tags.clear()
+ for _tag in attrs["tags"]:
+ _update_vlan.tags.add(ORMTag.objects.get(name=_tag))
+ if "locations" in attrs:
+ # TODO: Probably a better way to handle this that's easier on the database. 
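+ # A possible lighter-weight alternative (untested sketch):
+ # _update_vlan.locations.set(ORMLocation.objects.filter(name__in=attrs["locations"]))
+ # would let Django reconcile the M2M rows instead of the clear()/add() loop below.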
+ _update_vlan.locations.clear()
+ _locations = []
+ try:
+ for _location in attrs["locations"]:
+ _locations.append(ORMLocation.objects.get(name=_location))
+ except ORMLocation.DoesNotExist:
+ self.adapter.job.logger.warning(
+ f'Nautobot Location {attrs["locations"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ if _locations:
+ for _location in _locations:
+ _update_vlan.locations.add(_location)
+ if not check_sor_field(_update_vlan):
+ _update_vlan.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+ _update_vlan.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+ _update_vlan.validated_save()
+ return super().update(attrs)
+
+ def delete(self):
+ """Delete VLAN in Nautobot from NautobotVLAN object."""
+ self.adapter.job.logger.debug(f"Delete Nautobot VLAN {self.uuid}")
+ try:
+ _vlan = ORMVLAN.objects.get(id=self.uuid)
+ super().delete()
+ _vlan.delete()
+ return self
+ except ORMVLAN.DoesNotExist as err:
+ self.adapter.job.logger.warning(f"Unable to find VLAN {self.uuid} for deletion. {err}")
+ except ProtectedError as err:
+ self.adapter.job.logger.warning(
+ f"Unable to delete VLAN {self.name} due to existing references. Error: {err}."
+ )
+
+
+class NautobotVRF(VRF):
+ """Nautobot implementation of Nautobot VRF model."""
+
+ @classmethod
+ def create(cls, adapter, ids, attrs):
+ """Create VRF in Nautobot from NautobotVRF object."""
+ adapter.job.logger.info(f'Creating Nautobot VRF: {ids["name"]}')
+ try:
+ _tenant = ORMTenant.objects.get(name=attrs["tenant"])
+ except ORMTenant.DoesNotExist:
+ _tenant = None
+ if attrs["tenant"]:
+ adapter.job.logger.warning(
+ f'Nautobot Tenant {attrs["tenant"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ try:
+ _namespace = ORMNamespace.objects.get(name=ids["namespace"])
+ except ORMNamespace.DoesNotExist:
+ _namespace = ORMNamespace.objects.get(name="Global")
+ adapter.job.logger.warning(
+ f'Nautobot Namespace {ids["namespace"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ new_vrf = ORMVRF(
+ name=ids["name"],
+ namespace=_namespace,
+ rd=attrs["route_distinguisher"],
+ tenant=_tenant,
+ description=attrs["description"],
+ )
+ if attrs.get("tags"):
+ new_vrf.validated_save()
+ new_vrf.tags.clear()
+ for _tag in attrs["tags"]:
+ new_vrf.tags.add(ORMTag.objects.get(name=_tag))
+ new_vrf.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+ new_vrf.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+ new_vrf.validated_save()
+ return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+ def update(self, attrs):
+ """Update VRF in Nautobot from NautobotVRF object."""
+ self.adapter.job.logger.info(f"Updating Nautobot VRF: {self.name}")
+ _update_vrf = ORMVRF.objects.get(id=self.uuid)
+ if "tenant" in attrs:
+ try:
+ _tenant = ORMTenant.objects.get(name=attrs["tenant"])
+ except ORMTenant.DoesNotExist:
+ _tenant = None
+ if attrs["tenant"]:
+ self.adapter.job.logger.warning(
+ f'Nautobot Tenant {attrs["tenant"]} does not exist. 
Make sure it is created manually or defined in global_settings.yaml' + ) + _update_vrf.tenant = _tenant + if "description" in attrs: + _update_vrf.description = attrs["description"] + if "route_distinguisher" in attrs: + _update_vrf.rd = attrs["route_distinguisher"] + if attrs.get("tags"): + _update_vrf.tags.clear() + for _tag in attrs["tags"]: + _update_vrf.tags.add(ORMTag.objects.get(name=_tag)) + if not check_sor_field(_update_vrf): + _update_vrf.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _update_vrf.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _update_vrf.validated_save() + return super().update(attrs) + + def delete(self): + """Delete VRF in Nautobot from NautobotVRF object.""" + self.adapter.job.logger.debug(f"Delete Nautobot VRF {self.uuid}") + try: + _vrf = ORMVRF.objects.get(id=self.uuid) + super().delete() + _vrf.delete() + return self + except ORMVRF.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find VRF {self.uuid} for deletion. {err}") + except ProtectedError as err: + self.adapter.job.logger.warning( + f"Unable to delete VRF {self.name} due to existing references. Error: {err}." + ) + + +class NautobotPrefix(Prefix): + """Nautobot implementation of Nautobot Prefix model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create Prefix in Nautobot from NautobotPrefix object.""" + adapter.job.logger.info(f'Creating Nautobot Prefix: {ids["network"]} in Namespace: {ids["namespace"]}') + try: + _namespace = ORMNamespace.objects.get(name=ids["namespace"]) + except ORMNamespace.DoesNotExist: + _namespace = ORMNamespace.objects.get(name="Global") + adapter.job.logger.warning( + f'Nautobot Namespace {ids["namespace"]} does not exist. Defaulting to Global Namespace.' + ) + try: + if attrs["vlan"]: + _vlan_name, _vlan_id, _vlan_group_name = attrs["vlan"].split("__", 2) + _vlan_group = ORMVLANGroup.objects.get(name=_vlan_group_name) + _vlan = ORMVLAN.objects.get( + name=_vlan_name, + vid=_vlan_id, + vlan_group=_vlan_group if _vlan_group != "None" else None, + ) + else: + _vlan = None + except ORMVLANGroup.DoesNotExist: + _vlan = None + if attrs["vlan"]: + adapter.job.logger.warning( + f'Nautobot VLANGroup {attrs["vlan"]} does not exist. Make sure it is created manually or defined in global_settings.yaml' + ) + except ORMVLAN.DoesNotExist: + _vlan = None + if attrs["vlan"]: + adapter.job.logger.warning( + f'Nautobot VLAN {attrs["vlan"]} does not exist. Make sure it is created manually or defined in global_settings.yaml' + ) + try: + _status = ORMStatus.objects.get(name=attrs["status"]) + except ORMStatus.DoesNotExist: + _status = ORMStatus.objects.get(name="Active") + adapter.job.logger.warning( + f'Nautobot Status {attrs["status"]} does not exist. Make sure it is created manually or defined in global_settings.yaml. Defaulting to Status Active.' + ) + try: + _role = ORMRole.objects.get(name=attrs["role"]) + except ORMRole.DoesNotExist: + _role = None + if attrs["role"]: + adapter.job.logger.warning( + f'Nautobot Role {attrs["role"]} does not exist. Make sure it is created manually or defined in global_settings.yaml' + ) + try: + _tenant = ORMTenant.objects.get(name=attrs["tenant"]) + except ORMTenant.DoesNotExist: + _tenant = None + if attrs["tenant"]: + adapter.job.logger.warning( + f'Nautobot Tenant {attrs["tenant"]} does not exist. 
Make sure it is created manually or defined in global_settings.yaml'
+ )
+ try:
+ _rir = ORMRiR.objects.get(name=attrs["rir"])
+ except ORMRiR.DoesNotExist:
+ _rir = None
+ if attrs["rir"]:
+ adapter.job.logger.warning(
+ f'Nautobot RiR {attrs["rir"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ try:
+ if "tags" in attrs:
+ _tags = []
+ for tag in attrs["tags"]:
+ _tags.append(ORMTag.objects.get(name=tag))
+ except ORMTag.DoesNotExist:
+ adapter.job.logger.warning(
+ f'Nautobot Tag {attrs["tags"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ new_prefix = ORMPrefix(
+ network=ids["network"].split("/")[0],
+ prefix_length=ids["network"].split("/")[1],
+ namespace=_namespace,
+ type=attrs["prefix_type"] if attrs["prefix_type"] else "Network",
+ status=_status,
+ role=_role,
+ rir=_rir,
+ tenant=_tenant,
+ date_allocated=attrs["date_allocated"],
+ description=attrs["description"],
+ vlan=_vlan,
+ )
+ if attrs.get("tags"):
+ new_prefix.validated_save()
+ new_prefix.tags.clear()
+ for _tag in attrs["tags"]:
+ new_prefix.tags.add(ORMTag.objects.get(name=_tag))
+ new_prefix.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+ new_prefix.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+ new_prefix.validated_save()
+ _locations = []
+ try:
+ if "locations" in attrs:
+ if attrs["locations"]:
+ for _location in attrs["locations"]:
+ _locations.append(ORMLocation.objects.get(name=_location))
+ except ORMLocation.DoesNotExist:
+ adapter.job.logger.warning(
+ f'Nautobot Location {attrs["locations"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ if _locations:
+ for _location in _locations:
+ new_prefix.locations.add(_location)
+ _vrfs = []
+ try:
+ if "vrfs" in attrs:
+ if attrs["vrfs"]:
+ for _vrf in attrs["vrfs"]:
+ _vrf_name, _vrf_namespace = _vrf.split("__")
+ _namespace = ORMNamespace.objects.get(name=_vrf_namespace)
+ _vrfs.append(ORMVRF.objects.get(name=_vrf_name, namespace=_namespace))
+ if _vrfs:
+ for _vrf in _vrfs:
+ adapter.job.logger.debug(f"Assigning VRF {_vrf} to Prefix {new_prefix}")
+ new_prefix.vrfs.add(_vrf)
+ except ORMNamespace.DoesNotExist:
+ if attrs["vrfs"]:
+ adapter.job.logger.warning(
+ f'Nautobot Namespace {attrs["vrfs"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ except ORMVRF.DoesNotExist:
+ adapter.job.logger.warning(
+ f'Nautobot VRF {attrs["vrfs"]} does not exist. 
Make sure it is created manually or defined in global_settings.yaml'
+ )
+
+ return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+ def update(self, attrs):
+ """Update Prefix in Nautobot from NautobotPrefix object."""
+ self.adapter.job.logger.info(f"Updating Nautobot Prefix: {self.network} in Namespace: {self.namespace}")
+ _update_prefix = ORMPrefix.objects.get(id=self.uuid)
+ if "prefix_type" in attrs:
+ _update_prefix.type = attrs["prefix_type"]
+ if "vlan" in attrs:
+ try:
+ if attrs["vlan"]:
+ _vlan_name, _vlan_id, _vlan_group_name = attrs["vlan"].split("__", 2)
+ _vlan_group = ORMVLANGroup.objects.get(name=_vlan_group_name)
+ _vlan = ORMVLAN.objects.get(
+ name=_vlan_name,
+ vid=_vlan_id,
+ vlan_group=_vlan_group if _vlan_group != "None" else None,
+ )
+ else:
+ _vlan = None
+ except ORMVLANGroup.DoesNotExist:
+ _vlan = None
+ if attrs["vlan"]:
+ self.adapter.job.logger.warning(
+ f'Nautobot VLANGroup {attrs["vlan"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ except ORMVLAN.DoesNotExist:
+ _vlan = None
+ if attrs["vlan"]:
+ self.adapter.job.logger.warning(
+ f'Nautobot VLAN {attrs["vlan"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ _update_prefix.vlan = _vlan
+ if "status" in attrs:
+ try:
+ _status = ORMStatus.objects.get(name=attrs["status"])
+ except ORMStatus.DoesNotExist:
+ _status = ORMStatus.objects.get(name="Active")
+ self.adapter.job.logger.warning(
+ f'Nautobot Status {attrs["status"]} does not exist. Make sure it is created manually or defined in global_settings.yaml. Defaulting to Status Active.'
+ )
+ _update_prefix.status = _status
+ if "role" in attrs:
+ try:
+ _role = ORMRole.objects.get(name=attrs["role"])
+ except ORMRole.DoesNotExist:
+ _role = None
+ if attrs["role"]:
+ self.adapter.job.logger.warning(
+ f'Nautobot Role {attrs["role"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ _update_prefix.role = _role
+ if "tenant" in attrs:
+ try:
+ _tenant = ORMTenant.objects.get(name=attrs["tenant"])
+ except ORMTenant.DoesNotExist:
+ _tenant = None
+ if attrs["tenant"]:
+ self.adapter.job.logger.warning(
+ f'Nautobot Tenant {attrs["tenant"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ _update_prefix.tenant = _tenant
+ if "rir" in attrs:
+ try:
+ _rir = ORMRiR.objects.get(name=attrs["rir"])
+ except ORMRiR.DoesNotExist:
+ _rir = None
+ if attrs["rir"]:
+ self.adapter.job.logger.warning(
+ f'Nautobot RiR {attrs["rir"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ _update_prefix.rir = _rir
+ if "tags" in attrs:
+ try:
+ _tags = []
+ for tag in attrs["tags"]:
+ _tags.append(ORMTag.objects.get(name=tag))
+ except ORMTag.DoesNotExist:
+ self.adapter.job.logger.warning(
+ f'Nautobot Tag {attrs["tags"]} does not exist. 
Make sure it is created manually or defined in global_settings.yaml'
+ )
+ if "date_allocated" in attrs:
+ _update_prefix.date_allocated = attrs["date_allocated"]
+ if attrs.get("tags"):
+ _update_prefix.validated_save()
+ _update_prefix.tags.clear()
+ for _tag in attrs["tags"]:
+ _update_prefix.tags.add(ORMTag.objects.get(name=_tag))
+ if "locations" in attrs:
+ _locations = []
+ try:
+ if attrs["locations"]:
+ for _location in attrs["locations"]:
+ _locations.append(ORMLocation.objects.get(name=_location))
+ else:
+ _update_prefix.locations.clear()
+ except ORMLocation.DoesNotExist:
+ self.adapter.job.logger.warning(
+ f'Nautobot Location {attrs["locations"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ if _locations:
+ _update_prefix.locations.clear()
+ for _location in _locations:
+ _update_prefix.locations.add(_location)
+ if "vrfs" in attrs:
+ _vrfs = []
+ try:
+ if attrs["vrfs"]:
+ for _vrf in attrs["vrfs"]:
+ _vrf_name, _vrf_namespace = _vrf.split("__")
+ _namespace = ORMNamespace.objects.get(name=_vrf_namespace)
+ _vrfs.append(ORMVRF.objects.get(name=_vrf_name, namespace=_namespace))
+ else:
+ _update_prefix.vrfs.clear()
+ if _vrfs:
+ _update_prefix.vrfs.clear()
+ for _vrf in _vrfs:
+ self.adapter.job.logger.debug(f"Assigning VRF {_vrf} to Prefix {_update_prefix}")
+ _update_prefix.vrfs.add(_vrf)
+ except ORMNamespace.DoesNotExist:
+ if attrs["vrfs"]:
+ self.adapter.job.logger.warning(
+ f'Nautobot Namespace {attrs["vrfs"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ except ORMVRF.DoesNotExist:
+ self.adapter.job.logger.warning(
+ f'Nautobot VRF {attrs["vrfs"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+ )
+ if not check_sor_field(_update_prefix):
+ _update_prefix.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+ _update_prefix.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+ _update_prefix.validated_save()
+
+ return super().update(attrs)
+
+ def delete(self):
+ """Delete Prefix in Nautobot from NautobotPrefix object."""
+ self.adapter.job.logger.debug(f"Delete Nautobot Prefix {self.uuid}")
+ try:
+ _prefix = ORMPrefix.objects.get(id=self.uuid)
+ super().delete()
+ _prefix.delete()
+ return self
+ except ORMPrefix.DoesNotExist as err:
+ self.adapter.job.logger.warning(f"Unable to find Prefix {self.uuid} for deletion. {err}")
+ except ProtectedError as err:
+ self.adapter.job.logger.warning(
+ f"Unable to delete Prefix {self.network} due to existing references. Error: {err}." 
+ )
+
+
+class NautobotSecret(Secret):
+ """Nautobot implementation of Bootstrap Secret model."""
+
+ @classmethod
+ def create(cls, adapter, ids, attrs):
+ """Create Secret in Nautobot from NautobotSecret object."""
+ adapter.job.logger.info(f'Creating Nautobot Secret: {ids["name"]}')
+ new_secret = ORMSecret(name=ids["name"], provider=attrs["provider"], parameters=attrs["parameters"])
+ new_secret.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+ new_secret.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+ new_secret.validated_save()
+ return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+ def update(self, attrs):
+ """Update Secret in Nautobot from NautobotSecret object."""
+ _update_secret = ORMSecret.objects.get(id=self.uuid)
+ if "provider" in attrs:
+ _update_secret.provider = attrs["provider"]
+ if "parameters" in attrs:
+ _update_secret.parameters["variable"] = attrs["parameters"]["variable"]
+ _update_secret.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+ if not check_sor_field(_update_secret):
+ _update_secret.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+ _update_secret.validated_save()
+ return super().update(attrs)
+
+ def delete(self):
+ """Delete Secret in Nautobot from NautobotSecret object."""
+ self.adapter.job.logger.debug(f"Delete secret uuid: {self.uuid}")
+ try:
+ secr = ORMSecret.objects.get(id=self.uuid)
+ super().delete()
+ secr.delete()
+ return self
+ except ORMSecret.DoesNotExist as err:
+ self.adapter.job.logger.warning(f"Unable to find Secret {self.uuid} for deletion. {err}")
+
+
+class NautobotSecretsGroup(SecretsGroup):
+ """Nautobot implementation of Bootstrap SecretsGroup model."""
+
+ @classmethod
+ def create(cls, adapter, ids, attrs):
+ """Create SecretsGroup in Nautobot from NautobotSecretsGroup object."""
+ adapter.job.logger.info(f'Creating Nautobot Secrets Group: {ids["name"]}')
+ _new_secrets_group = ORMSecretsGroup(name=ids["name"])
+ _new_secrets_group.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+ _new_secrets_group.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+ _new_secrets_group.validated_save()
+ _group = ORMSecretsGroup.objects.get(name=ids["name"])
+ for _secret in attrs["secrets"]:
+ try:
+ _orm_secret = ORMSecret.objects.get(name=_secret["name"])
+ except ORMSecret.DoesNotExist:
+ adapter.job.logger.info(f'Secret {_secret["name"]} does not exist in Nautobot, ensure it is created.')
+ continue
+ try:
+ _group.secrets.get(name=_secret["name"])
+ except ORMSecret.DoesNotExist:
+ _group.secrets.add(_orm_secret)
+ _group.validated_save()
+ _sga = _group.secretsgroupassociation_set.get(secret_id=_orm_secret.id)
+ _sga.access_type = _secret["access_type"]
+ _sga.secret_type = _secret["secret_type"]
+ _sga.validated_save()
+ return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+ def update(self, attrs):
+ """Update SecretsGroup in Nautobot from NautobotSecretsGroup object."""
+ self.adapter.job.logger.info(f"Updating SecretsGroup {self.name}")
+ _update_group = ORMSecretsGroup.objects.get(name=self.name)
+ if "secrets" in attrs:
+ for _secret in attrs["secrets"]:
+ try:
+ _orm_secret = ORMSecret.objects.get(name=_secret["name"])
+ except ORMSecret.DoesNotExist:
+ self.adapter.job.logger.info(
+ f'Secret {_secret["name"]} does not exist in Nautobot, ensure it is created.' 
+ )
+ continue
+ try:
+ _update_group.secrets.get(name=_secret["name"])
+ except ORMSecret.DoesNotExist:
+ _sga = ORMSecretsGroupAssociation(
+ secrets_group=_update_group,
+ secret=_orm_secret,
+ access_type=_secret["access_type"],
+ secret_type=_secret["secret_type"],
+ )
+ _sga.validated_save()
+ if not check_sor_field(_update_group):
+ _update_group.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+ _update_group.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+ _update_group.validated_save()
+ return super().update(attrs)
+
+ def delete(self):
+ """Delete SecretsGroup in Nautobot from NautobotSecretsGroup object."""
+ self.adapter.job.logger.debug(f"Delete SecretsGroup uuid: {self.uuid}")
+ try:
+ secr = ORMSecretsGroup.objects.get(id=self.uuid)
+ super().delete()
+ secr.delete()
+ return self
+ except ORMSecretsGroup.DoesNotExist as err:
+ self.adapter.job.logger.warning(f"Unable to find SecretsGroup {self.uuid} for deletion. {err}")
+
+
+class NautobotGitRepository(GitRepository):
+ """Nautobot implementation of Bootstrap GitRepository model."""
+
+ @classmethod
+ def create(cls, adapter, ids, attrs):
+ """Create GitRepository in Nautobot from NautobotGitRepository object."""
+ adapter.job.logger.info(f'Creating Nautobot Git Repository: {ids["name"]}')
+ _secrets_group = None
+ if attrs.get("secrets_group"):
+ _secrets_group = ORMSecretsGroup.objects.get(name=attrs["secrets_group"])
+ new_gitrepository = ORMGitRepository(
+ name=ids["name"],
+ remote_url=attrs["url"],
+ branch=attrs["branch"],
+ secrets_group=_secrets_group,
+ provided_contents=attrs["provided_contents"],
+ )
+ new_gitrepository.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+ new_gitrepository.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+ new_gitrepository.validated_save()
+ return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+ def update(self, attrs):
+ """Update GitRepository in Nautobot from NautobotGitRepository object."""
+ self.adapter.job.logger.info(f"Updating GitRepository {self.name}")
+ _update_git_repo = ORMGitRepository.objects.get(name=self.name)
+ if attrs.get("url"):
+ _update_git_repo.remote_url = attrs["url"]
+ if attrs.get("branch"):
+ _update_git_repo.branch = attrs["branch"]
+ if attrs.get("secrets_group"):
+ _secrets_group = ORMSecretsGroup.objects.get(name=attrs["secrets_group"])
+ _update_git_repo.secrets_group = _secrets_group
+ if attrs.get("provided_contents"):
+ _update_git_repo.provided_contents = attrs["provided_contents"]
+ if not check_sor_field(_update_git_repo):
+ _update_git_repo.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+ _update_git_repo.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+ _update_git_repo.validated_save()
+ return super().update(attrs)
+
+ def delete(self):
+ """Delete GitRepository in Nautobot from NautobotGitRepository object."""
+ self.adapter.job.logger.debug(f"Delete GitRepository uuid: {self.uuid}")
+ try:
+ git_repo = ORMGitRepository.objects.get(id=self.uuid)
+ super().delete()
+ git_repo.delete()
+ return self
+ except ORMGitRepository.DoesNotExist as err:
+ self.adapter.job.logger.warning(f"Unable to find GitRepository {self.uuid} for deletion. 
+
+
+class NautobotDynamicGroup(DynamicGroup):
+    """Nautobot implementation of Bootstrap DynamicGroup model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create DynamicGroup in Nautobot from NautobotDynamicGroup object."""
+        adapter.job.logger.info(f'Creating Nautobot Dynamic Group: {ids["name"]}')
+        _content_type_id = lookup_content_type_id(nb_model="dynamic_groups", model_path=ids["content_type"])
+        if _content_type_id is None:
+            adapter.job.logger.warning(
+                f'Could not find ContentType for {ids["name"]} with ContentType {ids["content_type"]}'
+            )
+        _content_type = ContentType.objects.get_for_id(id=_content_type_id)
+        _new_nb_dg = ORMDynamicGroup(
+            name=ids["name"],
+            content_type=_content_type,
+            filter=attrs["dynamic_filter"],
+            description=attrs["description"],
+        )
+        _new_nb_dg.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        _new_nb_dg.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+
+        try:
+            _new_nb_dg.validated_save()
+        except ValidationError:
+            if attrs.get("dynamic_filter"):
+                _new_nb_dg.filter = attrs["dynamic_filter"]
+            if attrs.get("description"):
+                _new_nb_dg.description = attrs["description"]
+            _new_nb_dg.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+            _new_nb_dg.validated_save()
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update DynamicGroups in Nautobot from NautobotDynamicGroup object."""
+        self.adapter.job.logger.info(f"Updating DynamicGroup {self.name}")
+        _update_dyn_group = ORMDynamicGroup.objects.get(name=self.name)
+        if attrs.get("dynamic_filter"):
+            _update_dyn_group.filter = attrs["dynamic_filter"]
+        if attrs.get("description"):
+            _update_dyn_group.description = attrs["description"]
+        if not check_sor_field(_update_dyn_group):
+            _update_dyn_group.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        _update_dyn_group.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        _update_dyn_group.validated_save()
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete DynamicGroup in Nautobot from NautobotDynamicGroup object."""
+        self.adapter.job.logger.debug(f"Delete DynamicGroup: {self.name}")
+        try:
+            dyn_group = ORMDynamicGroup.objects.get(name=self.name)
+            super().delete()
+            dyn_group.delete()
+            return self
+        except ORMDynamicGroup.DoesNotExist as err:
+            self.adapter.job.logger.warning(f"Unable to find DynamicGroup {self.name} for deletion. {err}")
+
+
+class NautobotComputedField(ComputedField):
+    """Nautobot implementation of Bootstrap ComputedField model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create ComputedField in Nautobot from NautobotComputedField object."""
+        adapter.job.logger.info(f'Creating Nautobot Computed Field: {ids["label"]}')
+        _content_type_id = lookup_content_type_id(nb_model="custom_fields", model_path=attrs["content_type"])
+        if _content_type_id is None:
+            adapter.job.logger.warning(
+                f'Could not find ContentType for {ids["label"]} with ContentType {attrs["content_type"]}'
+            )
+        _content_type = ContentType.objects.get_for_id(id=_content_type_id)
+        _new_computed_field = ORMComputedField(
+            label=ids["label"], content_type=_content_type, template=attrs["template"]
+        )
+        _new_computed_field.validated_save()
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update ComputedField in Nautobot from NautobotComputedField object."""
+        self.adapter.job.logger.info(f"Updating ComputedField {self.label}")
+        comp_field = ORMComputedField.objects.get(label=self.label)
+        if attrs.get("content_type"):
+            _content_type_id = lookup_content_type_id(nb_model="custom_fields", model_path=attrs["content_type"])
+            if _content_type_id is None:
+                self.adapter.job.logger.warning(
+                    f'Could not find ContentType for {self.label} with ContentType {attrs["content_type"]}'
+                )
+            _content_type = ContentType.objects.get_for_id(id=_content_type_id)
+            comp_field.content_type = _content_type
+        if attrs.get("template"):
+            comp_field.template = attrs["template"]
+        comp_field.validated_save()
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete ComputedField in Nautobot from NautobotComputedField object."""
+        self.adapter.job.logger.debug(f"Delete ComputedField: {self.label}")
+        try:
+            comp_field = ORMComputedField.objects.get(label=self.label)
+            super().delete()
+            comp_field.delete()
+            return self
+        except ORMComputedField.DoesNotExist as err:
+            self.adapter.job.logger.warning(f"Unable to find ComputedField {self.label} for deletion. 
{err}") + + +class NautobotTag(Tag): + """Nautobot implementation of Bootstrap Tag model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create Tag in Nautobot from NautobotTag object.""" + _content_types = [] + adapter.job.logger.info(f'Creating Nautobot Tag: {ids["name"]}') + for _model in attrs["content_types"]: + adapter.job.logger.debug(f"Looking up {_model} in content types.") + _content_types.append(lookup_content_type_for_taggable_model_path(_model)) + _new_tag = ORMTag( + name=ids["name"], + color=attrs["color"], + description=attrs["description"], + ) + _new_tag.validated_save() + _new_tag.content_types.set(_content_types) + _new_tag.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_tag.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_tag.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Tag in Nautobot from NautobotTag object.""" + self.adapter.job.logger.info(f"Updating Tag {self.name}") + _update_tag = ORMTag.objects.get(name=self.name) + if attrs.get("color"): + _update_tag.color = attrs["color"] + if attrs.get("content_types"): + _content_types = [] + for _model in attrs["content_types"]: + self.adapter.job.logger.debug(f"Looking up {_model} in content types.") + _content_types.append(lookup_content_type_for_taggable_model_path(_model)) + _update_tag.content_types.set(_content_types) + if attrs.get("description"): + _update_tag.description = attrs["description"] + if not check_sor_field(_update_tag): + _update_tag.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _update_tag.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _update_tag.validated_save() + return super().update(attrs) + + def delete(self): + """Delete Tag in Nautobot from NautobotTag object.""" + self.adapter.job.logger.debug(f"Delete Tag: {self.name}") + try: + _tag = ORMTag.objects.get(name=self.name) + super().delete() + _tag.delete() + return self + except ORMTag.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find Tag {self.name} for deletion. {err}") + + +class NautobotGraphQLQuery(GraphQLQuery): + """Nautobot implementation of Bootstrap GraphQLQuery model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create GraphQLQuery in Nautobot from NautobotGraphQLQuery object.""" + adapter.job.logger.info(f'Creating Nautobot GraphQLQuery: {ids["name"]}') + _new_query = ORMGraphQLQuery(name=ids["name"], query=attrs["query"]) + _new_query.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update GraphQLQuery in Nautobot from NautobotGraphQLQuery object.""" + self.adapter.job.logger.info(f"Updating GraphQLQuery: {self.name}.") + _query = ORMGraphQLQuery.objects.get(name=self.name) + if attrs.get("query"): + _query.query = attrs["query"] + _query.validated_save() + return super().update(attrs) + + def delete(self): + """Delete GraphQLQuery in Nautobot from NautobotGraphQLQuery object.""" + self.adapter.job.logger.debug(f"Delete GraphQLQuery: {self.name}") + try: + _query = ORMGraphQLQuery.objects.get(name=self.name) + super().delete() + _query.delete() + return self + except ORMGraphQLQuery.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find GraphQLQuery {self.name} for deletion. 
{err}")
+
+
+if LIFECYCLE_MGMT:
+
+    class NautobotSoftware(Software):
+        """Nautobot implementation of Bootstrap Software model."""
+
+        @classmethod
+        def create(cls, adapter, ids, attrs):
+            """Create Software in Nautobot from NautobotSoftware object."""
+            adapter.job.logger.info(f'Creating Nautobot Software object {ids["platform"]} - {ids["version"]}.')
+            _platform = ORMPlatform.objects.get(name=ids["platform"])
+            _new_software = ORMSoftware(
+                version=ids["version"],
+                alias=attrs["alias"],
+                device_platform=_platform,
+                end_of_support=attrs["eos_date"],
+                long_term_support=attrs["long_term_support"],
+                pre_release=attrs["pre_release"],
+                documentation_url=attrs["documentation_url"],
+            )
+            if attrs.get("tags"):
+                _new_software.validated_save()
+                _new_software.tags.clear()
+                for tag in attrs["tags"]:
+                    _new_software.tags.add(ORMTag.objects.get(name=tag))
+            _new_software.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+            _new_software.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+            _new_software.validated_save()
+            return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+        def update(self, attrs):
+            """Update Software in Nautobot from NautobotSoftware object."""
+            self.adapter.job.logger.info(f"Updating Software: {self.platform} - {self.version}.")
+            _platform = ORMPlatform.objects.get(name=self.platform)
+            _update_software = ORMSoftware.objects.get(version=self.version, device_platform=_platform)
+            if "alias" in attrs:
+                _update_software.alias = attrs["alias"]
+            if attrs.get("release_date"):
+                _update_software.release_date = attrs["release_date"]
+            if attrs.get("eos_date"):
+                _update_software.end_of_support = attrs["eos_date"]
+            if "long_term_support" in attrs:
+                _update_software.long_term_support = attrs["long_term_support"]
+            if "pre_release" in attrs:
+                _update_software.pre_release = attrs["pre_release"]
+            if attrs.get("documentation_url"):
+                _update_software.documentation_url = attrs["documentation_url"]
+            elif attrs.get("documentation_url") == "":
+                _update_software.documentation_url = ""
+            if attrs.get("tags"):
+                _update_software.tags.clear()
+                for tag in attrs["tags"]:
+                    _update_software.tags.add(ORMTag.objects.get(name=tag))
+            if not check_sor_field(_update_software):
+                _update_software.custom_field_data.update(
+                    {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}
+                )
+            _update_software.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+            _update_software.validated_save()
+            return super().update(attrs)
+
+        def delete(self):
+            """Delete Software in Nautobot from NautobotSoftware object."""
+            try:
+                _platform = ORMPlatform.objects.get(name=self.platform)
+                _software = ORMSoftware.objects.get(version=self.version, device_platform=_platform)
+                super().delete()
+                _software.delete()
+                return self
+            except ORMSoftware.DoesNotExist as err:
+                self.adapter.job.logger.warning(
+                    f"Unable to find Software {self.platform} - {self.version} for deletion. 
{err}" + ) + + class NautobotSoftwareImage(SoftwareImage): + """Nautobot implementation of Bootstrap SoftwareImage model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create SoftwareImage in Nautobot from NautobotSoftwareImage object.""" + _tags = [] + if attrs["tags"] is not None: + for tag in attrs["tags"]: + _tags.append(ORMTag.objects.get(name=tag)) + _platform = ORMPlatform.objects.get(name=attrs["platform"]) + _software = ORMSoftware.objects.get(version=attrs["software_version"], device_platform=_platform) + _new_soft_image = ORMSoftwareImage( + software=_software, + image_file_name=attrs["file_name"], + image_file_checksum=attrs["image_file_checksum"], + hashing_algorithm=attrs["hashing_algorithm"], + download_url=attrs["download_url"], + default_image=attrs["default_image"], + ) + if attrs.get("tags"): + _new_soft_image.validated_save() + _new_soft_image.tags.clear() + for tag in attrs["tags"]: + _new_soft_image.tags.add(ORMTag.objects.get(name=tag)) + _new_soft_image.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_soft_image.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_soft_image.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update SoftwareImage in Nautobot from NautobotSoftwareImage object.""" + self.adapter.job.logger.info(f"Updating Software Image: {self.platform} - {self.software_version}.") + _platform = ORMPlatform.objects.get(name=self.platform) + _software = ORMSoftware.objects.get(version=self.software_version, device_platform=_platform) + _update_soft_image = ORMSoftwareImage.objects.get(software=_software) + if attrs.get("platform"): + _update_soft_image.platform = _platform + if attrs.get("software_version"): + _update_soft_image.software_version = attrs["software_version"] + if attrs.get("file_name"): + _update_soft_image.image_file_name = attrs["file_name"] + if attrs.get("image_file_checksum"): + _update_soft_image.image_file_checksum = attrs["image_file_checksum"] + if attrs.get("hashing_algorithm"): + _update_soft_image.hashing_algorithm = attrs["hashing_algorithm"] + if attrs.get("download_url"): + _update_soft_image.download_url = attrs["download_url"] + if attrs.get("default_image"): + _update_soft_image.default_image = attrs["default_image"] + if attrs.get("tags"): + _update_soft_image.tags.clear() + if attrs["tags"] is not None: + for tag in attrs["tags"]: + _update_soft_image.tags.add(ORMTag.objects.get(name=tag)) + if not check_sor_field(_update_soft_image): + _update_soft_image.custom_field_data.update( + {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")} + ) + _update_soft_image.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _update_soft_image.validated_save() + return super().update(attrs) + + def delete(self): + """Delete SoftwareImage in Nautobot from NautobotSoftwareImage object.""" + try: + _platform = ORMPlatform.objects.get(name=self.platform) + _software = ORMSoftware.objects.get(version=self.software_version, device_platform=_platform) + _soft_image = ORMSoftwareImage.objects.get(software=_software) + super().delete() + _soft_image.delete() + return self + except ORMSoftwareImage.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find SoftwareImage {self.software} for deletion. 
{err}")
+
+    class NautobotValidatedSoftware(ValidatedSoftware):
+        """Nautobot implementation of Bootstrap ValidatedSoftware model."""
+
+        @classmethod
+        def create(cls, adapter, ids, attrs):
+            """Create ValidatedSoftware in Nautobot from NautobotValidatedSoftware object."""
+            _devices = []
+            _device_types = []
+            _device_roles = []
+            _inventory_items = []
+            _object_tags = []
+            _platform = ORMPlatform.objects.get(name=attrs["platform"])
+            _software = ORMSoftware.objects.get(version=attrs["software_version"], device_platform=_platform)
+            _new_validated_software = ORMValidatedSoftware(
+                software=_software,
+                start=ids["valid_since"] if ids["valid_since"] is not None else datetime.today().date(),
+                end=ids["valid_until"],
+                preferred=attrs["preferred_version"],
+            )
+            _new_validated_software.custom_field_data.update(
+                {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}
+            )
+            _new_validated_software.custom_field_data.update(
+                {"last_synced_from_sor": datetime.today().date().isoformat()}
+            )
+            _new_validated_software.validated_save()
+            if "devices" in attrs:
+                if attrs["devices"]:
+                    for _dev in attrs["devices"]:
+                        _devices.append(ORMDevice.objects.get(name=_dev))
+                    _new_validated_software.devices.set(_devices)
+            if "device_types" in attrs:
+                if attrs["device_types"]:
+                    for _dev_type in attrs["device_types"]:
+                        _device_types.append(ORMDeviceType.objects.get(model=_dev_type))
+                    _new_validated_software.device_types.set(_device_types)
+            if "device_roles" in attrs:
+                if attrs["device_roles"]:
+                    for _dev_role in attrs["device_roles"]:
+                        _device_roles.append(ORMRole.objects.get(name=_dev_role))
+                    _new_validated_software.device_roles.set(_device_roles)
+            if "inventory_items" in attrs:
+                if attrs["inventory_items"]:
+                    for _inv_item in attrs["inventory_items"]:
+                        _inventory_items.append(ORMInventoryItem.objects.get(name=_inv_item))
+                    _new_validated_software.inventory_items.set(_inventory_items)
+            if "object_tags" in attrs:
+                if attrs["object_tags"]:
+                    for _obj_tag in attrs["object_tags"]:
+                        _object_tags.append(ORMTag.objects.get(name=_obj_tag))
+                    _new_validated_software.object_tags.set(_object_tags)
+            if "tags" in attrs:
+                if attrs["tags"] is not None:
+                    for _tag in attrs["tags"]:
+                        _new_validated_software.tags.add(ORMTag.objects.get(name=_tag))
+            _new_validated_software.validated_save()
+            return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+        def update(self, attrs):
+            """Update ValidatedSoftware in Nautobot from NautobotValidatedSoftware object."""
+            self.adapter.job.logger.info(f"Updating Validated Software - {self} with attrs {attrs}.")
+            _devices = []
+            _device_types = []
+            _device_roles = []
+            _inventory_items = []
+            _object_tags = []
+            _platform = ORMPlatform.objects.get(name=self.platform)
+            _software = ORMSoftware.objects.get(version=self.software_version, device_platform=_platform)
+            _update_validated_software = ORMValidatedSoftware.objects.get(
+                software=_software, start=self.valid_since, end=self.valid_until
+            )
+            if "preferred_version" in attrs:
+                _update_validated_software.preferred = attrs["preferred_version"]
+            if "tags" in attrs:
+                _update_validated_software.tags.clear()
+                if attrs["tags"] is not None:
+                    for _tag in attrs["tags"]:
+                        _update_validated_software.tags.add(ORMTag.objects.get(name=_tag))
+            if "devices" in attrs:
+                # FIXME: There might be a better way to handle this that's easier on the database.
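+                # clear() followed by set() rebuilds the whole relationship on every
+                # update; simple, but it issues extra queries per sync (hence the FIXME).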
+ _update_validated_software.devices.clear() + if attrs["devices"]: + for _dev in attrs["devices"]: + _devices.append(ORMDevice.objects.get(name=_dev)) + _update_validated_software.devices.set(_devices) + if "device_types" in attrs: + # FIXME: There might be a better way to handle this that's easier on the database. + _update_validated_software.device_types.clear() + if attrs["device_types"]: + for _dev_type in attrs["device_types"]: + _device_types.append(ORMDeviceType.objects.get(model=_dev_type)) + _update_validated_software.device_types.set(_device_types) + if "device_roles" in attrs: + # FIXME: There might be a better way to handle this that's easier on the database. + _update_validated_software.device_roles.clear() + if attrs["device_roles"]: + for _dev_role in attrs["device_roles"]: + _device_roles.append(ORMRole.objects.get(name=_dev_role)) + _update_validated_software.device_roles.set(_device_roles) + if "inventory_items" in attrs: + # FIXME: There might be a better way to handle this that's easier on the database. + _update_validated_software.inventory_items.clear() + if attrs["inventory_items"]: + for _inv_item in attrs["inventory_items"]: + _inventory_items.append(ORMInventoryItem.objects.get(name=_inv_item)) + _update_validated_software.inventory_items.set(_inventory_items) + if "object_tags" in attrs: + # FIXME: There might be a better way to handle this that's easier on the database. + _update_validated_software.object_tags.clear() + if attrs["object_tags"]: + for _obj_tag in attrs["object_tags"]: + _object_tags.append(ORMTag.objects.get(name=_obj_tag)) + _update_validated_software.object_tags.set(_object_tags) + if not check_sor_field(_update_validated_software): + _update_validated_software.custom_field_data.update( + {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")} + ) + _update_validated_software.custom_field_data.update( + {"last_synced_from_sor": datetime.today().date().isoformat()} + ) + _update_validated_software.validated_save() + return super().update(attrs) + + def delete(self): + """Delete ValidatedSoftware in Nautobot from NautobotValidatedSoftware object.""" + try: + _platform = ORMPlatform.objects.get(name=self.platform) + _software = ORMSoftware.objects.get(version=self.software_version, device_platform=_platform) + _validated_software = ORMValidatedSoftware.objects.get( + software=_software, start=self.valid_since, end=self.valid_until + ) + super().delete() + _validated_software.delete() + return self + except ORMValidatedSoftware.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find ValidatedSoftware {self} for deletion. 
{err}") diff --git a/nautobot_ssot/integrations/bootstrap/fixtures/develop.yml b/nautobot_ssot/integrations/bootstrap/fixtures/develop.yml new file mode 100644 index 000000000..893c92c05 --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/fixtures/develop.yml @@ -0,0 +1,2 @@ +--- +git_branch: "develop" diff --git a/nautobot_ssot/integrations/bootstrap/fixtures/global_settings.yml b/nautobot_ssot/integrations/bootstrap/fixtures/global_settings.yml new file mode 100755 index 000000000..f938033a0 --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/fixtures/global_settings.yml @@ -0,0 +1,794 @@ +--- +tenant_group: + - name: "Group1" + parent: "" + description: "" + - name: "Group2" + parent: "" + description: "" + - name: "Group3" + parent: "Group1" + description: "" +tenant: + - name: "Backbone" + tenant_group: "Group1" + description: "" + tags: [] + - name: "Datacenter" + tenant_group: "Group2" + description: "" + tags: ["Test"] +role: + - name: "spine_switches" + weight: + description: "" + color: "795548" + content_types: + - "dcim.device" + - name: "leaf_switches" + weight: + description: "" + color: "785530" + content_types: + - "dcim.device" + - name: "Switch" + weight: + description: "" + color: "9e9e9e" + content_types: + - "dcim.device" + - name: "Firewall" + weight: + description: "" + color: "9e9e9e" + content_types: + - "dcim.device" + - name: "Data Network" + weight: + description: "" + color: "9e9e9e" + content_types: + - "ipam.prefix" + - "ipam.vlan" + # Default Roles + - name: "Administrative" + weight: + description: "Unit plays an administrative role" + color: "2196f3" + content_types: + - "extras.contactassociation" + - name: "Anycast" + weight: + description: "" + color: "ffc107" + content_types: + - "ipam.ipaddress" + - name: "Billing" + weight: + description: "Unit plays a billing role" + color: "4caf50" + content_types: + - "extras.contactassociation" + - name: "CARP" + weight: + description: "" + color: "4caf50" + content_types: + - "ipam.ipaddress" + - name: "GLBP" + weight: + description: "" + color: "4caf50" + content_types: + - "ipam.ipaddress" + - name: "HSRP" + weight: + description: "" + color: "4caf50" + content_types: + - "ipam.ipaddress" + - name: "Loopback" + weight: + description: "" + color: "9e9e9e" + content_types: + - "ipam.ipaddress" + - name: "On Site" + weight: + description: "Unit plays an on site role" + color: "111111" + content_types: + - "extras.contactassociation" + - name: "Secondary" + weight: + description: "" + color: "2196f3" + content_types: + - "ipam.ipaddress" + - name: "Support" + weight: + description: "Unit plays a support role" + color: "ffeb3b" + content_types: + - "extras.contactassociation" + - name: "VIP" + weight: + description: "" + color: "4caf50" + content_types: + - "ipam.ipaddress" + - name: "VRRP" + weight: + description: "" + color: "4caf50" + content_types: + - "ipam.ipaddress" +manufacturer: + - name: "Generic" + description: "For generic devices like patch panels" + - name: "Palo Alto Networks" + description: "" + - name: "Arista" + description: "" + - name: "Cisco" + description: "" +platform: + - name: "paloalto_panos" + manufacturer: "Palo Alto Networks" + network_driver: "paloalto_panos" + napalm_driver: "" + napalm_arguments: {} + description: "PanOS Firewalls" + - name: "cisco_ios" + manufacturer: "Cisco" + network_driver: "cisco_ios" + napalm_driver: "" + napalm_arguments: {} + description: "Cisco Devices" + - name: "arista_eos" + manufacturer: "Arista" + network_driver: "arista_eos" + napalm_driver: 
"" + napalm_arguments: {} + description: "Arista Devices" +location_type: + - name: "Region" + parent: "" + nestable: true + description: "" + content_types: [] + - name: "Site" + parent: "Region" + nestable: false + description: "" + content_types: + - "dcim.device" + - "ipam.namespace" + - "ipam.prefix" + - "ipam.vlan" + - "ipam.vlangroup" + - "circuits.circuittermination" + - name: "Building" + parent: "Site" + nestable: false + description: "" + content_types: + - "dcim.device" + - "ipam.namespace" + - "ipam.prefix" + - "ipam.vlan" + - "ipam.vlangroup" + - "circuits.circuittermination" +location: + - name: "Southeast" + location_type: "Region" + parent: "" + status: "Active" + facility: "" + asn: + time_zone: "US/Eastern" + description: "" + tenant: "" + physical_address: "" + shipping_address: "" + latitude: + longitude: + contact_name: "" + contact_phone: "" + contact_email: "" + tags: [] + - name: "Atlanta" + location_type: "Site" + parent: "Southeast" + status: "Active" + facility: "AT1" + asn: 65001 + time_zone: "US/Eastern" + description: "" + tenant: "" + physical_address: | + 180 Peachtree St NE + FL 2 , FL 3 , FL 6 + Atlanta, GA 30303 + United States + shipping_address: | + Example Company + 180 Peachtree St NE + Loading Dock 1 + Atlanta, GA 30303 + United States + latitude: + longitude: + contact_name: "" + contact_phone: "" + contact_email: "" + tags: [] + - name: "Atlanta4" + location_type: "Site" + parent: "Southeast" + status: "Active" + facility: "AT4" + asn: 65004 + time_zone: "US/Eastern" + description: "" + tenant: "" + physical_address: | + 450 Interstate to N PKWY + Atlanta, GA 30339 + United States + shipping_address: | + Example Company + 450 Interstate to N PKWY + Loading Dock 1 + Atlanta, GA 30339 + United States + latitude: + longitude: + contact_name: "" + contact_phone: "" + contact_email: "" + tags: [] +team: + - name: "Datacenter" + phone: "123-456-7890" + email: "datacenter@example.com" + address: "2715 N Vermont Canyon Rd, Los Angeles, CA 90027" + # TODO: Need to consider how to allow loading from teams or contacts models. + # contacts: [] + - name: "Backbone" + phone: "123-456-7890" + email: "backbone@example.com" + address: "1600 S Azusa Ave, Rowland Heights, CA 91748" + # TODO: Need to consider how to allow loading from teams or contacts models. 
+ # contacts: [] +contact: + - name: "Jennifer Parker" + phone: "888-555-4823" + email: "jenny@future.com" + address: "12417 Philadelphia St, Whittier, CA 90601" + teams: + - "Backbone" + - "Datacenter" + - name: "Marty McFly" + phone: "888-555-1955" + email: "marty@future.com" + address: "9303 Roslyndale Ave, Arleta, CA 91331" + teams: + - "Backbone" +provider: + - name: "Provider1" + asn: 65000 + account_number: "12345678" + portal_url: "https://provider1.com" + noc_contact: "" + admin_contact: "" + tags: [] + - name: "Provider2" + asn: 65001 + account_number: "87654321" + portal_url: "https://provider2.com" + noc_contact: "" + admin_contact: "" + tags: [] +provider_network: + - name: "Provider1 Metro-E" + provider: "Provider1" + description: "" + comments: "" + tags: [] + - name: "Provider2 Metro-E" + provider: "Provider2" + description: "" + comments: "" + tags: [] +circuit_type: + - name: "Metro-E" + description: "Metro ethernet" + - name: "DWDM" + description: "" + - name: "Internet" + description: "" +circuit: + - circuit_id: "METRO-65002-CUST1" + provider: "Provider1" + circuit_type: "Metro-E" + status: "Active" + date_installed: + commit_rate_kbps: 1000000 + description: "" + tenant: "" + tags: [] + - circuit_id: "INTERNET-65002-CUST1" + provider: "Provider2" + circuit_type: "Internet" + status: "Active" + date_installed: + commit_rate_kbps: 1000000 + description: "" + tenant: "" + tags: [] +circuit_termination: + - name: "METRO-65002-CUST1__Provider1__A" + termination_type: "Location" + location: "Atlanta" + provider_network: "" + port_speed_kbps: 1000000 + upstream_speed_kbps: + cross_connect_id: "" + patch_panel_or_ports: "" + description: "" + tags: [] + - name: "METRO-65002-CUST1__Provider1__Z" + termination_type: "Provider Network" + location: "" + provider_network: "Provider2 Metro-E" + port_speed_kbps: 1000000 + upstream_speed_kbps: + cross_connect_id: "" + patch_panel_or_ports: "" + description: "" + tags: [] + - name: "INTERNET-65002-CUST1__Provider2__A" + termination_type: "Location" + location: "Atlanta4" + provider_network: "" + port_speed_kbps: 1000000 + upstream_speed_kbps: + cross_connect_id: "" + patch_panel_or_ports: "" + description: "" + tags: [] +secret: + - name: "Github_Service_Acct" + provider: "environment-variable" # or text-file + parameters: + variable: "GITHUB_SERVICE_ACCT" + path: + - name: "Github_Service_Token" + provider: "environment-variable" # or text-file + parameters: + variable: "GITHUB_SERVICE_TOKEN" + path: +secrets_group: + - name: "Github_Service_Account" + secrets: + - name: "Github_Service_Acct" + secret_type: "username" + access_type: "HTTP(S)" + - name: "Github_Service_Token" + secret_type: "token" + access_type: "HTTP(S)" +git_repository: + - name: "Backbone Config Contexts" + url: "https://github.com/nautobot/backbone-config-contexts.git" + branch: "main" + secrets_group_name: "Github_Service_Account" + provided_data_type: + - "config contexts" + - name: "Datacenter Config Contexts" + url: "https://github.com/nautobot/datacenter-config-contexts.git" + secrets_group_name: "Github_Service_Account" + provided_data_type: + - "config contexts" + - name: "Metro Config Contexts" + url: "https://github.com/nautobot/metro-config-contexts.git" + secrets_group_name: + provided_data_type: + - "config contexts" + - name: "Access Config Contexts" + url: "https://github.com/nautobot/access-config-contexts.git" + secrets_group_name: + provided_data_type: + - "config contexts" +dynamic_group: + - name: "Backbone Domain" + content_type: "dcim.device" 
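+    # NOTE: `filter` must be a JSON document that is a valid filter for the content_type above.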
+ description: "" + filter: | + { + "tenant": [ + "Backbone" + ] + } + - name: "Datacenter" + content_type: "dcim.device" + description: "" + filter: | + { + "location": [ + "Atlanta" + ], + "platform": [ + "arista_eos", + "paloalto_panos" + ] + } +computed_field: + - label: "Compliance Change" + content_type: "dcim.device" + template: "{{ obj | get_change_log }}" +tag: + - name: "Backbone" + color: "795547" + description: "" + content_types: + - "dcim.device" + - name: "Access" + color: "795548" + description: "" + content_types: + - "dcim.device" + - "ipam.ipaddress" + - name: "Test" + color: "795548" + description: "Test" + content_types: + - "circuits.circuit" + - "circuits.circuittermination" + - "circuits.provider" + - "circuits.providernetwork" + - "dcim.cable" + - "dcim.consoleport" + - "dcim.consoleserverport" + - "dcim.device" + - "dcim.devicebay" + - "dcim.devicetype" + - "dcim.frontport" + - "dcim.interface" + - "dcim.inventoryitem" + - "dcim.powerfeed" + - "dcim.poweroutlet" + - "dcim.powerpanel" + - "dcim.powerport" + - "dcim.rack" + - "dcim.rackreservation" + - "dcim.rearport" + - "dcim.location" + - "dcim.deviceredundancygroup" + - "extras.gitrepository" + - "extras.job" + - "extras.secret" + - "ipam.namespace" + - "ipam.ipaddress" + - "ipam.prefix" + - "ipam.routetarget" + - "ipam.service" + - "ipam.vlan" + - "ipam.vrf" + - "tenancy.tenant" + - "virtualization.cluster" + - "virtualization.virtualmachine" + - "virtualization.vminterface" +graph_ql_query: + - name: "Backbone Devices" + query: | + query ($device_id: ID!) { + device(id: $device_id) { + config_context + hostname: name + device_role { + name + } + tenant { + name + } + primary_ip4 { + address + } + } + } + - name: "Datacenter Devices" + query: | + query ($device_id: ID!) { + device(id: $device_id) { + config_context + hostname: name + device_role { + name + } + tenant { + name + } + primary_ip4 { + address + } + } + } +software: + - device_platform: "arista_eos" + version: "4.25.10M" + alias: "" + release_date: "2023-12-04" + eos_date: "2023-12-05" + documentation_url: "https://arista.com" + lts: false + pre_release: false + tags: ["Backbone"] + - device_platform: "cisco_ios" + version: "03.11.04.E" + alias: "Cisco Validated" + release_date: + eos_date: "2023-12-04" + documentation_url: "" + lts: false + pre_release: false + tags: ["Test"] + - device_platform: "paloalto_panos" + version: "11.0.3" + alias: "Panos Preferred" + release_date: + eos_date: "2024-12-04" + documentation_url: "https://paloaltonetworks.com" + lts: false + pre_release: false + tags: ["Test"] + - device_platform: "arista_eos" + version: "15.4.3" + alias: "Arista Preferred" + release_date: + eos_date: "2024-12-04" + documentation_url: "https://arista.com" + lts: false + pre_release: false + tags: ["Test"] +software_image: + - software: "arista_eos - 15.4.3" + platform: "arista_eos" + software_version: "15.4.3" + file_name: "arista15.4.3.bin" + download_url: "https://arista-files.com" + image_file_checksum: "" + hashing_algorithm: "" + default_image: false + tags: ["Test"] + - software: "paloalto_panos - 11.0.3" + platform: "paloalto_panos" + software_version: "11.0.3" + file_name: "paloalto_11.0.3.bin" + download_url: "https://paloaltonetworks.com" + image_file_checksum: "o234i09usdfsflkj" + hashing_algorithm: "SHA256" + default_image: false + tags: [] +validated_software: + - software: "arista_eos - 4.25.10M" + valid_since: 2023-08-07 + valid_until: 2025-01-01 + preferred_version: false + devices: [] + device_types: [] + device_roles: [] + 
inventory_items: [] + object_tags: [] + tags: ["Test"] + - software: "cisco_ios - 03.11.04.E" + valid_since: 2023-08-07 + valid_until: + preferred_version: false + devices: [] + device_types: [] + device_roles: [] + inventory_items: [] + object_tags: [] + tags: [] + - software: "paloalto_panos - 11.0.3" + valid_since: 2023-08-07 + valid_until: + preferred_version: false + devices: [] + device_types: [] + device_roles: ["Firewall"] + inventory_items: [] + object_tags: [] + tags: [] + - software: "arista_eos - 15.4.3" + valid_since: 2023-08-07 + valid_until: 2025-08-09 + preferred_version: true + devices: [] + device_types: [] + device_roles: [] + inventory_items: [] + object_tags: ["Backbone"] + tags: ["Test"] + - software: "arista_eos - 15.4.3" + valid_since: 2023-08-07 + valid_until: + preferred_version: true + devices: [] + device_types: [] + device_roles: [] + inventory_items: [] + object_tags: [] + tags: [] +namespace: + - name: "Global" + description: "" + location: "" + - name: "Customer1" + description: "Customer1 IPAM Namespace" + location: "Atlanta" + - name: "Customer2" + description: "Customer2 IPAM Namespace" + location: "Atlanta4" +rir: + - name: "RFC1918" + private: true + description: "Private IP Space" + - name: "ARIN" + private: false + description: "American Registry for Internet Numbers" +vlan_group: + - name: "Atlanta VLANs" + location: "Atlanta" + description: "" + - name: "Atlanta4 VLANs" + location: "Atlanta4" + description: "" +vlan: + - name: "vlan10" + vid: 10 + description: "" + status: "Active" + role: "Data Network" + locations: ["Atlanta"] + vlan_group: "Atlanta VLANs" + tenant: "" + tags: [] + - name: "vlan20" + vid: 20 + description: "" + status: "Reserved" + role: "Data Network" + locations: ["Atlanta", "Atlanta4"] + vlan_group: "Atlanta VLANs" + tenant: "" + tags: [] + - name: "vlan30" + vid: 30 + description: "" + status: "Reserved" + role: "Data Network" + locations: [] + vlan_group: "Atlanta VLANs" + tenant: "" + tags: [] + - name: "vlan30" + vid: 30 + description: "" + status: "Active" + role: "" + locations: [] + vlan_group: "" + tenant: "" + tags: [] +vrf: + - name: "blue" + namespace: "Global" + route_distinguisher: "65000:1" + description: "" + # TODO: Need to consider how to allow loading from vrfs or prefix models. + # prefixes: [] + tenant: "" + tags: [] + - name: "red" + namespace: "Global" + route_distinguisher: "65000:2" + description: "" + # TODO: Need to consider how to allow loading from vrfs or prefix models. + # prefixes: [] + tenant: "" + tags: [] + - name: "blue" + namespace: "Customer1" + route_distinguisher: "65000:1" + description: "" + # TODO: Need to consider how to allow loading from vrfs or prefix models. + # prefixes: [] + tenant: "" + tags: [] +prefix: + - network: "10.0.0.0/24" + namespace: "Customer1" + prefix_type: "network" # network, container, or pool + status: "Active" + role: "Data Network" + rir: "RFC1918" + date_allocated: 2024-06-01 + description: "" + # TODO: Need to consider how to allow loading from vrfs or prefix models. + vrfs: [] + locations: [] + vlan: "" + tenant: "" + tags: [] + - network: "10.0.0.0/24" + namespace: "Customer2" + prefix_type: "network" # network, container, or pool + status: "Active" + role: "Data Network" + rir: "RFC1918" + date_allocated: "2024-06-01 12:00:00" + description: "" + # TODO: Need to consider how to allow loading from vrfs or prefix models. 
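+    # vrfs reference "<vrf name>__<namespace>" and vlan references "<vlan name>__<vid>__<vlan group>",
+    # matching the composite keys used by other entries in this file.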
+ vrfs: [] + locations: [] + vlan: "" + tenant: "" + tags: [] + - network: "10.0.10.0/24" + namespace: "Global" + prefix_type: "network" # network, container, or pool + status: "Active" + role: "Data Network" + rir: "RFC1918" + date_allocated: + description: "" + # TODO: Need to consider how to allow loading from vrfs or prefix models. + vrfs: [] + locations: ["Atlanta", "Atlanta4"] + vlan: "vlan10__10__Atlanta VLANs" + tenant: "" + tags: [] + - network: "192.168.0.0/24" + namespace: "Customer1" + prefix_type: "network" # network, container, or pool + status: "Active" + role: "Data Network" + rir: "RFC1918" + date_allocated: + description: "" + # TODO: Need to consider how to allow loading from vrfs or prefix models. + vrfs: ["blue__Customer1"] + locations: ["Atlanta"] + vlan: "" + tenant: "" + tags: [] + - network: "192.168.0.0/24" + namespace: "Global" + prefix_type: "network" # network, container, or pool + status: "Active" + role: "Data Network" + rir: "RFC1918" + date_allocated: + description: "" + # TODO: Need to consider how to allow loading from vrfs or prefix models. + vrfs: ["red__Global"] + locations: ["Atlanta"] + vlan: "" + tenant: "" + tags: [] + - network: "192.168.1.0/24" + namespace: "" + prefix_type: "network" # network, container, or pool + status: "Active" + role: "" + rir: "" + date_allocated: + description: "" + # TODO: Need to consider how to allow loading from vrfs or prefix models. + vrfs: [] + locations: [] + vlan: "" + tenant: "" + tags: [] diff --git a/nautobot_ssot/integrations/bootstrap/fixtures/production.yml b/nautobot_ssot/integrations/bootstrap/fixtures/production.yml new file mode 100644 index 000000000..e5cf4dc6b --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/fixtures/production.yml @@ -0,0 +1,2 @@ +--- +git_branch: "production" diff --git a/nautobot_ssot/integrations/bootstrap/fixtures/staging.yml b/nautobot_ssot/integrations/bootstrap/fixtures/staging.yml new file mode 100644 index 000000000..c7915b439 --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/fixtures/staging.yml @@ -0,0 +1,2 @@ +--- +git_branch: "staging" diff --git a/nautobot_ssot/integrations/bootstrap/jobs.py b/nautobot_ssot/integrations/bootstrap/jobs.py new file mode 100644 index 000000000..a98535e3a --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/jobs.py @@ -0,0 +1,160 @@ +"""Jobs for bootstrap SSoT integration.""" + +import os + +from nautobot.apps.jobs import BooleanVar, ChoiceVar + +from nautobot_ssot.integrations.bootstrap.diffsync.adapters import bootstrap, nautobot +from nautobot_ssot.jobs.base import DataMapping, DataSource, DataTarget + +name = "Bootstrap SSoT" # pylint: disable=invalid-name + + +class BootstrapDataSource(DataSource): + """Bootstrap SSoT Data Source.""" + + debug = BooleanVar(description="Enable for more verbose debug logging", default=False) + load_source = ChoiceVar( + choices=( + ("file", "File"), + ("git", "Git"), + ("env_var", "Environment Variable"), + ), + description="Where to load the yaml files from", + label="Load Source", + default="env_var", + ) + + class Meta: # pylint: disable=too-few-public-methods + """Meta data for bootstrap.""" + + name = "Bootstrap to Nautobot" + data_source = "Bootstrap" + data_target = "Nautobot" + description = "Sync information from Bootstrap to Nautobot" + + @classmethod + def config_information(cls): + """Dictionary describing the configuration of this DataSource.""" + return { + "Git loading source": os.getenv("NAUTOBOT_BOOTSTRAP_SSOT_LOAD_SOURCE"), + "Git branch": 
os.getenv("NAUTOBOT_BOOTSTRAP_SSOT_ENVIRONMENT_BRANCH"),
+        }
+
+    @classmethod
+    def data_mappings(cls):
+        """List describing the data mappings involved in this DataSource."""
+        return (
+            DataMapping("tenant_group", "", "TenantGroup", "tenancy:tenant-groups"),
+            DataMapping("tenant", "", "Tenant", "tenancy:tenant"),
+            DataMapping("role", "", "Role", "extras:role"),
+            DataMapping("manufacturer", "", "Manufacturer", "dcim.manufacturer"),
+            DataMapping("platform", "", "Platform", "dcim.platform"),
+            DataMapping("location_type", "", "LocationType", "dcim.location-type"),
+            DataMapping("location", "", "Location", "dcim.location"),
+            DataMapping("secrets", "", "Secrets", "extras:secrets"),
+            DataMapping("secrets_groups", "", "SecretsGroup", "extras:secrets-groups"),
+            DataMapping("git_repositories", "", "GitRepository", "extras:git-repositories"),
+            DataMapping("dynamic_groups", "", "DynamicGroup", "extras:dynamic-groups"),
+            DataMapping("computed_field", "", "ComputedField", "extras:computed-field"),
+            DataMapping("tags", "", "Tag", "extras.tag"),
+            DataMapping("graphql_query", "", "GraphQLQuery", "extras:graphql-query"),
+            DataMapping("team", "", "Team", "extras.team"),
+            DataMapping("contact", "", "Contact", "extras.contact"),
+            DataMapping("provider", "", "Provider", "circuits.provider"),
+            DataMapping("provider_network", "", "ProviderNetwork", "circuits.provider_network"),
+            DataMapping("circuit_type", "", "CircuitType", "circuits.circuit_type"),
+            DataMapping("circuit", "", "Circuit", "circuits.circuit"),
+            DataMapping(
+                "circuit_termination",
+                "",
+                "CircuitTermination",
+                "circuits.circuit_termination",
+            ),
+            DataMapping("namespace", "", "Namespace", "ipam.namespace"),
+            DataMapping("rir", "", "RIR", "ipam.rir"),
+            DataMapping("vlan_group", "", "VLANGroup", "ipam.vlan_group"),
+            DataMapping("vlan", "", "VLAN", "ipam.vlan"),
+            DataMapping("vrf", "", "VRF", "ipam.vrf"),
+            DataMapping("prefix", "", "Prefix", "ipam.prefix"),
+        )
+
+    def load_source_adapter(self):
+        """Load data from Bootstrap into DiffSync models."""
+        self.source_adapter = bootstrap.BootstrapAdapter(job=self, sync=self.sync)
+        self.source_adapter.load()
+
+    def load_target_adapter(self):
+        """Load data from Nautobot into DiffSync models."""
+        self.target_adapter = nautobot.NautobotAdapter(job=self, sync=self.sync)
+        self.target_adapter.load()
+
+    def run(self, load_source, dryrun, memory_profiling, debug, *args, **kwargs):  # pylint: disable=arguments-differ
+        """Perform data synchronization."""
+        self.debug = debug
+        self.dryrun = dryrun
+        self.memory_profiling = memory_profiling
+        self.load_source = load_source
+        super().run(dryrun=self.dryrun, memory_profiling=self.memory_profiling, *args, **kwargs)
+
+
+class BootstrapDataTarget(DataTarget):
+    """Bootstrap SSoT Data Target."""
+
+    debug = BooleanVar(description="Enable for more verbose debug logging", default=False)
+    write_destination = ChoiceVar(
+        choices=(
+            ("file", "File"),
+            ("git", "Git"),
+            ("env_var", "Environment Variable"),
+        ),
+        description="Where to write the yaml files to",
+        label="Write Destination",
+        default="env_var",
+    )
+
+    class Meta:  # pylint: disable=too-few-public-methods
+        """Meta data for Bootstrap."""
+
+        name = "Nautobot to Bootstrap"
+        data_source = "Nautobot"
+        data_target = "Bootstrap"
+        description = "Sync information from Nautobot to Bootstrap"
+
+    @classmethod
+    def config_information(cls):
+        """Dictionary describing the configuration of this DataTarget."""
+        return {}
+
+    @classmethod
+    def data_mappings(cls):
+        """List describing the data mappings involved in this DataTarget."""
+        return ()
+
+    def load_source_adapter(self):
+        """Load data from Nautobot into DiffSync models."""
+        self.source_adapter = nautobot.NautobotAdapter(job=self, sync=self.sync)
+        self.source_adapter.load()
+
+    def load_target_adapter(self):
+        """Load data from Bootstrap into DiffSync models."""
+        self.target_adapter = bootstrap.BootstrapAdapter(job=self, sync=self.sync)
+        self.target_adapter.load()
+
+    def run(self, write_destination, dryrun, memory_profiling, debug, *args, **kwargs):  # pylint: disable=arguments-differ
+        """Perform data synchronization."""
+        self.debug = debug
+        self.dryrun = dryrun
+        self.memory_profiling = memory_profiling
+        self.write_destination = write_destination
+        super().run(dryrun=self.dryrun, memory_profiling=self.memory_profiling, *args, **kwargs)
+
+
+jobs = [BootstrapDataSource, BootstrapDataTarget]
diff --git a/nautobot_ssot/integrations/bootstrap/signals.py b/nautobot_ssot/integrations/bootstrap/signals.py
new file mode 100644
index 000000000..07c6d144f
--- /dev/null
+++ b/nautobot_ssot/integrations/bootstrap/signals.py
@@ -0,0 +1,115 @@
+"""Signals triggered when Nautobot starts to perform certain actions."""
+
+import importlib.util
+
+from django.conf import settings
+from nautobot.core.signals import nautobot_database_ready
+from nautobot.extras.choices import CustomFieldTypeChoices
+
+from nautobot_ssot.utils import create_or_update_custom_field
+
+LIFECYCLE_MGMT = bool(importlib.util.find_spec("nautobot_device_lifecycle_mgmt"))
+
+
+def register_signals(sender):
+    """Register signals for the Bootstrap integration."""
+    nautobot_database_ready.connect(nautobot_database_ready_callback, sender=sender)
+
+
+def nautobot_database_ready_callback(sender, *, apps, **kwargs):  # pylint: disable=unused-argument
+    """Add the System of Record and Last Synced custom fields to the models synced by the Bootstrap integration.
+
+    Callback function triggered by the nautobot_database_ready signal when the Nautobot database is fully ready.
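+
+    Creates the "system_of_record" and "last_synced_from_sor" custom fields and attaches them to the
+    content types of every model enabled via the `bootstrap_models_to_sync` plugin setting;
+    `computed_field` and `graph_ql_query` are skipped because they do not support custom fields.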
+ """ + # pylint: disable=invalid-name, too-many-locals + ContentType = apps.get_model("contenttypes", "ContentType") + Manufacturer = apps.get_model("dcim", "Manufacturer") + Platform = apps.get_model("dcim", "Platform") + TenantGroup = apps.get_model("tenancy", "TenantGroup") + Tenant = apps.get_model("tenancy", "Tenant") + Team = apps.get_model("extras", "Team") + Contact = apps.get_model("extras", "Contact") + Location = apps.get_model("dcim", "Location") + LocationType = apps.get_model("dcim", "LocationType") + Namespace = apps.get_model("ipam", "Namespace") + RIR = apps.get_model("ipam", "RiR") + VLANGroup = apps.get_model("ipam", "VLANGroup") + VLAN = apps.get_model("ipam", "VLAN") + VRF = apps.get_model("ipam", "VRF") + Prefix = apps.get_model("ipam", "Prefix") + Provider = apps.get_model("circuits", "Provider") + ProviderNetwork = apps.get_model("circuits", "ProviderNetwork") + CircuitType = apps.get_model("circuits", "CircuitType") + Circuit = apps.get_model("circuits", "Circuit") + CircuitTermination = apps.get_model("circuits", "CircuitTermination") + Tag = apps.get_model("extras", "Tag") + Secret = apps.get_model("extras", "Secret") + SecretsGroup = apps.get_model("extras", "SecretsGroup") + DynamicGroup = apps.get_model("extras", "DynamicGroup") + GitRepository = apps.get_model("extras", "GitRepository") + Role = apps.get_model("extras", "Role") + + if LIFECYCLE_MGMT: + SoftwareLCM = apps.get_model("nautobot_device_lifecycle_mgmt", "SoftwareLCM") + SoftwareImageLCM = apps.get_model("nautobot_device_lifecycle_mgmt", "SoftwareImageLCM") + ValidatedSoftwareLCM = apps.get_model("nautobot_device_lifecycle_mgmt", "ValidatedSoftwareLCM") + + signal_to_model_mapping = { + "manufacturer": Manufacturer, + "platform": Platform, + "role": Role, + "tenant_group": TenantGroup, + "tenant": Tenant, + "team": Team, + "contact": Contact, + "location": Location, + "location_type": LocationType, + "namespace": Namespace, + "rir": RIR, + "vlan_group": VLANGroup, + "vlan": VLAN, + "vrf": VRF, + "prefix": Prefix, + "provider": Provider, + "provider_network": ProviderNetwork, + "circuit_type": CircuitType, + "circuit": Circuit, + "circuit_termination": CircuitTermination, + "tag": Tag, + "secret": Secret, + "secrets_group": SecretsGroup, + "dynamic_group": DynamicGroup, + "git_repository": GitRepository, + } + + if LIFECYCLE_MGMT: + signal_to_model_mapping.update( + { + "software": SoftwareLCM, + "software_image": SoftwareImageLCM, + "validated_software": ValidatedSoftwareLCM, + } + ) + + sync_custom_field, _ = create_or_update_custom_field( + key="last_synced_from_sor", + field_type=CustomFieldTypeChoices.TYPE_DATE, + label="Last sync from System of Record", + ) + sor_custom_field, _ = create_or_update_custom_field( + key="system_of_record", + field_type=CustomFieldTypeChoices.TYPE_TEXT, + label="System of Record", + ) + + models_to_sync = settings.PLUGINS_CONFIG.get("nautobot_ssot", {}).get("bootstrap_models_to_sync", {}) + no_cf = ["computed_field", "graph_ql_query"] + try: + for model in models_to_sync: + if model not in no_cf and models_to_sync[model] is True: + model_ct = ContentType.objects.get_for_model(signal_to_model_mapping[model]) + sor_custom_field.content_types.add(model_ct.id) + sync_custom_field.content_types.add(model_ct.id) + except Exception as e: + print(f"Error occurred: {e}") + raise diff --git a/nautobot_ssot/integrations/bootstrap/utils/__init__.py b/nautobot_ssot/integrations/bootstrap/utils/__init__.py new file mode 100644 index 000000000..c0e85e958 --- /dev/null +++ 
b/nautobot_ssot/integrations/bootstrap/utils/__init__.py
@@ -0,0 +1,144 @@
+"""Utility functions for working with bootstrap and Nautobot."""
+
+import inspect
+import os
+
+from django.contrib.contenttypes.models import ContentType
+from django.core.exceptions import ValidationError
+from django.core.validators import URLValidator
+from django.db import models
+from nautobot.extras.datasources.registry import get_datasource_content_choices
+from nautobot.extras.models import Contact, Team
+from nautobot.extras.utils import FeatureQuery, RoleModelsQuery, TaggableClassesQuery
+
+from nautobot_ssot.integrations.bootstrap.constants import content_model_path_mapping
+
+
+def is_running_tests():
+    """Check whether running unittests or actual job."""
+    for frame in inspect.stack():
+        if frame.filename.endswith("unittest/case.py"):
+            return True
+    return False
+
+
+def check_sor_field(model):
+    """Check if the System of Record field is present and is set to "Bootstrap"."""
+    return (
+        "system_of_record" in model.custom_field_data
+        and model.custom_field_data["system_of_record"] is not None
+        and os.getenv("SYSTEM_OF_RECORD", "Bootstrap") in model.custom_field_data["system_of_record"]
+    )
+
+
+def get_sor_field_nautobot_object(nb_object):
+    """Get the System of Record field from an object."""
+    _sor = ""
+    if "system_of_record" in nb_object.custom_field_data:
+        _sor = (
+            nb_object.custom_field_data["system_of_record"]
+            if nb_object.custom_field_data["system_of_record"] is not None
+            else ""
+        )
+    return _sor
+
+
+def lookup_content_type(content_model_path, content_type):
+    """Lookup content type for a GitRepository object."""
+    if content_type in content_model_path_mapping:
+        return content_model_path_mapping[content_type]
+    _choices = get_datasource_content_choices(content_model_path)
+    _found_type = None
+    for _element in _choices:
+        if _element[1] == content_type:
+            _found_type = _element[0]
+            return _found_type
+    return None
+
+
+def lookup_content_type_id(nb_model, model_path):
+    """Find ContentType choices for a model path and return the ContentType ID."""
+    _choices = FeatureQuery(nb_model).get_choices()
+    _found_type = None
+    for _element in _choices:
+        if _element[0] == model_path:
+            _found_type = _element[1]
+            return _found_type
+    return None
+
+
+def lookup_content_type_model_path(nb_model, content_id):
+    """Find ContentType choices for a ContentType ID and return the model path."""
+    _choices = FeatureQuery(nb_model).get_choices()
+    _found_type = None
+    for _element in _choices:
+        if _element[1] == content_id:
+            _found_type = _element[0]
+            return _found_type
+    return None
+
+
+def lookup_tag_content_type_model_path(content_id):
+    """Find model paths for a given ContentType ID for Tag Objects."""
+    _content_type = ContentType.objects.get(id=content_id)
+    return f"{_content_type.model}.{_content_type.name.replace(' ', '')}"
+
+
+def lookup_model_for_taggable_class_id(content_id):
+    """Find a model path for a given ContentType ID."""
+    _choices = TaggableClassesQuery().get_choices()
+    _found_type = None
+    for _element in _choices:
+        if _element[1] == content_id:
+            _found_type = _element[0]
+            return _found_type
+    return None
+
+
+def lookup_content_type_for_taggable_model_path(content_model_path):
+    """Lookup the ContentType for a taggable model path."""
+    _app_label = content_model_path.split(".", 1)[0]
+    _model = content_model_path.split(".", 1)[1]
+
+    return ContentType.objects.get(model=_model, app_label=_app_label)
+
+
+def string_to_urlfield(url):
+    """Turn string url into a URLField object."""
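+    # NOTE: returns a models.URLField instance rather than a plain string; an invalid
+    # URL falls back to a field whose default is https://example.com.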
+    url_validator = URLValidator()
+
+    try:
+        url_validator(url)
+    except ValidationError:
+        return models.URLField(default="https://example.com", blank=True)
+
+    return models.URLField(default=url, blank=True, null=True)
+
+
+def lookup_model_for_role_id(content_id):
+    """Find a model path for a given ContentType ID."""
+    _choices = RoleModelsQuery().get_choices()
+    _found_type = None
+    for _element in _choices:
+        if _element[1] == content_id:
+            _found_type = _element[0]
+            return _found_type
+    return None
+
+
+def lookup_team_for_contact(team):
+    """Find a Nautobot Team object by name and return the object."""
+    try:
+        _team = Team.objects.get(name=team)
+        return _team
+    except Team.DoesNotExist:
+        return None
+
+
+def lookup_contact_for_team(contact):
+    """Find a Nautobot Contact object by name and return the object."""
+    try:
+        _contact = Contact.objects.get(name=contact)
+        return _contact
+    except Contact.DoesNotExist:
+        return None
diff --git a/nautobot_ssot/integrations/bootstrap/utils/bootstrap.py b/nautobot_ssot/integrations/bootstrap/utils/bootstrap.py
new file mode 100644
index 000000000..b0c76a225
--- /dev/null
+++ b/nautobot_ssot/integrations/bootstrap/utils/bootstrap.py
@@ -0,0 +1 @@
+"""Utility functions for working with bootstrap."""
diff --git a/nautobot_ssot/integrations/bootstrap/utils/nautobot.py b/nautobot_ssot/integrations/bootstrap/utils/nautobot.py
new file mode 100644
index 000000000..2dfe44757
--- /dev/null
+++ b/nautobot_ssot/integrations/bootstrap/utils/nautobot.py
@@ -0,0 +1,29 @@
+"""Utility functions for working with Nautobot."""
+
+from nautobot.ipam.models import PrefixLocationAssignment, VRFPrefixAssignment
+
+
+def get_vrf_prefix_assignments(prefix):
+    """Retrieve all VRF assignments for a Prefix and return a list of VRF Names."""
+    _assignments = []
+    _vrf_assignments = VRFPrefixAssignment.objects.filter(prefix_id=prefix.id)
+
+    if _vrf_assignments:
+        for _vrf in _vrf_assignments:
+            _assignments.append(f"{_vrf.vrf.name}__{prefix.namespace.name}")
+        return _assignments
+
+    return None
+
+
+def get_prefix_location_assignments(prefix):
+    """Retrieve all Location assignments for a Prefix and return a list of Location Names."""
+    _locations = []
+    _location_assignments = PrefixLocationAssignment.objects.filter(prefix_id=prefix.id)
+
+    if _location_assignments:
+        for _location in _location_assignments:
+            _locations.append(_location.location.name)
+        return _locations
+
+    return None
diff --git a/nautobot_ssot/tests/bootstrap/__init__.py b/nautobot_ssot/tests/bootstrap/__init__.py
new file mode 100644
index 000000000..49cf95dd0
--- /dev/null
+++ b/nautobot_ssot/tests/bootstrap/__init__.py
@@ -0,0 +1 @@
+"""Unit tests for nautobot_ssot_bootstrap plugin."""
diff --git a/nautobot_ssot/tests/bootstrap/fixtures/develop.json b/nautobot_ssot/tests/bootstrap/fixtures/develop.json
new file mode 100644
index 000000000..2196ce74b
--- /dev/null
+++ b/nautobot_ssot/tests/bootstrap/fixtures/develop.json
@@ -0,0 +1,3 @@
+{
+    "git_branch": "develop"
+}
\ No newline at end of file
diff --git a/nautobot_ssot/tests/bootstrap/fixtures/global_settings.json b/nautobot_ssot/tests/bootstrap/fixtures/global_settings.json
new file mode 100644
index 000000000..68eb82261
--- /dev/null
+++ b/nautobot_ssot/tests/bootstrap/fixtures/global_settings.json
@@ -0,0 +1,757 @@
+{
+  "tenant_group": [
+    {
+      "name": "Group1",
+      "parent": "",
+      "description": ""
+    },
+    {
+      "name": "Group2",
+      "parent": "",
+      "description": ""
+    },
+    {
+      "name": "Group3",
+      "parent": "Group1",
+      
"description": "" + } + ], + "tenant": [ + { + "name": "Backbone", + "tenant_group": "Group1", + "description": "", + "tags": [] + }, + { + "name": "Datacenter", + "tenant_group": "Group2", + "description": "", + "tags": ["Test"] + } + ], + "role": [ + { + "name": "spine_switches", + "description": "", + "color": "795548", + "content_types": [ + "dcim.device" + ] + }, + { + "name": "leaf_switches", + "description": "", + "color": "785530", + "content_types": [ + "dcim.device" + ] + }, + { + "name": "Switch", + "description": "", + "color": "9e9e9e", + "content_types": [ + "dcim.device" + ] + }, + { + "name": "Firewall", + "description": "", + "color": "9e9e9e", + "content_types": [ + "dcim.device" + ] + }, + { + "name": "Data Network", + "description": "", + "color": "9e9e9e", + "content_types": [ + "ipam.prefix", + "ipam.vlan" + ] + }, + { + "name": "Administrative", + "description": "Unit plays an administrative role", + "color": "2196f3", + "content_types": [ + "extras.contactassociation" + ] + }, + { + "name": "Anycast", + "description": "", + "color": "ffc107", + "content_types": [ + "ipam.ipaddress" + ] + }, + { + "name": "Billing", + "description": "Unit plays a billing role", + "color": "4caf50", + "content_types": [ + "extras.contactassociation" + ] + }, + { + "name": "CARP", + "description": "", + "color": "4caf50", + "content_types": [ + "ipam.ipaddress" + ] + }, + { + "name": "GLBP", + "description": "", + "color": "4caf50", + "content_types": [ + "ipam.ipaddress" + ] + }, + { + "name": "HSRP", + "description": "", + "color": "4caf50", + "content_types": [ + "ipam.ipaddress" + ] + }, + { + "name": "Loopback", + "description": "", + "color": "9e9e9e", + "content_types": [ + "ipam.ipaddress" + ] + }, + { + "name": "On Site", + "description": "Unit plays an on site role", + "color": "111111", + "content_types": [ + "extras.contactassociation" + ] + }, + { + "name": "Secondary", + "description": "", + "color": "2196f3", + "content_types": [ + "ipam.ipaddress" + ] + }, + { + "name": "Support", + "description": "Unit plays a support role", + "color": "ffeb3b", + "content_types": [ + "extras.contactassociation" + ] + }, + { + "name": "VIP", + "description": "", + "color": "4caf50", + "content_types": [ + "ipam.ipaddress" + ] + }, + { + "name": "VRRP", + "description": "", + "color": "4caf50", + "content_types": [ + "ipam.ipaddress" + ] + } + ], + "manufacturer": [ + { + "name": "Generic", + "description": "For generic devices like patch panels" + }, + { + "name": "Palo Alto Networks", + "description": "" + }, + { + "name": "Arista", + "description": "" + }, + { + "name": "Cisco", + "description": "" + } + ], + "platform": [ + { + "name": "paloalto_panos", + "manufacturer": "Palo Alto Networks", + "network_driver": "paloalto_panos", + "napalm_driver": "", + "napalm_arguments": {}, + "description": "PanOS Firewalls" + }, + { + "name": "cisco_ios", + "manufacturer": "Cisco", + "network_driver": "cisco_ios", + "napalm_driver": "", + "napalm_arguments": {}, + "description": "Cisco Devices" + }, + { + "name": "arista_eos", + "manufacturer": "Arista", + "network_driver": "arista_eos", + "napalm_driver": "", + "napalm_arguments": {}, + "description": "Arista Devices" + } + ], + "location_type": [ + { + "name": "Region", + "nestable": true, + "description": "", + "content_types": [] + }, + { + "name": "Site", + "parent": "Region", + "nestable": false, + "description": "", + "content_types": ["dcim.device", "ipam.namespace", "ipam.prefix", "ipam.vlan", "ipam.vlangroup", 
"circuits.circuittermination"] + }, + { + "name": "Building", + "parent": "Site", + "nestable": false, + "description": "", + "content_types": ["dcim.device", "ipam.namespace", "ipam.prefix", "ipam.vlan", "ipam.vlangroup", "circuits.circuittermination"] + } + ], + "location": [ + { + "name": "Southeast", + "location_type": "Region", + "status": "Active", + "facility": "", + "time_zone": "US/Eastern", + "description": "", + "physical_address": "", + "shipping_address": "", + "contact_name": "", + "contact_phone": "", + "contact_email": "", + "tags": [] + }, + { + "name": "Atlanta", + "location_type": "Site", + "parent": "Southeast", + "status": "Active", + "facility": "AT1", + "asn": 65001, + "time_zone": "US/Eastern", + "description": "", + "physical_address": "180 Peachtree St NE\nFL 2 , FL 3 , FL 6\nAtlanta, GA 30303\nUnited States\n", + "shipping_address": "Example Company\n180 Peachtree St NE\nLoading Dock 1\nAtlanta, GA 30303\nUnited States\n", + "contact_name": "", + "contact_phone": "", + "contact_email": "", + "tags": [] + }, + { + "name": "Atlanta4", + "location_type": "Site", + "parent": "Southeast", + "status": "Active", + "facility": "AT4", + "asn": 65004, + "time_zone": "US/Eastern", + "description": "", + "physical_address": "450 Interstate to N PKWY\nAtlanta, GA 30339\nUnited States\n", + "shipping_address": "Example Company\n450 Interstate to N PKWY\nLoading Dock 1\nAtlanta, GA 30339\nUnited States\n", + "contact_name": "", + "contact_phone": "", + "contact_email": "", + "tags": [] + } + ], + "team": [ + { + "name": "Datacenter", + "phone": "123-456-7890", + "email": "datacenter@example.com", + "address": "2715 N Vermont Canyon Rd, Los Angeles, CA 90027" + }, + { + "name": "Backbone", + "phone": "123-456-7890", + "email": "backbone@example.com", + "address": "1600 S Azusa Ave, Rowland Heights, CA 91748" + } + ], + "contact": [ + { + "name": "Jennifer Parker", + "phone": "888-555-4823", + "email": "jenny@future.com", + "address": "12417 Philadelphia St, Whittier, CA 90601", + "teams": ["Backbone", "Datacenter"] + }, + { + "name": "Marty McFly", + "phone": "888-555-1955", + "email": "marty@future.com", + "address": "9303 Roslyndale Ave, Arleta, CA 91331", + "teams": ["Backbone"] + } + ], + "provider": [ + { + "name": "Provider1", + "asn": 65000, + "account_number": "12345678", + "portal_url": "https://provider1.com", + "noc_contact": "", + "admin_contact": "", + "tags": [] + }, + { + "name": "Provider2", + "asn": 65001, + "account_number": "87654321", + "portal_url": "https://provider2.com", + "noc_contact": "", + "admin_contact": "", + "tags": [] + } + ], + "provider_network": [ + { + "name": "Provider1 Metro-E", + "provider": "Provider1", + "description": "", + "comments": "", + "tags": [] + }, + { + "name": "Provider2 Metro-E", + "provider": "Provider2", + "description": "", + "comments": "", + "tags": [] + } + ], + "circuit_type": [ + { + "name": "Metro-E", + "description": "Metro ethernet" + }, + { + "name": "DWDM", + "description": "" + }, + { + "name": "Internet", + "description": "" + } + ], + "circuit": [ + { + "circuit_id": "METRO-65002-CUST1", + "provider": "Provider1", + "circuit_type": "Metro-E", + "status": "Active", + "commit_rate_kbps": 1000000, + "description": "", + "terminations": ["METRO-65002-CUST1__Provider1__A__A__METRO-65002-CUST1", "METRO-65002-CUST1__Provider1__Z__Z__METRO-65002-CUST1"], + "tags": [] + }, + { + "circuit_id": "INTERNET-65002-CUST1", + "provider": "Provider2", + "circuit_type": "Internet", + "status": "Active", + "commit_rate_kbps": 
1000000, + "description": "", + "terminations": ["INTERNET-65002-CUST1__Provider2__A__A__INTERNET-65002-CUST1"], + "tags": [] + } + ], + "circuit_termination": [ + { + "name": "METRO-65002-CUST1__Provider1__A", + "circuit_id": "METRO-65002-CUST1", + "termination_type": "Location", + "location": "Atlanta", + "termination_side": "A", + "port_speed_kbps": 1000000, + "cross_connect_id": "", + "patch_panel_or_ports": "", + "description": "", + "tags": [] + }, + { + "name": "METRO-65002-CUST1__Provider1__Z", + "circuit_id": "METRO-65002-CUST1", + "termination_type": "Provider Network", + "provider_network": "Provider2 Metro-E", + "termination_side": "Z", + "port_speed_kbps": 1000000, + "cross_connect_id": "", + "patch_panel_or_ports": "", + "description": "", + "tags": [] + }, + { + "name": "INTERNET-65002-CUST1__Provider2__A", + "circuit_id": "INTERNET-65002-CUST1", + "termination_type": "Location", + "location": "Atlanta4", + "termination_side": "A", + "port_speed_kbps": 1000000, + "cross_connect_id": "", + "patch_panel_or_ports": "", + "description": "", + "tags": [] + } + ], + "secret": [ + { + "name": "Github_Service_Acct", + "provider": "environment-variable", + "parameters": { + "variable": "GITHUB_SERVICE_ACCT", + "path": null + } + }, + { + "name": "Github_Service_Token", + "provider": "environment-variable", + "parameters": { + "variable": "GITHUB_SERVICE_TOKEN", + "path": null + } + } + ], + "secrets_group": [ + { + "name": "Github_Service_Account", + "secrets": [ + { + "name": "Github_Service_Acct", + "secret_type": "username", + "access_type": "HTTP(S)" + }, + { + "name": "Github_Service_Token", + "secret_type": "token", + "access_type": "HTTP(S)" + } + ] + } + ], + "git_repository": [ + { + "name": "Backbone Config Contexts", + "url": "https://github.com/nautobot/backbone-config-contexts.git", + "branch": "main", + "secrets_group": "Github_Service_Account", + "provided_contents": ["config contexts"] + }, + { + "name": "Datacenter Config Contexts", + "url": "https://github.com/nautobot/datacenter-config-contexts.git", + "branch": "develop", + "secrets_group": "Github_Service_Account", + "provided_contents": ["config contexts"] + }, + { + "name": "Metro Config Contexts", + "url": "https://github.com/nautobot/metro-config-contexts.git", + "branch": "develop", + "provided_contents": ["config contexts"] + }, + { + "name": "Access Config Contexts", + "url": "https://github.com/nautobot/access-config-contexts.git", + "branch": "develop", + "provided_contents": ["config contexts"] + } + ], + "dynamic_group": [ + { + "name": "Backbone Domain", + "content_type": "dcim.device", + "dynamic_filter": { + "tenant": ["Backbone"] + }, + "description": "" + }, + { + "name": "Datacenter", + "content_type": "dcim.device", + "dynamic_filter": { + "location": ["Atlanta"], + "platform": ["arista_eos", "paloalto_panos"] + }, + "description": "" + } + ], + "computed_field": [ + { + "label": "Compliance Change", + "content_type": "dcim.device", + "template": "{{ obj | get_change_log }}" + } + ], + "tag": [ + { + "name": "Backbone", + "color": "795547", + "content_types": ["dcim.device"], + "description": "" + }, + { + "name": "Access", + "color": "795548", + "content_types": ["dcim.device", "ipam.ipaddress"], + "description": "" + }, + { + "name": "Test", + "color": "795548", + "content_types": [ + "circuits.circuit", + "circuits.circuittermination", + "circuits.provider", + "circuits.providernetwork", + "dcim.cable", + "dcim.consoleport", + "dcim.consoleserverport", + "dcim.device", + "dcim.devicebay", + 
"dcim.deviceredundancygroup", + "dcim.devicetype", + "dcim.frontport", + "dcim.interface", + "dcim.inventoryitem", + "dcim.location", + "dcim.powerfeed", + "dcim.poweroutlet", + "dcim.powerpanel", + "dcim.powerport", + "dcim.rack", + "dcim.rackreservation", + "dcim.rearport", + "extras.gitrepository", + "extras.job", + "extras.secret", + "ipam.ipaddress", + "ipam.namespace", + "ipam.prefix", + "ipam.routetarget", + "ipam.service", + "ipam.vlan", + "ipam.vrf", + "tenancy.tenant", + "virtualization.cluster", + "virtualization.virtualmachine", + "virtualization.vminterface" + ], + "description": "Test" + } + ], + "graph_ql_query": [ + { + "name": "Backbone Devices", + "query": "query ($device_id: ID!) {\n device(id: $device_id) {\n config_context\n hostname: name\n device_role {\n name\n }\n tenant {\n name\n }\n primary_ip4 {\n address\n }\n }\n}\n" + }, + { + "name": "Datacenter Devices", + "query": "query ($device_id: ID!) {\n device(id: $device_id) {\n config_context\n hostname: name\n device_role {\n name\n }\n tenant {\n name\n }\n primary_ip4 {\n address\n }\n }\n}\n" + } + ], + "namespace": [ + { + "name": "Global", + "location": "", + "description": "" + }, + { + "name": "Customer1", + "description": "Customer1 IPAM Namespace", + "location": "Atlanta" + }, + { + "name": "Customer2", + "description": "Customer2 IPAM Namespace", + "location": "Atlanta4" + } + ], + "rir": [ + { + "name": "RFC1918", + "private": true, + "description": "Private IP Space" + }, + { + "name": "ARIN", + "private": false, + "description": "American Registry for Internet Numbers" + } + ], + "vlan_group": [ + { + "name": "Atlanta VLANs", + "location": "Atlanta", + "description": "" + }, + { + "name": "Atlanta4 VLANs", + "location": "Atlanta4", + "description": "" + } + ], + "vlan": [ + { + "name": "vlan10", + "vid": 10, + "description": "", + "status": "Active", + "role": "Data Network", + "locations": ["Atlanta"], + "vlan_group": "Atlanta VLANs", + "tags": [] + }, + { + "name": "vlan20", + "vid": 20, + "description": "", + "status": "Reserved", + "role": "Data Network", + "locations": ["Atlanta", "Atlanta4"], + "vlan_group": "Atlanta VLANs", + "tags": [] + }, + { + "name": "vlan30", + "vid": 30, + "description": "", + "status": "Reserved", + "role": "Data Network", + "locations": [], + "vlan_group": "Atlanta VLANs", + "tags": [] + }, + { + "name": "vlan30", + "vid": 30, + "description": "", + "status": "Active", + "locations": [], + "tags": [] + } + ], + "vrf": [ + { + "name": "blue", + "namespace": "Global", + "route_distinguisher": "65000:1", + "description": "", + "tenant": "", + "tags": [] + }, + { + "name": "red", + "namespace": "Global", + "route_distinguisher": "65000:2", + "description": "", + "tenant": "", + "tags": [] + }, + { + "name": "blue", + "namespace": "Customer1", + "route_distinguisher": "65000:1", + "description": "", + "tenant": "", + "tags": [] + } + ], + "prefix": [ + { + "network": "10.0.0.0/24", + "namespace": "Customer1", + "prefix_type": "network", + "status": "Active", + "role": "Data Network", + "rir": "RFC1918", + "date_allocated": "2024-06-01 00:00:00", + "description": "", + "tags": [] + }, + { + "network": "10.0.0.0/24", + "namespace": "Customer2", + "prefix_type": "network", + "status": "Active", + "role": "Data Network", + "rir": "RFC1918", + "date_allocated": "2024-06-01 12:00:00", + "description": "", + "tags": [] + }, + { + "network": "10.0.10.0/24", + "namespace": "Global", + "prefix_type": "network", + "status": "Active", + "role": "Data Network", + "rir": "RFC1918", + 
"description": "", + "locations": ["Atlanta", "Atlanta4"], + "vlan": "vlan10__10__Atlanta VLANs", + "tags": [] + }, + { + "network": "192.168.0.0/24", + "namespace": "Customer1", + "prefix_type": "network", + "status": "Active", + "role": "Data Network", + "rir": "RFC1918", + "description": "", + "vrfs": ["blue__Customer1"], + "locations": ["Atlanta"], + "tags": [] + }, + { + "network": "192.168.0.0/24", + "namespace": "Global", + "prefix_type": "network", + "status": "Active", + "role": "Data Network", + "rir": "RFC1918", + "description": "", + "vrfs": ["red__Global"], + "locations": ["Atlanta"], + "tags": [] + }, + { + "network": "192.168.1.0/24", + "namespace": "Global", + "prefix_type": "network", + "status": "Active", + "description": "", + "tags": [] + } + ] +} diff --git a/nautobot_ssot/tests/bootstrap/fixtures/production.json b/nautobot_ssot/tests/bootstrap/fixtures/production.json new file mode 100644 index 000000000..bb7d95b8a --- /dev/null +++ b/nautobot_ssot/tests/bootstrap/fixtures/production.json @@ -0,0 +1,3 @@ +{ + "git_branch": "production" +} \ No newline at end of file diff --git a/nautobot_ssot/tests/bootstrap/test_bootstrap_adapter.py b/nautobot_ssot/tests/bootstrap/test_bootstrap_adapter.py new file mode 100644 index 000000000..6ac8712bc --- /dev/null +++ b/nautobot_ssot/tests/bootstrap/test_bootstrap_adapter.py @@ -0,0 +1,158 @@ +"""Tests for Bootstrap adapter.""" + +import json +from datetime import datetime +from unittest.mock import MagicMock + +import yaml +from deepdiff import DeepDiff +from nautobot.core.testing import TransactionTestCase +from nautobot.extras.models import JobResult + +from nautobot_ssot.integrations.bootstrap.diffsync.adapters.bootstrap import ( + BootstrapAdapter, +) +from nautobot_ssot.integrations.bootstrap.jobs import BootstrapDataSource + +from .test_setup import ( + DEVELOP_YAML_SETTINGS, + GLOBAL_JSON_SETTINGS, + GLOBAL_YAML_SETTINGS, + MODELS_TO_SYNC, +) + + +def load_yaml(path): + """Load a yaml file.""" + with open(path, encoding="utf-8") as file: + return yaml.safe_load(file.read()) + + +def load_json(path): + """Load a json file.""" + with open(path, encoding="utf-8") as file: + return json.loads(file.read()) + + +def assert_deep_diff(test_case, actual, expected, keys_to_normalize=None): + # pylint: disable=duplicate-code + """Custom DeepDiff assertion handling.""" + keys_to_normalize = keys_to_normalize or {} + + def normalize(item): # pylint: disable=too-many-branches + if isinstance(item, list): + return [normalize(i) for i in item] + if isinstance(item, dict): + for key in list(item.keys()): + if key in ["system_of_record", "model_flags", "uuid"]: + item.pop(key, None) + elif key in keys_to_normalize and (item.get(key) is None or item.get(key) == ""): + item[key] = None + if ( + key + in [ + "weight", + "parent", + "date_installed", + "asn", + "latitude", + "longitude", + "tenant", + "terminations", + ] + and item.get(key) is None + ): + item.pop(key, None) + if key == "parameters": + if "path" not in item[key]: + item[key]["path"] = None + if key == "path" and item.get(key) is None: + item[key] = None + if key == "content_types" or key == "provided_contents" and isinstance(item[key], list): + item[key] = sorted(["config contexts" if v == "extras.configcontext" else v for v in item[key]]) + if key == "date_allocated": + if item.get(key) is not None: + # Normalize the format to 'YYYY-MM-DD HH:MM:SS' for consistency + if isinstance(item[key], datetime): + item[key] = item[key].isoformat(sep=" ") + elif isinstance(item[key], str) 
and len(item[key]) == 10: + # Convert 'YYYY-MM-DD' format to 'YYYY-MM-DD 00:00:00' + item[key] += " 00:00:00" + if key == "prefix": + # Sort prefixes based on network and namespace as unique identifiers + item[key] = sorted(item[key], key=lambda x: (x["network"], x["namespace"])) + return {k: normalize(v) for k, v in item.items()} # pylint: disable=duplicate-code + return item + + actual_normalized = normalize(actual) + expected_normalized = normalize(expected) + + diff = DeepDiff( + actual_normalized, + expected_normalized, + ignore_order=True, + ignore_string_type_changes=True, + exclude_regex_paths=r"root\[\d+\]\['terminations'\]", + ) + + print("Actual Normalization", actual_normalized) + print("Expected Normalization", expected_normalized) + + if diff: + print("Differences found:") + print(diff) + + test_case.assertEqual(diff, {}) + + +class TestBootstrapAdapterTestCase(TransactionTestCase): + """Test NautobotSsotBootstrapAdapter class.""" + + databases = ("default", "job_logs") + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.max_diff = None + + def setUp(self): + """Initialize test case.""" + self.job = BootstrapDataSource() + self.job.job_result = JobResult.objects.create( + name=self.job.class_path, task_name="fake task", worker="default" + ) + + self.bootstrap_client = MagicMock() + self.bootstrap_client.get_global_settings.return_value = GLOBAL_YAML_SETTINGS + self.bootstrap_client.get_develop_settings.return_value = DEVELOP_YAML_SETTINGS + self.bootstrap_client.get_production_settings.return_value = GLOBAL_YAML_SETTINGS + + self.bootstrap = BootstrapAdapter(job=self.job, sync=None, client=self.bootstrap_client) + + def test_develop_settings(self): + self.assertEqual(self.bootstrap_client.get_develop_settings(), DEVELOP_YAML_SETTINGS) + + def test_production_settings(self): + self.assertEqual(self.bootstrap_client.get_production_settings(), GLOBAL_YAML_SETTINGS) + + def test_data_loading(self): + """Test Nautobot Ssot Bootstrap load() function.""" + self.bootstrap.load() + # self.maxDiff = None + # pylint: disable=duplicate-code + for key in MODELS_TO_SYNC: + print(f"Checking: {key}") + assert_deep_diff( + self, + list(self.bootstrap.dict().get(key, {}).values()), + GLOBAL_JSON_SETTINGS.get(key, []), + keys_to_normalize={ + "parent", + "nestable", + "tenant", + "tenant_group", + "terminations", + "provider_network", + "upstream_speed_kbps", + "location", + }, + ) diff --git a/nautobot_ssot/tests/bootstrap/test_nautobot_adapter.py b/nautobot_ssot/tests/bootstrap/test_nautobot_adapter.py new file mode 100644 index 000000000..04cba638a --- /dev/null +++ b/nautobot_ssot/tests/bootstrap/test_nautobot_adapter.py @@ -0,0 +1,128 @@ +"""Testing that objects are properly loaded from Nautobot into Nautobot adapter.""" + +# test_nautobot_adapter.py + +from datetime import datetime + +from deepdiff import DeepDiff +from django.test import TransactionTestCase + +from .test_setup import ( + GLOBAL_JSON_SETTINGS, + MODELS_TO_SYNC, + NautobotTestSetup, +) + + +def assert_nautobot_deep_diff(test_case, actual, expected, keys_to_normalize=None): + # pylint: disable=duplicate-code + """Custom DeepDiff assertion handling.""" + keys_to_normalize = keys_to_normalize or {} + + def normalize(item, key=None): + if isinstance(item, list): + if key == "vrf": + return sorted( + [normalize(i, key) for i in item], + key=lambda x: (x.get("name", ""), x.get("namespace", "")), + ) + return [normalize(i, key) for i in item] + + if isinstance(item, dict): + for item_key in 
list(item.keys()): + if item_key in ["system_of_record", "model_flags", "uuid"]: + item.pop(item_key, None) + elif item_key in ["secrets_group"] and "secrets_group" not in item: + item[item_key] = None + elif item_key in keys_to_normalize and (item.get(item_key) is None or item.get(item_key) == ""): + item[item_key] = None + + if ( + item_key + in [ + "weight", + "parent", + "date_installed", + "asn", + "latitude", + "longitude", + "tenant", + "terminations", + ] + and item.get(item_key) is None + ): + item.pop(item_key, None) + + if item_key == "content_types" or item_key == "provided_contents" and isinstance(item[item_key], list): + item[item_key] = sorted(item[item_key]) + + if item_key == "date_allocated" and not item.get(item_key): + item.pop(item_key, None) + + if item_key == "parameters" and "path" not in item: + item["path"] = None + + if isinstance(item.get(item_key), datetime): + item[item_key] = item[item_key].isoformat(sep=" ") + + return {k: normalize(v, k) for k, v in item.items()} + return item + + actual_normalized = normalize(actual) + expected_normalized = normalize(expected) + + diff = DeepDiff( + actual_normalized, + expected_normalized, + ignore_order=True, + ignore_string_type_changes=True, + exclude_regex_paths=r"root\[\d+\]\['terminations'\]", + ) + + print("Actual Normalization", actual_normalized) + print("Expected Normalization", expected_normalized) + + if diff: + print("Differences found:") + print(diff) + + test_case.assertEqual(diff, {}) + + +class TestNautobotAdapterTestCase(TransactionTestCase): + """Test NautobotAdapter class.""" + + databases = ("default", "job_logs") + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + # self.maxDiff = None + + def setUp(self): + """Initialize test case.""" + super().setUp() + self.setup = NautobotTestSetup() + self.nb_adapter = self.setup.nb_adapter + + def test_data_loading(self): + """Test SSoT Bootstrap Nautobot load() function.""" + self.nb_adapter.load() + # self.maxDiff = None + # pylint: disable=duplicate-code + for key in MODELS_TO_SYNC: + print(f"Checking: {key}") + assert_nautobot_deep_diff( + self, + list(self.nb_adapter.dict().get(key, {}).values()), + GLOBAL_JSON_SETTINGS.get(key, []), + keys_to_normalize={ + "parent", + "nestable", + "tenant", + "tenant_group", + "terminations", + "provider_network", + "upstream_speed_kbps", + "location", + }, + ) diff --git a/nautobot_ssot/tests/bootstrap/test_setup.py b/nautobot_ssot/tests/bootstrap/test_setup.py new file mode 100644 index 000000000..7f8d198af --- /dev/null +++ b/nautobot_ssot/tests/bootstrap/test_setup.py @@ -0,0 +1,959 @@ +# pylint: disable=too-many-lines +"""Setup/Create Nautobot objects for use in unit testing.""" + +import json +import os +from unittest.mock import MagicMock + +import pytz +import yaml +from django.contrib.contenttypes.models import ContentType +from django.utils.text import slugify +from nautobot.circuits.models import ( + Circuit, + CircuitTermination, + CircuitType, + Provider, + ProviderNetwork, +) +from nautobot.dcim.models import ( + Device, + DeviceType, + InventoryItem, + Location, + LocationType, + Manufacturer, + Platform, +) +from nautobot.extras.models import ( + ComputedField, + Contact, + DynamicGroup, + GitRepository, + GraphQLQuery, + JobResult, + Role, + Secret, + SecretsGroup, + SecretsGroupAssociation, + Status, + Tag, + Team, +) +from nautobot.ipam.models import RIR, VLAN, VRF, Namespace, Prefix, VLANGroup +from nautobot.tenancy.models import Tenant, TenantGroup +from 
nautobot_device_lifecycle_mgmt.models import ( + SoftwareImageLCM, + SoftwareLCM, + ValidatedSoftwareLCM, +) + +from nautobot_ssot.integrations.bootstrap.diffsync.adapters.bootstrap import ( + BootstrapAdapter, +) +from nautobot_ssot.integrations.bootstrap.diffsync.adapters.nautobot import ( + NautobotAdapter, +) +from nautobot_ssot.integrations.bootstrap.jobs import BootstrapDataSource + + +def load_yaml(path): + """Load a yaml file.""" + with open(path, encoding="utf-8") as file: + return yaml.safe_load(file.read()) + + +def load_json(path): + """Load a json file.""" + with open(path, encoding="utf-8") as file: + return json.loads(file.read()) + + +FIXTURES_DIR = os.path.join("./nautobot_ssot/integrations/bootstrap/fixtures") +GLOBAL_YAML_SETTINGS = load_yaml(os.path.join(FIXTURES_DIR, "global_settings.yml")) +DEVELOP_YAML_SETTINGS = load_yaml(os.path.join(FIXTURES_DIR, "develop.yml")) + +TESTS_FIXTURES_DIR = os.path.join("./nautobot_ssot/tests/bootstrap/fixtures") +GLOBAL_JSON_SETTINGS = load_json(os.path.join(TESTS_FIXTURES_DIR, "global_settings.json")) + +MODELS_TO_SYNC = [ + "tenant_group", + "tenant", + "role", + "manufacturer", + "platform", + "location_type", + "location", + "team", + "contact", + "provider", + "provider_network", + "circuit_type", + "circuit", + "circuit_termination", + "secret", + "secrets_group", + "git_repository", + "dynamic_group", + "computed_field", + "tag", + "graph_ql_query", + "software", + "software_image", + "validated_software", + "namespace", + "rir", + "vlan_group", + "vlan", + "vrf", + "prefix", +] + + +def is_valid_timezone(tz): + """Return whether timezone passed is a valid timezone in pytz.""" + try: + pytz.timezone(tz) + return True + except pytz.UnknownTimeZoneError: + return False + + +class PrefixInfo: + """Definition for a prefix object""" + + def __init__(self, namespace, prefix_type, role, rir, vlan, tenant): # pylint: disable=too-many-arguments + self.namespace = namespace + self.prefix_type = prefix_type + self.role = role + self.rir = rir + self.vlan = vlan + self.tenant = tenant + + +class NautobotTestSetup: + """Setup basic database information to be used in other tests.""" + + def __init__(self): + self.job = BootstrapDataSource() + self.job.job_result = JobResult.objects.create( + name=self.job.class_path, task_name="fake task", worker="default" + ) + self.nb_adapter = NautobotAdapter(job=self.job, sync=None) + self.nb_adapter.job = MagicMock() + self.nb_adapter.job.logger.info = MagicMock() + self.bs_adapter = BootstrapAdapter(job=self.job, sync=None) + self.bs_adapter.job = MagicMock() + self.bs_adapter.job.logger.info = MagicMock() + self.status_active = None + self._initialize_data() + + def _initialize_data(self): + self._setup_tags() + self._setup_status() + self._setup_locations() + self._setup_tenant_groups() + self._setup_tenants() + self._setup_roles() + self._setup_teams() + self._setup_contacts() + self._setup_providers() + self._setup_provider_networks() + self._setup_circuit_types() + self._setup_circuits() + self._setup_circuit_terminations() + self._setup_manufacturers() + self._setup_platforms() + self._setup_device_types_and_devices() + self._setup_inventory_items() + self._setup_secrets_and_groups() + self._setup_computed_fields() + self._setup_graphql_queries() + self._setup_git_repositories() + self._setup_dynamic_groups() + self._setup_namespaces() + self._setup_rirs() + self._setup_vlan_groups() + self._setup_vlans() + self._setup_vrfs() + self._setup_prefixes() + self._setup_software_and_images() + 
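+        # _setup_validated_software() runs last because it resolves the software,
+        # devices, device types, roles, inventory items, and tags created by the
+        # steps above.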
self._setup_validated_software()
+
+    def _setup_tags(self):
+        for _tag in GLOBAL_YAML_SETTINGS["tag"]:
+            _content_types = []
+            for _con_type in _tag["content_types"]:
+                _content_types.append(
+                    ContentType.objects.get(app_label=_con_type.split(".")[0], model=_con_type.split(".")[1])
+                )
+            _new_tag = Tag.objects.create(
+                name=_tag["name"],
+                description=_tag["description"],
+                color=_tag["color"] if _tag["color"] is not None else "9e9e9e",
+            )
+            _new_tag.custom_field_data["system_of_record"] = "Bootstrap"
+            _new_tag.validated_save()
+            _new_tag.content_types.set(_content_types)
+            _new_tag.validated_save()
+            _new_tag.refresh_from_db()
+
+    def _setup_status(self):
+        _statuses = ["Reserved"]
+        self.status_active, _ = Status.objects.get_or_create(name="Active")
+        self.status_active.save()
+        _content_types = [
+            "circuits.circuit",
+            "dcim.location",
+            "dcim.device",
+            "ipam.prefix",
+            "ipam.namespace",
+            "ipam.vrf",
+            "ipam.vlan",
+            "ipam.ipaddress",
+        ]
+        for _content_type in _content_types:
+            _con_type = ContentType.objects.get(
+                app_label=_content_type.split(".", maxsplit=1)[0],
+                model=_content_type.split(".")[1],
+            )
+            self.status_active.content_types.add(_con_type)
+        self.status_active.refresh_from_db()
+        for _status in _statuses:
+            status, _ = Status.objects.get_or_create(name=_status)
+            for _content_type in _content_types:
+                _con_type = ContentType.objects.get(
+                    app_label=_content_type.split(".", maxsplit=1)[0],
+                    model=_content_type.split(".")[1],
+                )
+                status.content_types.add(_con_type)
+            status.validated_save()
+
+    def _setup_locations(self):
+        """Set up location types and locations."""
+
+        # First, ensure location types are created
+        location_types_data = GLOBAL_YAML_SETTINGS.get("location_type", [])
+        for loc_type_data in location_types_data:
+            location_type = self._get_or_create_location_type(loc_type_data)
+            self._set_location_type_content_types(location_type, loc_type_data["content_types"])
+
+        locations_data = GLOBAL_YAML_SETTINGS.get("location", [])
+        for location_data in locations_data:
+            location_type = LocationType.objects.get(name=location_data["location_type"])
+            parent_location = None
+            tenant = None
+            tags = []
+
+            status = Status.objects.get(name=location_data["status"])
+
+            if location_data["parent"]:
+                parent_location = Location.objects.get(name=location_data["parent"])
+
+            if location_data["tenant"]:
+                tenant = Tenant.objects.get(name=location_data["tenant"])
+
+            if location_data["tags"]:
+                tags = [Tag.objects.get(name=tag) for tag in location_data["tags"]]
+
+            location, created = Location.objects.get_or_create(
+                name=location_data["name"],
+                location_type=location_type,
+                defaults={
+                    "parent": parent_location,
+                    "status": status,
+                    "facility": location_data.get("facility", ""),
+                    "asn": location_data.get("asn"),
+                    "time_zone": location_data.get("time_zone", ""),
+                    "description": location_data.get("description", ""),
+                    "tenant": tenant,
+                    "physical_address": location_data.get("physical_address", ""),
+                    "shipping_address": location_data.get("shipping_address", ""),
+                    "latitude": location_data.get("latitude"),
+                    "longitude": location_data.get("longitude"),
+                    "contact_name": location_data.get("contact_name", ""),
+                    "contact_phone": location_data.get("contact_phone", ""),
+                    "contact_email": location_data.get("contact_email", ""),
+                    "tags": tags,
+                },
+            )
+            if created:
+                location.validated_save()
+
+    def _get_or_create_location_type(self, location_type_data):
+        """Get or create a LocationType based on the provided data."""
+        parent = 
self._get_location_type_parent(location_type_data["parent"]) + try: + return LocationType.objects.get(name=location_type_data["name"], parent=parent) + except LocationType.DoesNotExist: + return LocationType.objects.create( + name=location_type_data["name"], + parent=parent, + nestable=location_type_data.get("nestable"), + description=location_type_data["description"], + ) + + def _get_location_type_parent(self, parent_name): + """Retrieve the parent LocationType if it exists.""" + if parent_name: + try: + return LocationType.objects.get(name=parent_name) + except LocationType.DoesNotExist: + self.job.logger.warning(f"Parent LocationType '{parent_name}' does not exist.") + return None + return None + + def _set_location_type_content_types(self, location_type, content_types): + """Set the content types for a LocationType.""" + content_type_objects = [ + ContentType.objects.get(app_label=ct.split(".")[0], model=ct.split(".")[1]) for ct in content_types + ] + location_type.content_types.set(content_type_objects) + location_type.custom_field_data["system_of_record"] = "Bootstrap" + location_type.save() + + def _setup_tenant_groups(self): + _tenant_groups = GLOBAL_YAML_SETTINGS.get("tenant_group", []) + for _tenant_group in _tenant_groups: + if _tenant_group["parent"]: + _parent = TenantGroup.objects.get(name=_tenant_group["parent"]) + _tenant_group = TenantGroup.objects.create(name=_tenant_group["name"], parent=_parent) + else: + _tenant_group = TenantGroup.objects.create(name=_tenant_group["name"]) + _tenant_group.custom_field_data["system_of_record"] = "Bootstrap" + _tenant_group.validated_save() + _tenant_group.refresh_from_db() + + def _setup_tenants(self): + _tenants = GLOBAL_YAML_SETTINGS.get("tenant", []) + for _ten in _tenants: + _tenant_group = None + if _ten["tenant_group"]: + _tenant_group = TenantGroup.objects.get(name=_ten["tenant_group"]) + _tenant = Tenant.objects.create( + name=_ten["name"], + description=_ten["description"], + tenant_group=_tenant_group, + ) + _tenant.custom_field_data["system_of_record"] = "Bootstrap" + if _ten["tags"]: + for _tag in _ten["tags"]: + _tenant.tags.add(Tag.objects.get(name=_tag)) + _tenant.validated_save() + _tenant.refresh_from_db() + + def _setup_roles(self): + _con_types = [] + _roles = GLOBAL_YAML_SETTINGS["role"] + # _roles.remove(["Administrative", "Anycast", "Billing", "CARP", "GLBP", "HSRP", "Loopback", "On Site", ]) + for _role in _roles: + for _type in _role["content_types"]: + _con_types.append(ContentType.objects.get(app_label=_type.split(".")[0], model=_type.split(".")[1])) + _r, created = Role.objects.get_or_create( + name=_role["name"], + color=_role["color"], + description=_role["description"], + ) + if created: + _r.content_types.set(_con_types) + _r.custom_field_data["system_of_record"] = "Bootstrap" + _r.validated_save() + _con_types.clear() + + def _setup_teams(self): + for _team in GLOBAL_YAML_SETTINGS["team"]: + team = Team.objects.create( + name=_team["name"], + phone=_team["phone"], + email=_team["email"], + address=_team["address"], + ) + team.custom_field_data["system_of_record"] = "Bootstrap" + team.validated_save() + + def _setup_contacts(self): + for _contact in GLOBAL_YAML_SETTINGS["contact"]: + contact = Contact.objects.create( + name=_contact["name"], + phone=_contact["phone"], + email=_contact["email"], + address=_contact["address"], + ) + contact.validated_save() + for _team in _contact["teams"]: + contact.teams.add(Team.objects.get(name=_team)) + contact.custom_field_data["system_of_record"] = "Bootstrap" + 
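+            # validated_save() is full_clean() followed by save(), so the custom
+            # field assignment above is validated before it is persisted.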
contact.validated_save()
+
+    def _setup_providers(self):
+        for _provider in GLOBAL_YAML_SETTINGS["provider"]:
+            provider = Provider.objects.create(
+                name=_provider["name"],
+                asn=_provider["asn"],
+                account=_provider["account_number"],
+                portal_url=_provider["portal_url"],
+                noc_contact=_provider["noc_contact"],
+                admin_contact=_provider["admin_contact"],
+            )
+            provider.validated_save()
+            for _tag in _provider["tags"]:
+                _t = Tag.objects.get(name=_tag)
+                provider.tags.add(_t)
+            provider.custom_field_data["system_of_record"] = "Bootstrap"
+            provider.validated_save()
+
+    def _setup_provider_networks(self):
+        for _provider_network in GLOBAL_YAML_SETTINGS["provider_network"]:
+            _provider = Provider.objects.get(name=_provider_network["provider"])
+            provider_network = ProviderNetwork.objects.create(
+                name=_provider_network["name"],
+                provider=_provider,
+                description=_provider_network["description"],
+                comments=_provider_network["comments"],
+            )
+            provider_network.validated_save()
+            for _tag in _provider_network["tags"]:
+                _t = Tag.objects.get(name=_tag)
+                provider_network.tags.add(_t)
+            provider_network.custom_field_data["system_of_record"] = "Bootstrap"
+            provider_network.validated_save()
+
+    def _setup_circuit_types(self):
+        for _circuit_type in GLOBAL_YAML_SETTINGS["circuit_type"]:
+            circuit_type = CircuitType(
+                name=_circuit_type["name"],
+                description=_circuit_type["description"],
+            )
+            circuit_type.custom_field_data["system_of_record"] = "Bootstrap"
+            circuit_type.validated_save()
+
+    def _setup_circuits(self):
+        for _circuit in GLOBAL_YAML_SETTINGS["circuit"]:
+            _provider = Provider.objects.get(name=_circuit["provider"])
+            _circuit_type = CircuitType.objects.get(name=_circuit["circuit_type"])
+            _status = Status.objects.get(name=_circuit["status"])
+            _tenant = None
+            if _circuit["tenant"]:
+                _tenant = Tenant.objects.get(name=_circuit["tenant"])
+            circuit = Circuit(
+                cid=_circuit["circuit_id"],
+                provider=_provider,
+                circuit_type=_circuit_type,
+                status=_status,
+                commit_rate=_circuit["commit_rate_kbps"],
+                description=_circuit["description"],
+                tenant=_tenant,
+            )
+            circuit.validated_save()
+            for _tag in _circuit["tags"]:
+                _t = Tag.objects.get(name=_tag)
+                circuit.tags.add(_t)
+            circuit.custom_field_data["system_of_record"] = "Bootstrap"
+            circuit.validated_save()
+
+    def _setup_circuit_terminations(self):
+        for _circuit_termination in GLOBAL_YAML_SETTINGS["circuit_termination"]:
+            _name_parts = _circuit_termination["name"].split("__", 2)
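+            # "name" is a composite key of the form "<circuit_id>__<provider>__<term_side>";
+            # maxsplit=2 yields exactly three parts, e.g.
+            # "METRO-65002-CUST1__Provider1__A" -> ["METRO-65002-CUST1", "Provider1", "A"].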
+            _circuit_id = _name_parts[0]
+            _provider_name = _name_parts[1]
+            _term_side = _name_parts[2]
+            _provider = Provider.objects.get(name=_provider_name)
+            _circuit = Circuit.objects.get(cid=_circuit_id, provider=_provider)
+
+            if _circuit_termination["termination_type"] == "Provider Network":
+                _provider_network = ProviderNetwork.objects.get(name=_circuit_termination["provider_network"])
+                circuit_termination = CircuitTermination.objects.create(
+                    provider_network=_provider_network,
+                    circuit=_circuit,
+                    term_side=_term_side,
+                    xconnect_id=_circuit_termination["cross_connect_id"],
+                    pp_info=_circuit_termination["patch_panel_or_ports"],
+                    description=_circuit_termination["description"],
+                    upstream_speed=_circuit_termination["upstream_speed_kbps"],
+                    port_speed=_circuit_termination["port_speed_kbps"],
+                )
+            elif _circuit_termination["termination_type"] == "Location":
+                _location = Location.objects.get(name=_circuit_termination["location"])
+                circuit_termination = CircuitTermination.objects.create(
+                    location=_location,
+                    circuit=_circuit,
+                    term_side=_term_side,
+                    xconnect_id=_circuit_termination["cross_connect_id"],
+                    pp_info=_circuit_termination["patch_panel_or_ports"],
+                    description=_circuit_termination["description"],
+                    upstream_speed=_circuit_termination["upstream_speed_kbps"],
+                    port_speed=_circuit_termination["port_speed_kbps"],
+                )
+            circuit_termination.custom_field_data["system_of_record"] = "Bootstrap"
+            circuit_termination.validated_save()
+            if _circuit_termination["tags"]:
+                for _tag in _circuit_termination["tags"]:
+                    circuit_termination.tags.add(Tag.objects.get(name=_tag))
+
+    def _setup_namespaces(self):
+        for _namespace in GLOBAL_YAML_SETTINGS["namespace"]:
+            _location = None
+            if _namespace["location"]:
+                _location = Location.objects.get(name=_namespace["location"])
+            namespace, _ = Namespace.objects.get_or_create(
+                name=_namespace["name"],
+                location=_location,
+            )
+            namespace.description = _namespace["description"]
+            namespace.custom_field_data["system_of_record"] = "Bootstrap"
+            namespace.validated_save()
+
+    def _setup_rirs(self):
+        for _rir in GLOBAL_YAML_SETTINGS["rir"]:
+            rir, _ = RIR.objects.get_or_create(
+                name=_rir["name"],
+            )
+            rir.is_private = _rir["private"]
+            rir.description = _rir["description"]
+            rir.custom_field_data["system_of_record"] = "Bootstrap"
+            rir.validated_save()
+
+    def _setup_vlan_groups(self):
+        for _vlan_group in GLOBAL_YAML_SETTINGS["vlan_group"]:
+            _location = None
+            if _vlan_group["location"]:
+                _location = Location.objects.get(name=_vlan_group["location"])
+            vlan_group, _ = VLANGroup.objects.get_or_create(name=_vlan_group["name"], location=_location)
+            vlan_group.description = _vlan_group["description"]
+            vlan_group.custom_field_data["system_of_record"] = "Bootstrap"
+            vlan_group.validated_save()
+
+    def _setup_vlans(self):
+        for _vlan in GLOBAL_YAML_SETTINGS["vlan"]:
+            _role = None
+            _locations = []
+            _vlan_group = None
+            _tenant = None
+            _tags = []
+            _status = self.status_active
+            if _vlan["status"] and _vlan["status"] != "Active":
+                _status = Status.objects.get(name=_vlan["status"])
+            if _vlan["role"]:
+                _role = Role.objects.get(name=_vlan["role"])
+            if _vlan["locations"]:
+                for _l in _vlan["locations"]:
+                    _locations.append(Location.objects.get(name=_l))
+            if _vlan["vlan_group"]:
+                _vlan_group = VLANGroup.objects.get(name=_vlan["vlan_group"])
+            if _vlan["tenant"]:
+                _tenant = Tenant.objects.get(name=_vlan["tenant"])
+            if _vlan["tags"]:
+                for _t in _vlan["tags"]:
+                    _tags.append(Tag.objects.get(name=_t))
+            vlan, _ = VLAN.objects.get_or_create(
+                vid=_vlan["vid"],
+                name=_vlan["name"],
+                vlan_group=_vlan_group,
+                status=_status,
+            )
+            vlan.role = _role
+            vlan.locations.set(_locations)
+            vlan.tenant = _tenant
+            vlan.description = _vlan["description"]
+            vlan.custom_field_data["system_of_record"] = "Bootstrap"
+            vlan.validated_save()
+            vlan.tags.set(_tags)
+
+    def _setup_vrfs(self):
+        for _vrf in GLOBAL_YAML_SETTINGS["vrf"]:
+            _namespace = None
+            _tenant = None
+            _tags = []
+            if _vrf["namespace"]:
+                _namespace = Namespace.objects.get(name=_vrf["namespace"])
+            if _vrf["tenant"]:
+                _tenant = Tenant.objects.get(name=_vrf["tenant"])
+            if _vrf["tags"]:
+                for _t in _vrf["tags"]:
+                    _tags.append(Tag.objects.get(name=_t))
+            vrf, _ = VRF.objects.get_or_create(
+                name=_vrf["name"],
+                namespace=_namespace,
+            )
+            vrf.rd = _vrf["route_distinguisher"]
+            vrf.description = _vrf["description"]
+            vrf.tenant = _tenant
+            vrf.custom_field_data["system_of_record"] = "Bootstrap"
+            vrf.tags.set(_tags)
+            vrf.validated_save()
+
+    def _setup_prefixes(self):
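+        # Prefixes are not unique by network alone -- the fixtures reuse, e.g.,
+        # 10.0.0.0/24 in both the Customer1 and Customer2 namespaces -- so
+        # _get_or_create_prefix() below keys on network, prefix_length,
+        # namespace, and type.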
for prefix_data in GLOBAL_YAML_SETTINGS["prefix"]: + namespace = self._get_namespace(prefix_data) + prefix_type = self._get_prefix_type(prefix_data) + role = self._get_role(prefix_data) + rir = self._get_rir(prefix_data) + vrfs = self._get_vrfs(prefix_data) + locations = self._get_locations(prefix_data) + vlan = self._get_vlan(prefix_data) + tenant = self._get_tenant(prefix_data) + tags = self._get_prefix_tags(prefix_data) + + prefix_info = PrefixInfo(namespace, prefix_type, role, rir, vlan, tenant) + prefix = self._get_or_create_prefix(prefix_data, prefix_info) + self._update_prefix(prefix, locations, vrfs, tags) + + def _get_namespace(self, prefix_data): + if prefix_data["namespace"] and prefix_data["namespace"] != "Global": + return Namespace.objects.get(name=prefix_data["namespace"]) + return Namespace.objects.get(name="Global") + + def _get_prefix_type(self, prefix_data): + if prefix_data["prefix_type"] and prefix_data["prefix_type"] != "network": + return prefix_data["prefix_type"] + return "network" + + def _get_role(self, prefix_data): + if prefix_data["role"]: + return Role.objects.get(name=prefix_data["role"]) + return None + + def _get_rir(self, prefix_data): + if prefix_data["rir"]: + return RIR.objects.get(name=prefix_data["rir"]) + return None + + def _get_vrfs(self, prefix_data): + vrfs = [] + if prefix_data["vrfs"]: + for vrf in prefix_data["vrfs"]: + namespace = Namespace.objects.get(name=vrf.split("__")[1]) + vrfs.append(VRF.objects.get(name=vrf.split("__")[0], namespace=namespace)) + return vrfs + + def _get_locations(self, prefix_data): + locations = [] + if prefix_data["locations"]: + for loc in prefix_data["locations"]: + locations.append(Location.objects.get(name=loc)) + return locations + + def _get_vlan(self, prefix_data): + if prefix_data["vlan"]: + name, vid, group = prefix_data["vlan"].split("__", 2) + vlan_group = VLANGroup.objects.get(name=group) if group else None + return VLAN.objects.get(name=name, vid=vid, vlan_group=vlan_group) + return None + + def _get_tenant(self, prefix_data): + if prefix_data["tenant"]: + return Tenant.objects.get(name=prefix_data["tenant"]) + return None + + def _get_prefix_tags(self, prefix_data): + tags = [] + if prefix_data["tags"]: + for tag in prefix_data["tags"]: + tags.append(Tag.objects.get(name=tag)) + return tags + + def _get_or_create_prefix(self, prefix_data, prefix_info): + try: + return Prefix.objects.get( + network=prefix_data["network"].split("/")[0], + prefix_length=prefix_data["network"].split("/")[1], + namespace=prefix_info.namespace, + type=prefix_info.prefix_type, + ) + except Prefix.DoesNotExist: + return Prefix.objects.create( + network=prefix_data["network"].split("/")[0], + prefix_length=prefix_data["network"].split("/")[1], + namespace=prefix_info.namespace, + type=prefix_info.prefix_type, + status=Status.objects.get(name=prefix_data["status"]), + role=prefix_info.role, + rir=prefix_info.rir, + date_allocated=prefix_data["date_allocated"], + description=prefix_data["description"], + vlan=prefix_info.vlan, + tenant=prefix_info.tenant, + ) + + def _update_prefix(self, prefix, locations, vrfs, tags): + prefix.custom_field_data["system_of_record"] = "Bootstrap" + prefix.validated_save() + for loc in locations: + prefix.locations.add(loc) + for vrf in vrfs: + prefix.vrfs.add(vrf) + for tag in tags: + prefix.tags.add(tag) + prefix.validated_save() + + def _setup_manufacturers(self): + for _manufacturer in GLOBAL_YAML_SETTINGS["manufacturer"]: + _manufac = Manufacturer.objects.create(name=_manufacturer["name"], 
description=_manufacturer["description"]) + _manufac.custom_field_data["system_of_record"] = "Bootstrap" + _manufac.validated_save() + + def _setup_platforms(self): + for _platform in GLOBAL_YAML_SETTINGS["platform"]: + _manufac = Manufacturer.objects.get(name=_platform["manufacturer"]) + _platf = Platform.objects.create( + name=_platform["name"], + manufacturer=_manufac, + description=_platform["description"], + network_driver=_platform["network_driver"], + napalm_args=_platform["napalm_arguments"], + napalm_driver=_platform["napalm_driver"], + ) + _platf.custom_field_data["system_of_record"] = "Bootstrap" + _platf.validated_save() + + def _setup_device_types_and_devices(self): + _device_types = [ + {"model": "WS3850-24P", "manufacturer": "Cisco"}, + {"model": "PA-820", "manufacturer": "Palo Alto Networks"}, + ] + _devices = [ + { + "name": "Switch1", + "manufacturer": "Cisco", + "platform": "cisco_ios", + "location": "Atlanta", + "device_type": "WS3850-24P", + "role": "Switch", + }, + { + "name": "Firewall1", + "manufacturer": "Palo Alto Networks", + "platform": "paloalto_panos", + "location": "Atlanta", + "device_type": "PA-820", + "role": "Firewall", + }, + ] + + for _dev_type in _device_types: + _manufacturer = Manufacturer.objects.get(name=_dev_type["manufacturer"]) + _dev_type = DeviceType.objects.create(model=_dev_type["model"], manufacturer=_manufacturer) + _dev_type.custom_field_data["system_of_record"] = "Bootstrap" + _dev_type.validated_save() + + for _dev in _devices: + _manufacturer = Manufacturer.objects.get(name=_dev["manufacturer"]) + _platform = Platform.objects.get(name=_dev["platform"]) + _dev_type = DeviceType.objects.get(model=_dev["device_type"]) + _role = Role.objects.get(name=_dev["role"]) + _site = Location.objects.get(name=_dev["location"]) + _device = Device.objects.create( + name=_dev["name"], + platform=_platform, + device_type=_dev_type, + status=self.status_active, + role=_role, + location=_site, + ) + _device.custom_field_data["system_of_record"] = "Bootstrap" + _device.save() + _device.refresh_from_db() + + def _setup_inventory_items(self): + _inventory_items = [{"name": "sfp-module", "device": "Switch1", "manufacturer": "Cisco"}] + for _inv_item in _inventory_items: + _dev = Device.objects.get(name=_inv_item["device"]) + _manufacturer = Manufacturer.objects.get(name=_inv_item["manufacturer"]) + _inventory_item = InventoryItem.objects.create( + name=_inv_item["name"], device=_dev, manufacturer=_manufacturer + ) + _inventory_item.custom_field_data["system_of_record"] = "Bootstrap" + _inventory_item.save() + _inventory_item.refresh_from_db() + + def _setup_secrets_and_groups(self): + for _sec in GLOBAL_YAML_SETTINGS["secret"]: + _secret = Secret.objects.create( + name=_sec["name"], + provider=_sec["provider"], + parameters=_sec["parameters"], + ) + _secret.custom_field_data["system_of_record"] = "Bootstrap" + _secret.save() + _secret.refresh_from_db() + + for _sec_group in GLOBAL_YAML_SETTINGS["secrets_group"]: + _secrets_group = SecretsGroup.objects.create(name=_sec_group["name"]) + _secrets_group.custom_field_data["system_of_record"] = "Bootstrap" + _secrets_group.validated_save() + _secrets_group.refresh_from_db() + for _sec in _sec_group["secrets"]: + _sga = SecretsGroupAssociation.objects.create( + secrets_group=_secrets_group, + secret=Secret.objects.get(name=_sec["name"]), + access_type=_sec["access_type"], + secret_type=_sec["secret_type"], + ) + _sga.validated_save() + _sga.refresh_from_db() + + def _setup_computed_fields(self): + for _comp_field in 
GLOBAL_YAML_SETTINGS["computed_field"]: + _content_type = ContentType.objects.get( + app_label=_comp_field["content_type"].split(".")[0], + model=_comp_field["content_type"].split(".")[1], + ) + _computed_field = ComputedField.objects.create( + label=_comp_field["label"], + content_type=_content_type, + template=_comp_field["template"], + ) + _computed_field.save() + _computed_field.refresh_from_db() + + def _setup_graphql_queries(self): + for _gql_query in GLOBAL_YAML_SETTINGS["graph_ql_query"]: + _qglq = GraphQLQuery.objects.create(name=_gql_query["name"], query=_gql_query["query"]) + _qglq.save() + _qglq.refresh_from_db() + + def _setup_git_repositories(self): + for _repo in GLOBAL_YAML_SETTINGS["git_repository"]: + if _repo.get("branch"): + _git_branch = _repo["branch"] + else: + _git_branch = DEVELOP_YAML_SETTINGS["git_branch"] + _secrets_group = None + if _repo.get("secrets_group_name"): + _secrets_group = SecretsGroup.objects.get(name=_repo["secrets_group_name"]) + _git_repo = GitRepository.objects.create( + name=_repo["name"], + slug=slugify(_repo["name"]), + remote_url=_repo["url"], + branch=_git_branch, + secrets_group=_secrets_group, + provided_contents=_repo["provided_data_type"], + ) + _git_repo.custom_field_data["system_of_record"] = "Bootstrap" + _git_repo.validated_save() + + def _setup_dynamic_groups(self): + for _group in GLOBAL_YAML_SETTINGS["dynamic_group"]: + _content_type = ContentType.objects.get( + app_label=_group["content_type"].split(".")[0], + model=_group["content_type"].split(".")[1], + ) + _dynamic_group = DynamicGroup.objects.create( + name=_group["name"], + content_type=_content_type, + filter=json.loads(_group["filter"]), + description=_group["description"], + ) + _dynamic_group.custom_field_data["system_of_record"] = "Bootstrap" + _dynamic_group.validated_save() + _dynamic_group.refresh_from_db() + + def _setup_software_and_images(self): + for _software in GLOBAL_YAML_SETTINGS["software"]: + _tags = [] + for _tag in _software["tags"]: + _tags.append(Tag.objects.get(name=_tag)) + _platform = Platform.objects.get(name=_software["device_platform"]) + _soft = SoftwareLCM.objects.create( + version=_software["version"], + alias=_software["alias"], + device_platform=_platform, + end_of_support=_software["eos_date"], + long_term_support=_software["lts"], + pre_release=_software["pre_release"], + documentation_url=_software["documentation_url"], + tags=_tags, + ) + _soft.custom_field_data["system_of_record"] = "Bootstrap" + _soft.validated_save() + _soft.refresh_from_db() + + for _software_image in GLOBAL_YAML_SETTINGS["software_image"]: + _tags = [] + for _tag in _software_image["tags"]: + _tags.append(Tag.objects.get(name=_tag)) + _platform = Platform.objects.get(name=_software_image["platform"]) + _software = SoftwareLCM.objects.get(version=_software_image["software_version"], device_platform=_platform) + _soft_image = SoftwareImageLCM.objects.create( + software=_software, + image_file_name=_software_image["file_name"], + image_file_checksum=_software_image["image_file_checksum"], + hashing_algorithm=_software_image["hashing_algorithm"], + download_url=_software_image["download_url"], + default_image=_software_image["default_image"], + tags=_tags, + ) + _soft_image.custom_field_data["system_of_record"] = "Bootstrap" + _soft_image.validated_save() + _soft_image.refresh_from_db() + + def _setup_validated_software(self): + for validated_software_data in GLOBAL_YAML_SETTINGS["validated_software"]: + tags = 
self._get_validated_software_tags(validated_software_data["tags"]) + devices = self._get_devices(validated_software_data["devices"]) + device_types = self._get_device_types(validated_software_data["device_types"]) + device_roles = self._get_device_roles(validated_software_data["device_roles"]) + inventory_items = self._get_inventory_items(validated_software_data["inventory_items"]) + object_tags = self._get_object_tags(validated_software_data["object_tags"]) + + software = self._get_software(validated_software_data["software"]) + + validated_software = ValidatedSoftwareLCM.objects.create( + software=software, + start=validated_software_data["valid_since"], + end=validated_software_data["valid_until"], + preferred=validated_software_data["preferred_version"], + tags=tags, + ) + validated_software.custom_field_data["system_of_record"] = "Bootstrap" + validated_software.validated_save() + + self._set_validated_software_relations( + validated_software, + devices, + device_types, + device_roles, + inventory_items, + object_tags, + ) + + def _get_validated_software_tags(self, tag_names): + return [Tag.objects.get(name=tag_name) for tag_name in tag_names] + + def _get_devices(self, device_names): + return [Device.objects.get(name=device_name) for device_name in device_names] + + def _get_device_types(self, device_type_names): + return [DeviceType.objects.get(model=device_type_name) for device_type_name in device_type_names] + + def _get_device_roles(self, device_role_names): + return [Role.objects.get(name=device_role_name) for device_role_name in device_role_names] + + def _get_inventory_items(self, inventory_item_names): + return [InventoryItem.objects.get(name=inventory_item_name) for inventory_item_name in inventory_item_names] + + def _get_object_tags(self, object_tag_names): + return [Tag.objects.get(name=object_tag_name) for object_tag_name in object_tag_names] + + def _get_software(self, software_name): + _, software_version = software_name.split(" - ") + platform = Platform.objects.get(name=_) + software = SoftwareLCM.objects.get(version=software_version, device_platform=platform) + return software + + def _set_validated_software_relations( + self, + validated_software, + devices, + device_types, + device_roles, + inventory_items, + object_tags, + ): # pylint: disable=too-many-arguments + validated_software.devices.set(devices) + validated_software.device_types.set(device_types) + + _ = device_roles + _ = inventory_items + _ = object_tags diff --git a/nautobot_ssot/tests/test_basic.py b/nautobot_ssot/tests/test_basic.py index 83142face..d72f2d02e 100644 --- a/nautobot_ssot/tests/test_basic.py +++ b/nautobot_ssot/tests/test_basic.py @@ -11,21 +11,11 @@ class TestDocsPackaging(unittest.TestCase): def test_version(self): """Verify that pyproject.toml dev dependencies have the same versions as in the docs requirements.txt.""" - parent_path = os.path.dirname( - os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - ) + parent_path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))) poetry_path = os.path.join(parent_path, "pyproject.toml") - poetry_details = toml.load(poetry_path)["tool"]["poetry"]["group"]["dev"][ - "dependencies" - ] - with open( - f"{parent_path}/docs/requirements.txt", "r", encoding="utf-8" - ) as file: - requirements = [ - line - for line in file.read().splitlines() - if (len(line) > 0 and not line.startswith("#")) - ] + poetry_details = toml.load(poetry_path)["tool"]["poetry"]["group"]["dev"]["dependencies"] + with 
open(f"{parent_path}/docs/requirements.txt", "r", encoding="utf-8") as file: + requirements = [line for line in file.read().splitlines() if (len(line) > 0 and not line.startswith("#"))] for pkg in requirements: package_name = pkg if len(pkg.split("==")) == 2: # noqa: PLR2004 diff --git a/nautobot_ssot/utils.py b/nautobot_ssot/utils.py index 84ea0554c..b0333b455 100644 --- a/nautobot_ssot/utils.py +++ b/nautobot_ssot/utils.py @@ -4,7 +4,7 @@ from nautobot.dcim.models import Controller, ControllerManagedDeviceGroup from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices -from nautobot.extras.models import SecretsGroup +from nautobot.extras.models import CustomField, SecretsGroup logger = logging.getLogger("nautobot.ssot") @@ -38,3 +38,13 @@ def verify_controller_managed_device_group(controller: Controller) -> Controller return ControllerManagedDeviceGroup.objects.get_or_create( controller=controller, defaults={"name": f"{controller.name} Managed Devices"} )[0] + + +def create_or_update_custom_field(key, field_type, label): + """Create or update a custom field object.""" + cf_dict = { + "type": field_type, + "key": key, + "label": label, + } + return CustomField.objects.update_or_create(key=cf_dict["key"], defaults=cf_dict) diff --git a/poetry.lock b/poetry.lock index b68480dca..352bac318 100644 --- a/poetry.lock +++ b/poetry.lock @@ -184,18 +184,18 @@ tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "autopep8" -version = "2.0.0" +version = "2.3.1" description = "A tool that automatically formats Python code to conform to the PEP 8 style guide" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "autopep8-2.0.0-py2.py3-none-any.whl", hash = "sha256:ad924b42c2e27a1ac58e432166cc4588f5b80747de02d0d35b1ecbd3e7d57207"}, - {file = "autopep8-2.0.0.tar.gz", hash = "sha256:8b1659c7f003e693199f52caffdc06585bb0716900bbc6a7442fd931d658c077"}, + {file = "autopep8-2.3.1-py2.py3-none-any.whl", hash = "sha256:a203fe0fcad7939987422140ab17a930f684763bf7335bdb6709991dd7ef6c2d"}, + {file = "autopep8-2.3.1.tar.gz", hash = "sha256:8d6c87eba648fdcfc83e29b788910b8643171c395d9c4bcf115ece035b9c9dda"}, ] [package.dependencies] -pycodestyle = ">=2.9.1" -tomli = "*" +pycodestyle = ">=2.12.0" +tomli = {version = "*", markers = "python_version < \"3.11\""} [[package]] name = "babel" @@ -336,89 +336,89 @@ zstd = ["zstandard (==0.22.0)"] [[package]] name = "certifi" -version = "2024.7.4" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "cffi" -version = "1.17.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, - {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, - {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, - {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, - {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = 
"sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, - {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, - {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, - {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, - {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, - {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", 
hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, - {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, - {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, - {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, - {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, - {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, - {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = 
"cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -796,13 +796,13 @@ toml = ["tomli"] [[package]] name = "cron-descriptor" -version = "1.4.3" +version = "1.4.5" description = "A Python library that converts cron expressions into human readable strings." 
optional = false python-versions = "*" files = [ - {file = "cron_descriptor-1.4.3-py3-none-any.whl", hash = "sha256:a67ba21804983b1427ed7f3e1ec27ee77bf24c652b0430239c268c5ddfbf9dc0"}, - {file = "cron_descriptor-1.4.3.tar.gz", hash = "sha256:7b1a00d7d25d6ae6896c0da4457e790b98cba778398a3d48e341e5e0d33f0488"}, + {file = "cron_descriptor-1.4.5-py3-none-any.whl", hash = "sha256:736b3ae9d1a99bc3dbfc5b55b5e6e7c12031e7ba5de716625772f8b02dcd6013"}, + {file = "cron_descriptor-1.4.5.tar.gz", hash = "sha256:f51ce4ffc1d1f2816939add8524f206c376a42c87a5fca3091ce26725b3b1bca"}, ] [package.extras] @@ -972,13 +972,13 @@ profile = ["gprof2dot (>=2022.7.29)"] [[package]] name = "django" -version = "4.2.15" +version = "4.2.16" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." optional = false python-versions = ">=3.8" files = [ - {file = "Django-4.2.15-py3-none-any.whl", hash = "sha256:61ee4a130efb8c451ef3467c67ca99fdce400fedd768634efc86a68c18d80d30"}, - {file = "Django-4.2.15.tar.gz", hash = "sha256:c77f926b81129493961e19c0e02188f8d07c112a1162df69bfab178ae447f94a"}, + {file = "Django-4.2.16-py3-none-any.whl", hash = "sha256:1ddc333a16fc139fd253035a1606bb24261951bbc3a6ca256717fa06cc41a898"}, + {file = "Django-4.2.16.tar.gz", hash = "sha256:6f1616c2786c408ce86ab7e10f792b8f15742f7b7b7460243929cb371e7f1dad"}, ] [package.dependencies] @@ -1488,13 +1488,13 @@ test = ["pytest (>=6)"] [[package]] name = "executing" -version = "2.0.1" +version = "2.1.0" description = "Get the currently executing AST node of a frame, and other information" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, - {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, + {file = "executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf"}, + {file = "executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab"}, ] [package.extras] @@ -1748,61 +1748,61 @@ colorama = ">=0.4" [[package]] name = "grpcio" -version = "1.65.5" +version = "1.66.1" description = "HTTP/2-based RPC framework" optional = true python-versions = ">=3.8" files = [ - {file = "grpcio-1.65.5-cp310-cp310-linux_armv7l.whl", hash = "sha256:b67d450f1e008fedcd81e097a3a400a711d8be1a8b20f852a7b8a73fead50fe3"}, - {file = "grpcio-1.65.5-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:a70a20eed87bba647a38bedd93b3ce7db64b3f0e8e0952315237f7f5ca97b02d"}, - {file = "grpcio-1.65.5-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:f79c87c114bf37adf408026b9e2e333fe9ff31dfc9648f6f80776c513145c813"}, - {file = "grpcio-1.65.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f17f9fa2d947dbfaca01b3ab2c62eefa8240131fdc67b924eb42ce6032e3e5c1"}, - {file = "grpcio-1.65.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32d60e18ff7c34fe3f6db3d35ad5c6dc99f5b43ff3982cb26fad4174462d10b1"}, - {file = "grpcio-1.65.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fe6505376f5b00bb008e4e1418152e3ad3d954b629da286c7913ff3cfc0ff740"}, - {file = "grpcio-1.65.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:33158e56c6378063923c417e9fbdb28660b6e0e2835af42e67f5a7793f587af7"}, - {file = "grpcio-1.65.5-cp310-cp310-win32.whl", hash = 
"sha256:1cbc208edb9acf1cc339396a1a36b83796939be52f34e591c90292045b579fbf"}, - {file = "grpcio-1.65.5-cp310-cp310-win_amd64.whl", hash = "sha256:bc74f3f745c37e2c5685c9d2a2d5a94de00f286963f5213f763ae137bf4f2358"}, - {file = "grpcio-1.65.5-cp311-cp311-linux_armv7l.whl", hash = "sha256:3207ae60d07e5282c134b6e02f9271a2cb523c6d7a346c6315211fe2bf8d61ed"}, - {file = "grpcio-1.65.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a2f80510f99f82d4eb825849c486df703f50652cea21c189eacc2b84f2bde764"}, - {file = "grpcio-1.65.5-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a80e9a5e3f93c54f5eb82a3825ea1fc4965b2fa0026db2abfecb139a5c4ecdf1"}, - {file = "grpcio-1.65.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b2944390a496567de9e70418f3742b477d85d8ca065afa90432edc91b4bb8ad"}, - {file = "grpcio-1.65.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3655139d7be213c32c79ef6fb2367cae28e56ef68e39b1961c43214b457f257"}, - {file = "grpcio-1.65.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05f02d68fc720e085f061b704ee653b181e6d5abfe315daef085719728d3d1fd"}, - {file = "grpcio-1.65.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1c4caafe71aef4dabf53274bbf4affd6df651e9f80beedd6b8e08ff438ed3260"}, - {file = "grpcio-1.65.5-cp311-cp311-win32.whl", hash = "sha256:84c901cdec16a092099f251ef3360d15e29ef59772150fa261d94573612539b5"}, - {file = "grpcio-1.65.5-cp311-cp311-win_amd64.whl", hash = "sha256:11f8b16121768c1cb99d7dcb84e01510e60e6a206bf9123e134118802486f035"}, - {file = "grpcio-1.65.5-cp312-cp312-linux_armv7l.whl", hash = "sha256:ee6ed64a27588a2c94e8fa84fe8f3b5c89427d4d69c37690903d428ec61ca7e4"}, - {file = "grpcio-1.65.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:76991b7a6fb98630a3328839755181ce7c1aa2b1842aa085fd4198f0e5198960"}, - {file = "grpcio-1.65.5-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:89c00a18801b1ed9cc441e29b521c354725d4af38c127981f2c950c796a09b6e"}, - {file = "grpcio-1.65.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:078038e150a897e5e402ed3d57f1d31ebf604cbed80f595bd281b5da40762a92"}, - {file = "grpcio-1.65.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c97962720489ef31b5ad8a916e22bc31bba3664e063fb9f6702dce056d4aa61b"}, - {file = "grpcio-1.65.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b8270b15b99781461b244f5c81d5c2bc9696ab9189fb5ff86c841417fb3b39fe"}, - {file = "grpcio-1.65.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8e5c4c15ac3fe1eb68e46bc51e66ad29be887479f231f8237cf8416058bf0cc1"}, - {file = "grpcio-1.65.5-cp312-cp312-win32.whl", hash = "sha256:f5b5970341359341d0e4c789da7568264b2a89cd976c05ea476036852b5950cd"}, - {file = "grpcio-1.65.5-cp312-cp312-win_amd64.whl", hash = "sha256:238a625f391a1b9f5f069bdc5930f4fd71b74426bea52196fc7b83f51fa97d34"}, - {file = "grpcio-1.65.5-cp38-cp38-linux_armv7l.whl", hash = "sha256:6c4e62bcf297a1568f627f39576dbfc27f1e5338a691c6dd5dd6b3979da51d1c"}, - {file = "grpcio-1.65.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d7df567b67d16d4177835a68d3f767bbcbad04da9dfb52cbd19171f430c898bd"}, - {file = "grpcio-1.65.5-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:b7ca419f1462390851eec395b2089aad1e49546b52d4e2c972ceb76da69b10f8"}, - {file = "grpcio-1.65.5-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa36dd8496d3af0d40165252a669fa4f6fd2db4b4026b9a9411cbf060b9d6a15"}, - {file = "grpcio-1.65.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:a101696f9ece90a0829988ff72f1b1ea2358f3df035bdf6d675dd8b60c2c0894"}, - {file = "grpcio-1.65.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2a6d8169812932feac514b420daffae8ab8e36f90f3122b94ae767e633296b17"}, - {file = "grpcio-1.65.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:47d0aaaab82823f0aa6adea5184350b46e2252e13a42a942db84da5b733f2e05"}, - {file = "grpcio-1.65.5-cp38-cp38-win32.whl", hash = "sha256:85ae8f8517d5bcc21fb07dbf791e94ed84cc28f84c903cdc2bd7eaeb437c8f45"}, - {file = "grpcio-1.65.5-cp38-cp38-win_amd64.whl", hash = "sha256:770bd4bd721961f6dd8049bc27338564ba8739913f77c0f381a9815e465ff965"}, - {file = "grpcio-1.65.5-cp39-cp39-linux_armv7l.whl", hash = "sha256:ab5ec837d8cee8dbce9ef6386125f119b231e4333cc6b6d57b6c5c7c82a72331"}, - {file = "grpcio-1.65.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cabd706183ee08d8026a015af5819a0b3a8959bdc9d1f6fdacd1810f09200f2a"}, - {file = "grpcio-1.65.5-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:ec71fc5b39821ad7d80db7473c8f8c2910f3382f0ddadfbcfc2c6c437107eb67"}, - {file = "grpcio-1.65.5-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a9e35bcb045e39d7cac30464c285389b9a816ac2067e4884ad2c02e709ef8e"}, - {file = "grpcio-1.65.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d750e9330eb14236ca11b78d0c494eed13d6a95eb55472298f0e547c165ee324"}, - {file = "grpcio-1.65.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2b91ce647b6307f25650872454a4d02a2801f26a475f90d0b91ed8110baae589"}, - {file = "grpcio-1.65.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8da58ff80bc4556cf29bc03f5fff1f03b8387d6aaa7b852af9eb65b2cf833be4"}, - {file = "grpcio-1.65.5-cp39-cp39-win32.whl", hash = "sha256:7a412959aa5f08c5ac04aa7b7c3c041f5e4298cadd4fcc2acff195b56d185ebc"}, - {file = "grpcio-1.65.5-cp39-cp39-win_amd64.whl", hash = "sha256:55714ea852396ec9568f45f487639945ab674de83c12bea19d5ddbc3ae41ada3"}, - {file = "grpcio-1.65.5.tar.gz", hash = "sha256:ec6f219fb5d677a522b0deaf43cea6697b16f338cb68d009e30930c4aa0d2209"}, + {file = "grpcio-1.66.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:4877ba180591acdf127afe21ec1c7ff8a5ecf0fe2600f0d3c50e8c4a1cbc6492"}, + {file = "grpcio-1.66.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3750c5a00bd644c75f4507f77a804d0189d97a107eb1481945a0cf3af3e7a5ac"}, + {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:a013c5fbb12bfb5f927444b477a26f1080755a931d5d362e6a9a720ca7dbae60"}, + {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1b24c23d51a1e8790b25514157d43f0a4dce1ac12b3f0b8e9f66a5e2c4c132f"}, + {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7ffb8ea674d68de4cac6f57d2498fef477cef582f1fa849e9f844863af50083"}, + {file = "grpcio-1.66.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:307b1d538140f19ccbd3aed7a93d8f71103c5d525f3c96f8616111614b14bf2a"}, + {file = "grpcio-1.66.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1c17ebcec157cfb8dd445890a03e20caf6209a5bd4ac5b040ae9dbc59eef091d"}, + {file = "grpcio-1.66.1-cp310-cp310-win32.whl", hash = "sha256:ef82d361ed5849d34cf09105d00b94b6728d289d6b9235513cb2fcc79f7c432c"}, + {file = "grpcio-1.66.1-cp310-cp310-win_amd64.whl", hash = "sha256:292a846b92cdcd40ecca46e694997dd6b9be6c4c01a94a0dfb3fcb75d20da858"}, + {file = "grpcio-1.66.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:c30aeceeaff11cd5ddbc348f37c58bcb96da8d5aa93fed78ab329de5f37a0d7a"}, + {file = 
"grpcio-1.66.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8a1e224ce6f740dbb6b24c58f885422deebd7eb724aff0671a847f8951857c26"}, + {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a66fe4dc35d2330c185cfbb42959f57ad36f257e0cc4557d11d9f0a3f14311df"}, + {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3ba04659e4fce609de2658fe4dbf7d6ed21987a94460f5f92df7579fd5d0e22"}, + {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4573608e23f7e091acfbe3e84ac2045680b69751d8d67685ffa193a4429fedb1"}, + {file = "grpcio-1.66.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7e06aa1f764ec8265b19d8f00140b8c4b6ca179a6dc67aa9413867c47e1fb04e"}, + {file = "grpcio-1.66.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3885f037eb11f1cacc41f207b705f38a44b69478086f40608959bf5ad85826dd"}, + {file = "grpcio-1.66.1-cp311-cp311-win32.whl", hash = "sha256:97ae7edd3f3f91480e48ede5d3e7d431ad6005bfdbd65c1b56913799ec79e791"}, + {file = "grpcio-1.66.1-cp311-cp311-win_amd64.whl", hash = "sha256:cfd349de4158d797db2bd82d2020554a121674e98fbe6b15328456b3bf2495bb"}, + {file = "grpcio-1.66.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:a92c4f58c01c77205df6ff999faa008540475c39b835277fb8883b11cada127a"}, + {file = "grpcio-1.66.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fdb14bad0835914f325349ed34a51940bc2ad965142eb3090081593c6e347be9"}, + {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f03a5884c56256e08fd9e262e11b5cfacf1af96e2ce78dc095d2c41ccae2c80d"}, + {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ca2559692d8e7e245d456877a85ee41525f3ed425aa97eb7a70fc9a79df91a0"}, + {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84ca1be089fb4446490dd1135828bd42a7c7f8421e74fa581611f7afdf7ab761"}, + {file = "grpcio-1.66.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d639c939ad7c440c7b2819a28d559179a4508783f7e5b991166f8d7a34b52815"}, + {file = "grpcio-1.66.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b9feb4e5ec8dc2d15709f4d5fc367794d69277f5d680baf1910fc9915c633524"}, + {file = "grpcio-1.66.1-cp312-cp312-win32.whl", hash = "sha256:7101db1bd4cd9b880294dec41a93fcdce465bdbb602cd8dc5bd2d6362b618759"}, + {file = "grpcio-1.66.1-cp312-cp312-win_amd64.whl", hash = "sha256:b0aa03d240b5539648d996cc60438f128c7f46050989e35b25f5c18286c86734"}, + {file = "grpcio-1.66.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:ecfe735e7a59e5a98208447293ff8580e9db1e890e232b8b292dc8bd15afc0d2"}, + {file = "grpcio-1.66.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4825a3aa5648010842e1c9d35a082187746aa0cdbf1b7a2a930595a94fb10fce"}, + {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:f517fd7259fe823ef3bd21e508b653d5492e706e9f0ef82c16ce3347a8a5620c"}, + {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1fe60d0772831d96d263b53d83fb9a3d050a94b0e94b6d004a5ad111faa5b5b"}, + {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31a049daa428f928f21090403e5d18ea02670e3d5d172581670be006100db9ef"}, + {file = "grpcio-1.66.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f914386e52cbdeb5d2a7ce3bf1fdfacbe9d818dd81b6099a05b741aaf3848bb"}, + {file = "grpcio-1.66.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:bff2096bdba686019fb32d2dde45b95981f0d1490e054400f70fc9a8af34b49d"}, + {file = "grpcio-1.66.1-cp38-cp38-win32.whl", hash = "sha256:aa8ba945c96e73de29d25331b26f3e416e0c0f621e984a3ebdb2d0d0b596a3b3"}, + {file = "grpcio-1.66.1-cp38-cp38-win_amd64.whl", hash = "sha256:161d5c535c2bdf61b95080e7f0f017a1dfcb812bf54093e71e5562b16225b4ce"}, + {file = "grpcio-1.66.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:d0cd7050397b3609ea51727b1811e663ffda8bda39c6a5bb69525ef12414b503"}, + {file = "grpcio-1.66.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0e6c9b42ded5d02b6b1fea3a25f036a2236eeb75d0579bfd43c0018c88bf0a3e"}, + {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:c9f80f9fad93a8cf71c7f161778ba47fd730d13a343a46258065c4deb4b550c0"}, + {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dd67ed9da78e5121efc5c510f0122a972216808d6de70953a740560c572eb44"}, + {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48b0d92d45ce3be2084b92fb5bae2f64c208fea8ceed7fccf6a7b524d3c4942e"}, + {file = "grpcio-1.66.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4d813316d1a752be6f5c4360c49f55b06d4fe212d7df03253dfdae90c8a402bb"}, + {file = "grpcio-1.66.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c9bebc6627873ec27a70fc800f6083a13c70b23a5564788754b9ee52c5aef6c"}, + {file = "grpcio-1.66.1-cp39-cp39-win32.whl", hash = "sha256:30a1c2cf9390c894c90bbc70147f2372130ad189cffef161f0432d0157973f45"}, + {file = "grpcio-1.66.1-cp39-cp39-win_amd64.whl", hash = "sha256:17663598aadbedc3cacd7bbde432f541c8e07d2496564e22b214b22c7523dac8"}, + {file = "grpcio-1.66.1.tar.gz", hash = "sha256:35334f9c9745add3e357e3372756fd32d925bd52c41da97f4dfdafbde0bf0ee2"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.65.5)"] +protobuf = ["grpcio-tools (>=1.66.1)"] [[package]] name = "h11" @@ -1864,13 +1864,13 @@ trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httpx" -version = "0.27.0" +version = "0.27.2" description = "The next generation HTTP client." 
optional = true python-versions = ">=3.8" files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, ] [package.dependencies] @@ -1885,6 +1885,7 @@ brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "hyperframe" @@ -1899,13 +1900,13 @@ files = [ [[package]] name = "idna" -version = "3.7" +version = "3.8" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, + {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, ] [[package]] @@ -2101,13 +2102,13 @@ files = [ [[package]] name = "ipfabric" -version = "6.9.4" +version = "6.9.6" description = "Python package for interacting with IP Fabric" optional = true python-versions = "<4.0,>=3.8" files = [ - {file = "ipfabric-6.9.4-py3-none-any.whl", hash = "sha256:31d65d4de544233ddc1278b651c1ccf2ac798bde2ce20528ce232e5db602d2f1"}, - {file = "ipfabric-6.9.4.tar.gz", hash = "sha256:6733e0b7447f7c4274735d1010d5879a3952cf540a3f3f458d256aff6a4a1b92"}, + {file = "ipfabric-6.9.6-py3-none-any.whl", hash = "sha256:bc509c4a70375b2258306886e53de7a195b47d81cc24377c093b80368640e27a"}, + {file = "ipfabric-6.9.6.tar.gz", hash = "sha256:6cd0da2f25b3810199f4e8c86adf14325a95b87d70a8b702470c230b7ad417d7"}, ] [package.dependencies] @@ -2259,115 +2260,125 @@ referencing = ">=0.31.0" [[package]] name = "kiwisolver" -version = "1.4.5" +version = "1.4.7" description = "A fast implementation of the Cassowary constraint solver" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"}, - {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"}, - {file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"}, - {file = 
"kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"}, - {file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"}, - {file = "kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"}, - {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"}, - {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"}, - {file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"}, - {file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = 
"sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"}, - {file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"}, - {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"}, - {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"}, - {file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"}, - {file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"}, - {file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2b053a0ab7a3960c98725cfb0bf5b48ba82f64ec95fe06f1d06c99b552e130"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd32d6c13807e5c66a7cbb79f90b553642f296ae4518a60d8d76243b0ad2898"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59ec7b7c7e1a61061850d53aaf8e93db63dce0c936db1fda2658b70e4a1be709"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da4cfb373035def307905d05041c1d06d8936452fe89d464743ae7fb8371078b"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2400873bccc260b6ae184b2b8a4fec0e4082d30648eadb7c3d9a13405d861e89"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:1b04139c4236a0f3aff534479b58f6f849a8b351e1314826c2d230849ed48985"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4e66e81a5779b65ac21764c295087de82235597a2293d18d943f8e9e32746265"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7931d8f1f67c4be9ba1dd9c451fb0eeca1a25b89e4d3f89e828fe12a519b782a"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b3f7e75f3015df442238cca659f8baa5f42ce2a8582727981cbfa15fee0ee205"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:bbf1d63eef84b2e8c89011b7f2235b1e0bf7dacc11cac9431fc6468e99ac77fb"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c380469bd3f970ef677bf2bcba2b6b0b4d5c75e7a020fb863ef75084efad66f"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:9408acf3270c4b6baad483865191e3e582b638b1654a007c62e3efe96f09a9a3"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5b94529f9b2591b7af5f3e0e730a4e0a41ea174af35a4fd067775f9bdfeee01a"}, - {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:11c7de8f692fc99816e8ac50d1d1aef4f75126eefc33ac79aac02c099fd3db71"}, - {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53abb58632235cd154176ced1ae8f0d29a6657aa1aa9decf50b899b755bc2b93"}, - {file = "kiwisolver-1.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88b9f257ca61b838b6f8094a62418421f87ac2a1069f7e896c36a7d86b5d4c29"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3195782b26fc03aa9c6913d5bad5aeb864bdc372924c093b0f1cebad603dd712"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc579bf0f502e54926519451b920e875f433aceb4624a3646b3252b5caa9e0b6"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a580c91d686376f0f7c295357595c5a026e6cbc3d77b7c36e290201e7c11ecb"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cfe6ab8da05c01ba6fbea630377b5da2cd9bcbc6338510116b01c1bc939a2c18"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d2e5a98f0ec99beb3c10e13b387f8db39106d53993f498b295f0c914328b1333"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a51a263952b1429e429ff236d2f5a21c5125437861baeed77f5e1cc2d2c7c6da"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3edd2fa14e68c9be82c5b16689e8d63d89fe927e56debd6e1dbce7a26a17f81b"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:74d1b44c6cfc897df648cc9fdaa09bc3e7679926e6f96df05775d4fb3946571c"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76d9289ed3f7501012e05abb8358bbb129149dbd173f1f57a1bf1c22d19ab7cc"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92dea1ffe3714fa8eb6a314d2b3c773208d865a0e0d35e713ec54eea08a66250"}, - {file = "kiwisolver-1.4.5-cp38-cp38-win32.whl", hash = "sha256:5c90ae8c8d32e472be041e76f9d2f2dbff4d0b0be8bd4041770eddb18cf49a4e"}, - {file = "kiwisolver-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:c7940c1dc63eb37a67721b10d703247552416f719c4188c54e04334321351ced"}, - {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9407b6a5f0d675e8a827ad8742e1d6b49d9c1a1da5d952a67d50ef5f4170b18d"}, - {file = 
"kiwisolver-1.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15568384086b6df3c65353820a4473575dbad192e35010f622c6ce3eebd57af9"}, - {file = "kiwisolver-1.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0dc9db8e79f0036e8173c466d21ef18e1befc02de8bf8aa8dc0813a6dc8a7046"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cdc8a402aaee9a798b50d8b827d7ecf75edc5fb35ea0f91f213ff927c15f4ff0"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6c3bd3cde54cafb87d74d8db50b909705c62b17c2099b8f2e25b461882e544ff"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955e8513d07a283056b1396e9a57ceddbd272d9252c14f154d450d227606eb54"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:346f5343b9e3f00b8db8ba359350eb124b98c99efd0b408728ac6ebf38173958"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9098e0049e88c6a24ff64545cdfc50807818ba6c1b739cae221bbbcbc58aad3"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:00bd361b903dc4bbf4eb165f24d1acbee754fce22ded24c3d56eec268658a5cf"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7b8b454bac16428b22560d0a1cf0a09875339cab69df61d7805bf48919415901"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f1d072c2eb0ad60d4c183f3fb44ac6f73fb7a8f16a2694a91f988275cbf352f9"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:31a82d498054cac9f6d0b53d02bb85811185bcb477d4b60144f915f3b3126342"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6512cb89e334e4700febbffaaa52761b65b4f5a3cf33f960213d5656cea36a77"}, - {file = "kiwisolver-1.4.5-cp39-cp39-win32.whl", hash = "sha256:9db8ea4c388fdb0f780fe91346fd438657ea602d58348753d9fb265ce1bca67f"}, - {file = "kiwisolver-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:59415f46a37f7f2efeec758353dd2eae1b07640d8ca0f0c42548ec4125492635"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"}, - {file = 
"kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"}, - {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"}, + {file = "kiwisolver-1.4.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8a9c83f75223d5e48b0bc9cb1bf2776cf01563e00ade8775ffe13b0b6e1af3a6"}, + {file = "kiwisolver-1.4.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:58370b1ffbd35407444d57057b57da5d6549d2d854fa30249771775c63b5fe17"}, + {file = "kiwisolver-1.4.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aa0abdf853e09aff551db11fce173e2177d00786c688203f52c87ad7fcd91ef9"}, + {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8d53103597a252fb3ab8b5845af04c7a26d5e7ea8122303dd7a021176a87e8b9"}, + {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:88f17c5ffa8e9462fb79f62746428dd57b46eb931698e42e990ad63103f35e6c"}, + {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a9ca9c710d598fd75ee5de59d5bda2684d9db36a9f50b6125eaea3969c2599"}, + {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f4d742cb7af1c28303a51b7a27aaee540e71bb8e24f68c736f6f2ffc82f2bf05"}, + {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28c7fea2196bf4c2f8d46a0415c77a1c480cc0724722f23d7410ffe9842c407"}, + {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e968b84db54f9d42046cf154e02911e39c0435c9801681e3fc9ce8a3c4130278"}, + {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0c18ec74c0472de033e1bebb2911c3c310eef5649133dd0bedf2a169a1b269e5"}, + {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8f0ea6da6d393d8b2e187e6a5e3fb81f5862010a40c3945e2c6d12ae45cfb2ad"}, + {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:f106407dda69ae456dd1227966bf445b157ccc80ba0dff3802bb63f30b74e895"}, + {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:84ec80df401cfee1457063732d90022f93951944b5b58975d34ab56bb150dfb3"}, + {file = "kiwisolver-1.4.7-cp310-cp310-win32.whl", hash = "sha256:71bb308552200fb2c195e35ef05de12f0c878c07fc91c270eb3d6e41698c3bcc"}, + {file = "kiwisolver-1.4.7-cp310-cp310-win_amd64.whl", hash = 
"sha256:44756f9fd339de0fb6ee4f8c1696cfd19b2422e0d70b4cefc1cc7f1f64045a8c"}, + {file = "kiwisolver-1.4.7-cp310-cp310-win_arm64.whl", hash = "sha256:78a42513018c41c2ffd262eb676442315cbfe3c44eed82385c2ed043bc63210a"}, + {file = "kiwisolver-1.4.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d2b0e12a42fb4e72d509fc994713d099cbb15ebf1103545e8a45f14da2dfca54"}, + {file = "kiwisolver-1.4.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2a8781ac3edc42ea4b90bc23e7d37b665d89423818e26eb6df90698aa2287c95"}, + {file = "kiwisolver-1.4.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:46707a10836894b559e04b0fd143e343945c97fd170d69a2d26d640b4e297935"}, + {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef97b8df011141c9b0f6caf23b29379f87dd13183c978a30a3c546d2c47314cb"}, + {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab58c12a2cd0fc769089e6d38466c46d7f76aced0a1f54c77652446733d2d02"}, + {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:803b8e1459341c1bb56d1c5c010406d5edec8a0713a0945851290a7930679b51"}, + {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9a9e8a507420fe35992ee9ecb302dab68550dedc0da9e2880dd88071c5fb052"}, + {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18077b53dc3bb490e330669a99920c5e6a496889ae8c63b58fbc57c3d7f33a18"}, + {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6af936f79086a89b3680a280c47ea90b4df7047b5bdf3aa5c524bbedddb9e545"}, + {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3abc5b19d24af4b77d1598a585b8a719beb8569a71568b66f4ebe1fb0449460b"}, + {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:933d4de052939d90afbe6e9d5273ae05fb836cc86c15b686edd4b3560cc0ee36"}, + {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:65e720d2ab2b53f1f72fb5da5fb477455905ce2c88aaa671ff0a447c2c80e8e3"}, + {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3bf1ed55088f214ba6427484c59553123fdd9b218a42bbc8c6496d6754b1e523"}, + {file = "kiwisolver-1.4.7-cp311-cp311-win32.whl", hash = "sha256:4c00336b9dd5ad96d0a558fd18a8b6f711b7449acce4c157e7343ba92dd0cf3d"}, + {file = "kiwisolver-1.4.7-cp311-cp311-win_amd64.whl", hash = "sha256:929e294c1ac1e9f615c62a4e4313ca1823ba37326c164ec720a803287c4c499b"}, + {file = "kiwisolver-1.4.7-cp311-cp311-win_arm64.whl", hash = "sha256:e33e8fbd440c917106b237ef1a2f1449dfbb9b6f6e1ce17c94cd6a1e0d438376"}, + {file = "kiwisolver-1.4.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:5360cc32706dab3931f738d3079652d20982511f7c0ac5711483e6eab08efff2"}, + {file = "kiwisolver-1.4.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942216596dc64ddb25adb215c3c783215b23626f8d84e8eff8d6d45c3f29f75a"}, + {file = "kiwisolver-1.4.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:48b571ecd8bae15702e4f22d3ff6a0f13e54d3d00cd25216d5e7f658242065ee"}, + {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad42ba922c67c5f219097b28fae965e10045ddf145d2928bfac2eb2e17673640"}, + {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:612a10bdae23404a72941a0fc8fa2660c6ea1217c4ce0dbcab8a8f6543ea9e7f"}, + 
{file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e838bba3a3bac0fe06d849d29772eb1afb9745a59710762e4ba3f4cb8424483"}, + {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:22f499f6157236c19f4bbbd472fa55b063db77a16cd74d49afe28992dff8c258"}, + {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693902d433cf585133699972b6d7c42a8b9f8f826ebcaf0132ff55200afc599e"}, + {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4e77f2126c3e0b0d055f44513ed349038ac180371ed9b52fe96a32aa071a5107"}, + {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:657a05857bda581c3656bfc3b20e353c232e9193eb167766ad2dc58b56504948"}, + {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4bfa75a048c056a411f9705856abfc872558e33c055d80af6a380e3658766038"}, + {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:34ea1de54beef1c104422d210c47c7d2a4999bdecf42c7b5718fbe59a4cac383"}, + {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:90da3b5f694b85231cf93586dad5e90e2d71b9428f9aad96952c99055582f520"}, + {file = "kiwisolver-1.4.7-cp312-cp312-win32.whl", hash = "sha256:18e0cca3e008e17fe9b164b55735a325140a5a35faad8de92dd80265cd5eb80b"}, + {file = "kiwisolver-1.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:58cb20602b18f86f83a5c87d3ee1c766a79c0d452f8def86d925e6c60fbf7bfb"}, + {file = "kiwisolver-1.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:f5a8b53bdc0b3961f8b6125e198617c40aeed638b387913bf1ce78afb1b0be2a"}, + {file = "kiwisolver-1.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2e6039dcbe79a8e0f044f1c39db1986a1b8071051efba3ee4d74f5b365f5226e"}, + {file = "kiwisolver-1.4.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a1ecf0ac1c518487d9d23b1cd7139a6a65bc460cd101ab01f1be82ecf09794b6"}, + {file = "kiwisolver-1.4.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7ab9ccab2b5bd5702ab0803676a580fffa2aa178c2badc5557a84cc943fcf750"}, + {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f816dd2277f8d63d79f9c8473a79fe54047bc0467754962840782c575522224d"}, + {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf8bcc23ceb5a1b624572a1623b9f79d2c3b337c8c455405ef231933a10da379"}, + {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dea0bf229319828467d7fca8c7c189780aa9ff679c94539eed7532ebe33ed37c"}, + {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c06a4c7cf15ec739ce0e5971b26c93638730090add60e183530d70848ebdd34"}, + {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:913983ad2deb14e66d83c28b632fd35ba2b825031f2fa4ca29675e665dfecbe1"}, + {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5337ec7809bcd0f424c6b705ecf97941c46279cf5ed92311782c7c9c2026f07f"}, + {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4c26ed10c4f6fa6ddb329a5120ba3b6db349ca192ae211e882970bfc9d91420b"}, + {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c619b101e6de2222c1fcb0531e1b17bbffbe54294bfba43ea0d411d428618c27"}, + {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:073a36c8273647592ea332e816e75ef8da5c303236ec0167196793eb1e34657a"}, + {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3ce6b2b0231bda412463e152fc18335ba32faf4e8c23a754ad50ffa70e4091ee"}, + {file = "kiwisolver-1.4.7-cp313-cp313-win32.whl", hash = "sha256:f4c9aee212bc89d4e13f58be11a56cc8036cabad119259d12ace14b34476fd07"}, + {file = "kiwisolver-1.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:8a3ec5aa8e38fc4c8af308917ce12c536f1c88452ce554027e55b22cbbfbff76"}, + {file = "kiwisolver-1.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:76c8094ac20ec259471ac53e774623eb62e6e1f56cd8690c67ce6ce4fcb05650"}, + {file = "kiwisolver-1.4.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5d5abf8f8ec1f4e22882273c423e16cae834c36856cac348cfbfa68e01c40f3a"}, + {file = "kiwisolver-1.4.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:aeb3531b196ef6f11776c21674dba836aeea9d5bd1cf630f869e3d90b16cfade"}, + {file = "kiwisolver-1.4.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7d755065e4e866a8086c9bdada157133ff466476a2ad7861828e17b6026e22c"}, + {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08471d4d86cbaec61f86b217dd938a83d85e03785f51121e791a6e6689a3be95"}, + {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7bbfcb7165ce3d54a3dfbe731e470f65739c4c1f85bb1018ee912bae139e263b"}, + {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d34eb8494bea691a1a450141ebb5385e4b69d38bb8403b5146ad279f4b30fa3"}, + {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9242795d174daa40105c1d86aba618e8eab7bf96ba8c3ee614da8302a9f95503"}, + {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a0f64a48bb81af7450e641e3fe0b0394d7381e342805479178b3d335d60ca7cf"}, + {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8e045731a5416357638d1700927529e2b8ab304811671f665b225f8bf8d8f933"}, + {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4322872d5772cae7369f8351da1edf255a604ea7087fe295411397d0cfd9655e"}, + {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e1631290ee9271dffe3062d2634c3ecac02c83890ada077d225e081aca8aab89"}, + {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:edcfc407e4eb17e037bca59be0e85a2031a2ac87e4fed26d3e9df88b4165f92d"}, + {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4d05d81ecb47d11e7f8932bd8b61b720bf0b41199358f3f5e36d38e28f0532c5"}, + {file = "kiwisolver-1.4.7-cp38-cp38-win32.whl", hash = "sha256:b38ac83d5f04b15e515fd86f312479d950d05ce2368d5413d46c088dda7de90a"}, + {file = "kiwisolver-1.4.7-cp38-cp38-win_amd64.whl", hash = "sha256:d83db7cde68459fc803052a55ace60bea2bae361fc3b7a6d5da07e11954e4b09"}, + {file = "kiwisolver-1.4.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f9362ecfca44c863569d3d3c033dbe8ba452ff8eed6f6b5806382741a1334bd"}, + {file = "kiwisolver-1.4.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e8df2eb9b2bac43ef8b082e06f750350fbbaf2887534a5be97f6cf07b19d9583"}, + {file = "kiwisolver-1.4.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f32d6edbc638cde7652bd690c3e728b25332acbadd7cad670cc4a02558d9c417"}, + {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e2e6c39bd7b9372b0be21456caab138e8e69cc0fc1190a9dfa92bd45a1e6e904"}, + {file = 
"kiwisolver-1.4.7-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dda56c24d869b1193fcc763f1284b9126550eaf84b88bbc7256e15028f19188a"}, + {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79849239c39b5e1fd906556c474d9b0439ea6792b637511f3fe3a41158d89ca8"}, + {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e3bc157fed2a4c02ec468de4ecd12a6e22818d4f09cde2c31ee3226ffbefab2"}, + {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3da53da805b71e41053dc670f9a820d1157aae77b6b944e08024d17bcd51ef88"}, + {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8705f17dfeb43139a692298cb6637ee2e59c0194538153e83e9ee0c75c2eddde"}, + {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:82a5c2f4b87c26bb1a0ef3d16b5c4753434633b83d365cc0ddf2770c93829e3c"}, + {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce8be0466f4c0d585cdb6c1e2ed07232221df101a4c6f28821d2aa754ca2d9e2"}, + {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:409afdfe1e2e90e6ee7fc896f3df9a7fec8e793e58bfa0d052c8a82f99c37abb"}, + {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5b9c3f4ee0b9a439d2415012bd1b1cc2df59e4d6a9939f4d669241d30b414327"}, + {file = "kiwisolver-1.4.7-cp39-cp39-win32.whl", hash = "sha256:a79ae34384df2b615eefca647a2873842ac3b596418032bef9a7283675962644"}, + {file = "kiwisolver-1.4.7-cp39-cp39-win_amd64.whl", hash = "sha256:cf0438b42121a66a3a667de17e779330fc0f20b0d97d59d2f2121e182b0505e4"}, + {file = "kiwisolver-1.4.7-cp39-cp39-win_arm64.whl", hash = "sha256:764202cc7e70f767dab49e8df52c7455e8de0df5d858fa801a11aa0d882ccf3f"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:94252291e3fe68001b1dd747b4c0b3be12582839b95ad4d1b641924d68fd4643"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b7dfa3b546da08a9f622bb6becdb14b3e24aaa30adba66749d38f3cc7ea9706"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd3de6481f4ed8b734da5df134cd5a6a64fe32124fe83dde1e5b5f29fe30b1e6"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a91b5f9f1205845d488c928e8570dcb62b893372f63b8b6e98b863ebd2368ff2"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40fa14dbd66b8b8f470d5fc79c089a66185619d31645f9b0773b88b19f7223c4"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:eb542fe7933aa09d8d8f9d9097ef37532a7df6497819d16efe4359890a2f417a"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bfa1acfa0c54932d5607e19a2c24646fb4c1ae2694437789129cf099789a3b00"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:eee3ea935c3d227d49b4eb85660ff631556841f6e567f0f7bda972df6c2c9935"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f3160309af4396e0ed04db259c3ccbfdc3621b5559b5453075e5de555e1f3a1b"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a17f6a29cf8935e587cc8a4dbfc8368c55edc645283db0ce9801016f83526c2d"}, + {file = 
"kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10849fb2c1ecbfae45a693c070e0320a91b35dd4bcf58172c023b994283a124d"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:ac542bf38a8a4be2dc6b15248d36315ccc65f0743f7b1a76688ffb6b5129a5c2"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8b01aac285f91ca889c800042c35ad3b239e704b150cfd3382adfc9dcc780e39"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:48be928f59a1f5c8207154f935334d374e79f2b5d212826307d072595ad76a2e"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f37cfe618a117e50d8c240555331160d73d0411422b59b5ee217843d7b693608"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:599b5c873c63a1f6ed7eead644a8a380cfbdf5db91dcb6f85707aaab213b1674"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:801fa7802e5cfabe3ab0c81a34c323a319b097dfb5004be950482d882f3d7225"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0c6c43471bc764fad4bc99c5c2d6d16a676b1abf844ca7c8702bdae92df01ee0"}, + {file = "kiwisolver-1.4.7.tar.gz", hash = "sha256:9893ff81bd7107f7b685d3017cc6583daadb4fc26e4a888350df530e41980a60"}, ] [[package]] @@ -2770,13 +2781,13 @@ min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-imp [[package]] name = "mkdocs-autorefs" -version = "1.1.0" +version = "1.2.0" description = "Automatically link across pages in MkDocs." optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_autorefs-1.1.0-py3-none-any.whl", hash = "sha256:492ac42f50214e81565e968f8cb0df9aba9d981542b9e7121b8f8ae9407fe6eb"}, - {file = "mkdocs_autorefs-1.1.0.tar.gz", hash = "sha256:f2fd43b11f66284bd014f9b542a05c8ecbfaad4e0d7b30b68584788217b6c656"}, + {file = "mkdocs_autorefs-1.2.0-py3-none-any.whl", hash = "sha256:d588754ae89bd0ced0c70c06f58566a4ee43471eeeee5202427da7de9ef85a2f"}, + {file = "mkdocs_autorefs-1.2.0.tar.gz", hash = "sha256:a86b93abff653521bda71cf3fc5596342b7a23982093915cb74273f67522190f"}, ] [package.dependencies] @@ -2977,13 +2988,13 @@ testing-docutils = ["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4, [[package]] name = "nautobot" -version = "2.3.1" +version = "2.3.2" description = "Source of truth and network automation platform." 
optional = false python-versions = "<3.13,>=3.8" files = [ - {file = "nautobot-2.3.1-py3-none-any.whl", hash = "sha256:28c02e229dcc87d69dba0e75d36c3bd219fefa9328ac996471e9b39f3ec74bb3"}, - {file = "nautobot-2.3.1.tar.gz", hash = "sha256:96a3f0ee9cf73b404abca34bd2ed53a6d4494fcf85338734baa10dcd977f27f7"}, + {file = "nautobot-2.3.2-py3-none-any.whl", hash = "sha256:5318a26af1dde8919345bd242a3ed2be221bf2cc11149708fdcfdc55470b761a"}, + {file = "nautobot-2.3.2.tar.gz", hash = "sha256:03f0c7ca0224bf2a37a0a81ef978a20284c44e896a14e75bd403a0d09c2f913d"}, ] [package.dependencies] @@ -3238,14 +3249,19 @@ files = [ [[package]] name = "paginate" -version = "0.5.6" +version = "0.5.7" description = "Divides large result sets into pages for easier browsing" optional = false python-versions = "*" files = [ - {file = "paginate-0.5.6.tar.gz", hash = "sha256:5e6007b6a9398177a7e1648d04fdd9f8c9766a1a945bceac82f1929e8c78af2d"}, + {file = "paginate-0.5.7-py2.py3-none-any.whl", hash = "sha256:b885e2af73abcf01d9559fd5216b57ef722f8c42affbb63942377668e35c7591"}, + {file = "paginate-0.5.7.tar.gz", hash = "sha256:22bd083ab41e1a8b4f3690544afb2c60c25e5c9a63a30fa2f483f6c60c8e5945"}, ] +[package.extras] +dev = ["pytest", "tox"] +lint = ["black"] + [[package]] name = "parameterized" version = "0.8.1" @@ -3409,19 +3425,19 @@ files = [ [[package]] name = "platformdirs" -version = "4.2.2" +version = "4.3.2" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.2-py3-none-any.whl", hash = "sha256:eb1c8582560b34ed4ba105009a4badf7f6f85768b30126f351328507b2beb617"}, + {file = "platformdirs-4.3.2.tar.gz", hash = "sha256:9e5e27a08aa095dd127b9f2e764d74254f482fef22b0970773bfba79d091ab8c"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "prometheus-client" @@ -3520,6 +3536,7 @@ files = [ {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, @@ -3528,6 +3545,8 @@ files = [ {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, @@ -3603,13 +3622,13 @@ files = [ [[package]] name = "pycodestyle" -version = "2.9.1" +version = "2.12.1" description = "Python style guide checker" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, - {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, + {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, + {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, ] [[package]] @@ -3638,119 +3657,120 @@ files = [ [[package]] name = "pydantic" -version = "2.8.2" +version = "2.9.1" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, - {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, + {file = "pydantic-2.9.1-py3-none-any.whl", hash = "sha256:7aff4db5fdf3cf573d4b3c30926a510a10e19a0774d38fc4967f78beb6deb612"}, + {file = "pydantic-2.9.1.tar.gz", hash = "sha256:1363c7d975c7036df0db2b4a61f2e062fbc0aa5ab5f2772e0ffc7191a4f4bce2"}, ] [package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.20.1" +annotated-types = ">=0.6.0" +pydantic-core = "2.23.3" typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""} [package.extras] email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.20.1" +version = "2.23.3" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", 
hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, - {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, - {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, - {file = 
"pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, - {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, - {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, - {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, - {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, - {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, - {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, - {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, - {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, - {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = 
"pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, - {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, + {file = "pydantic_core-2.23.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7f10a5d1b9281392f1bf507d16ac720e78285dfd635b05737c3911637601bae6"}, + {file = "pydantic_core-2.23.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c09a7885dd33ee8c65266e5aa7fb7e2f23d49d8043f089989726391dd7350c5"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6470b5a1ec4d1c2e9afe928c6cb37eb33381cab99292a708b8cb9aa89e62429b"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9172d2088e27d9a185ea0a6c8cebe227a9139fd90295221d7d495944d2367700"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86fc6c762ca7ac8fbbdff80d61b2c59fb6b7d144aa46e2d54d9e1b7b0e780e01"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0cb80fd5c2df4898693aa841425ea1727b1b6d2167448253077d2a49003e0ed"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03667cec5daf43ac4995cefa8aaf58f99de036204a37b889c24a80927b629cec"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:047531242f8e9c2db733599f1c612925de095e93c9cc0e599e96cf536aaf56ba"}, + {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5499798317fff7f25dbef9347f4451b91ac2a4330c6669821c8202fd354c7bee"}, + {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bbb5e45eab7624440516ee3722a3044b83fff4c0372efe183fd6ba678ff681fe"}, + {file = "pydantic_core-2.23.3-cp310-none-win32.whl", hash = "sha256:8b5b3ed73abb147704a6e9f556d8c5cb078f8c095be4588e669d315e0d11893b"}, + {file = "pydantic_core-2.23.3-cp310-none-win_amd64.whl", hash = "sha256:2b603cde285322758a0279995b5796d64b63060bfbe214b50a3ca23b5cee3e83"}, + {file = "pydantic_core-2.23.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c889fd87e1f1bbeb877c2ee56b63bb297de4636661cc9bbfcf4b34e5e925bc27"}, + {file = "pydantic_core-2.23.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea85bda3189fb27503af4c45273735bcde3dd31c1ab17d11f37b04877859ef45"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7f7f72f721223f33d3dc98a791666ebc6a91fa023ce63733709f4894a7dc611"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b2b55b0448e9da68f56b696f313949cda1039e8ec7b5d294285335b53104b61"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c24574c7e92e2c56379706b9a3f07c1e0c7f2f87a41b6ee86653100c4ce343e5"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2b05e6ccbee333a8f4b8f4d7c244fdb7a979e90977ad9c51ea31261e2085ce0"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2c409ce1c219c091e47cb03feb3c4ed8c2b8e004efc940da0166aaee8f9d6c8"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:d965e8b325f443ed3196db890d85dfebbb09f7384486a77461347f4adb1fa7f8"}, + {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f56af3a420fb1ffaf43ece3ea09c2d27c444e7c40dcb7c6e7cf57aae764f2b48"}, + {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b01a078dd4f9a52494370af21aa52964e0a96d4862ac64ff7cea06e0f12d2c5"}, + {file = "pydantic_core-2.23.3-cp311-none-win32.whl", hash = "sha256:560e32f0df04ac69b3dd818f71339983f6d1f70eb99d4d1f8e9705fb6c34a5c1"}, + {file = "pydantic_core-2.23.3-cp311-none-win_amd64.whl", hash = "sha256:c744fa100fdea0d000d8bcddee95213d2de2e95b9c12be083370b2072333a0fa"}, + {file = "pydantic_core-2.23.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e0ec50663feedf64d21bad0809f5857bac1ce91deded203efc4a84b31b2e4305"}, + {file = "pydantic_core-2.23.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:db6e6afcb95edbe6b357786684b71008499836e91f2a4a1e55b840955b341dbb"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98ccd69edcf49f0875d86942f4418a4e83eb3047f20eb897bffa62a5d419c8fa"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a678c1ac5c5ec5685af0133262103defb427114e62eafeda12f1357a12140162"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01491d8b4d8db9f3391d93b0df60701e644ff0894352947f31fff3e52bd5c801"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fcf31facf2796a2d3b7fe338fe8640aa0166e4e55b4cb108dbfd1058049bf4cb"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7200fd561fb3be06827340da066df4311d0b6b8eb0c2116a110be5245dceb326"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc1636770a809dee2bd44dd74b89cc80eb41172bcad8af75dd0bc182c2666d4c"}, + {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:67a5def279309f2e23014b608c4150b0c2d323bd7bccd27ff07b001c12c2415c"}, + {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:748bdf985014c6dd3e1e4cc3db90f1c3ecc7246ff5a3cd4ddab20c768b2f1dab"}, + {file = "pydantic_core-2.23.3-cp312-none-win32.whl", hash = "sha256:255ec6dcb899c115f1e2a64bc9ebc24cc0e3ab097775755244f77360d1f3c06c"}, + {file = "pydantic_core-2.23.3-cp312-none-win_amd64.whl", hash = "sha256:40b8441be16c1e940abebed83cd006ddb9e3737a279e339dbd6d31578b802f7b"}, + {file = "pydantic_core-2.23.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6daaf5b1ba1369a22c8b050b643250e3e5efc6a78366d323294aee54953a4d5f"}, + {file = "pydantic_core-2.23.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d015e63b985a78a3d4ccffd3bdf22b7c20b3bbd4b8227809b3e8e75bc37f9cb2"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3fc572d9b5b5cfe13f8e8a6e26271d5d13f80173724b738557a8c7f3a8a3791"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f6bd91345b5163ee7448bee201ed7dd601ca24f43f439109b0212e296eb5b423"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc379c73fd66606628b866f661e8785088afe2adaba78e6bbe80796baf708a63"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:fbdce4b47592f9e296e19ac31667daed8753c8367ebb34b9a9bd89dacaa299c9"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3cf31edf405a161a0adad83246568647c54404739b614b1ff43dad2b02e6d5"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8e22b477bf90db71c156f89a55bfe4d25177b81fce4aa09294d9e805eec13855"}, + {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:0a0137ddf462575d9bce863c4c95bac3493ba8e22f8c28ca94634b4a1d3e2bb4"}, + {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:203171e48946c3164fe7691fc349c79241ff8f28306abd4cad5f4f75ed80bc8d"}, + {file = "pydantic_core-2.23.3-cp313-none-win32.whl", hash = "sha256:76bdab0de4acb3f119c2a4bff740e0c7dc2e6de7692774620f7452ce11ca76c8"}, + {file = "pydantic_core-2.23.3-cp313-none-win_amd64.whl", hash = "sha256:37ba321ac2a46100c578a92e9a6aa33afe9ec99ffa084424291d84e456f490c1"}, + {file = "pydantic_core-2.23.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d063c6b9fed7d992bcbebfc9133f4c24b7a7f215d6b102f3e082b1117cddb72c"}, + {file = "pydantic_core-2.23.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6cb968da9a0746a0cf521b2b5ef25fc5a0bee9b9a1a8214e0a1cfaea5be7e8a4"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbefe079a520c5984e30e1f1f29325054b59534729c25b874a16a5048028d16"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cbaaf2ef20d282659093913da9d402108203f7cb5955020bd8d1ae5a2325d1c4"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb539d7e5dc4aac345846f290cf504d2fd3c1be26ac4e8b5e4c2b688069ff4cf"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e6f33503c5495059148cc486867e1d24ca35df5fc064686e631e314d959ad5b"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04b07490bc2f6f2717b10c3969e1b830f5720b632f8ae2f3b8b1542394c47a8e"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:03795b9e8a5d7fda05f3873efc3f59105e2dcff14231680296b87b80bb327295"}, + {file = "pydantic_core-2.23.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c483dab0f14b8d3f0df0c6c18d70b21b086f74c87ab03c59250dbf6d3c89baba"}, + {file = "pydantic_core-2.23.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b2682038e255e94baf2c473dca914a7460069171ff5cdd4080be18ab8a7fd6e"}, + {file = "pydantic_core-2.23.3-cp38-none-win32.whl", hash = "sha256:f4a57db8966b3a1d1a350012839c6a0099f0898c56512dfade8a1fe5fb278710"}, + {file = "pydantic_core-2.23.3-cp38-none-win_amd64.whl", hash = "sha256:13dd45ba2561603681a2676ca56006d6dee94493f03d5cadc055d2055615c3ea"}, + {file = "pydantic_core-2.23.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82da2f4703894134a9f000e24965df73cc103e31e8c31906cc1ee89fde72cbd8"}, + {file = "pydantic_core-2.23.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dd9be0a42de08f4b58a3cc73a123f124f65c24698b95a54c1543065baca8cf0e"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89b731f25c80830c76fdb13705c68fef6a2b6dc494402987c7ea9584fe189f5d"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:c6de1ec30c4bb94f3a69c9f5f2182baeda5b809f806676675e9ef6b8dc936f28"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb68b41c3fa64587412b104294b9cbb027509dc2f6958446c502638d481525ef"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c3980f2843de5184656aab58698011b42763ccba11c4a8c35936c8dd6c7068c"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94f85614f2cba13f62c3c6481716e4adeae48e1eaa7e8bac379b9d177d93947a"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:510b7fb0a86dc8f10a8bb43bd2f97beb63cffad1203071dc434dac26453955cd"}, + {file = "pydantic_core-2.23.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1eba2f7ce3e30ee2170410e2171867ea73dbd692433b81a93758ab2de6c64835"}, + {file = "pydantic_core-2.23.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b259fd8409ab84b4041b7b3f24dcc41e4696f180b775961ca8142b5b21d0e70"}, + {file = "pydantic_core-2.23.3-cp39-none-win32.whl", hash = "sha256:40d9bd259538dba2f40963286009bf7caf18b5112b19d2b55b09c14dde6db6a7"}, + {file = "pydantic_core-2.23.3-cp39-none-win_amd64.whl", hash = "sha256:5a8cd3074a98ee70173a8633ad3c10e00dcb991ecec57263aacb4095c5efb958"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f399e8657c67313476a121a6944311fab377085ca7f490648c9af97fc732732d"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6b5547d098c76e1694ba85f05b595720d7c60d342f24d5aad32c3049131fa5c4"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0dda0290a6f608504882d9f7650975b4651ff91c85673341789a476b1159f211"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b6e5da855e9c55a0c67f4db8a492bf13d8d3316a59999cfbaf98cc6e401961"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:09e926397f392059ce0afdcac920df29d9c833256354d0c55f1584b0b70cf07e"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:87cfa0ed6b8c5bd6ae8b66de941cece179281239d482f363814d2b986b79cedc"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e61328920154b6a44d98cabcb709f10e8b74276bc709c9a513a8c37a18786cc4"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce3317d155628301d649fe5e16a99528d5680af4ec7aa70b90b8dacd2d725c9b"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e89513f014c6be0d17b00a9a7c81b1c426f4eb9224b15433f3d98c1a071f8433"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4f62c1c953d7ee375df5eb2e44ad50ce2f5aff931723b398b8bc6f0ac159791a"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2718443bc671c7ac331de4eef9b673063b10af32a0bb385019ad61dcf2cc8f6c"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0d90e08b2727c5d01af1b5ef4121d2f0c99fbee692c762f4d9d0409c9da6541"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b676583fc459c64146debea14ba3af54e540b61762dfc0613dc4e98c3f66eeb"}, + {file = 
"pydantic_core-2.23.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:50e4661f3337977740fdbfbae084ae5693e505ca2b3130a6d4eb0f2281dc43b8"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:68f4cf373f0de6abfe599a38307f4417c1c867ca381c03df27c873a9069cda25"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:59d52cf01854cb26c46958552a21acb10dd78a52aa34c86f284e66b209db8cab"}, + {file = "pydantic_core-2.23.3.tar.gz", hash = "sha256:3cb0f65d8b4121c1b015c60104a685feb929a29d7cf204387c7f2688c7974690"}, ] [package.dependencies] @@ -3927,13 +3947,13 @@ extra = ["pygments (>=2.12)"] [[package]] name = "pyparsing" -version = "3.1.2" +version = "3.1.4" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = true python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, - {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, + {file = "pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c"}, + {file = "pyparsing-3.1.4.tar.gz", hash = "sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032"}, ] [package.extras] @@ -4577,19 +4597,23 @@ files = [ [[package]] name = "setuptools" -version = "73.0.1" +version = "74.1.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-73.0.1-py3-none-any.whl", hash = "sha256:b208925fcb9f7af924ed2dc04708ea89791e24bde0d3020b27df0e116088b34e"}, - {file = "setuptools-73.0.1.tar.gz", hash = "sha256:d59a3e788ab7e012ab2c4baed1b376da6366883ee20d7a5fc426816e3d7b1193"}, + {file = "setuptools-74.1.2-py3-none-any.whl", hash = "sha256:5f4c08aa4d3ebcb57a50c33b1b07e94315d7fc7230f7115e47fc99776c8ce308"}, + {file = "setuptools-74.1.2.tar.gz", hash = "sha256:95b40ed940a1c67eb70fc099094bd6e99c6ee7c23aa2306f4d2697ba7916f9c6"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", 
"ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] [[package]] name = "singledispatch" @@ -5204,18 +5228,22 @@ dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"] [[package]] name = "zipp" -version = "3.20.0" +version = "3.20.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, - {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, + {file = "zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"}, + {file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [extras] aci = ["PyYAML"] @@ -5232,4 +5260,4 @@ servicenow = ["Jinja2", "PyYAML", "ijson", "oauthlib", "python-magic", "pytz", " [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.13" -content-hash = "0969d1260d94aabfb9612f6faae17bb0c7f76dbe37ceda3474817c53e3257157" +content-hash = "8d8defc6b94cfb1d178aef14dcbd58036a360933fc6e50de9bfdf58ab9124138" diff --git a/pyproject.toml b/pyproject.toml index 1b83287e0..26602caae 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,7 +58,7 @@ dnacentersdk = { version = "^2.5.6", optional = true } [tool.poetry.group.dev.dependencies] coverage = "*" -django-debug-toolbar = "*" +django-debug-toolbar = "<4.4" invoke = "*" ipython = "*" jedi = "^0.17.2" diff --git a/tasks.py b/tasks.py index 3006f401b..58a4484fd 100644 --- a/tasks.py +++ b/tasks.py @@ -72,9 +72,7 @@ def _is_compose_included(context, name): def _await_healthy_service(context, service): - container_id = docker_compose( - context, f"ps -q -- {service}", pty=False, echo=False, hide=True - ).stdout.strip() + container_id = docker_compose(context, f"ps -q -- {service}", pty=False, echo=False, hide=True).stdout.strip() _await_healthy_container(context, container_id) @@ -166,9 +164,7 @@ def docker_compose(context, command, **kwargs): ] for compose_file in context.nautobot_ssot.compose_files: - compose_file_path = os.path.join( - context.nautobot_ssot.compose_dir, compose_file - ) + compose_file_path = os.path.join(context.nautobot_ssot.compose_dir, compose_file) compose_command_tokens.append(f' -f 
"{compose_file_path}"') compose_command_tokens.append(command) @@ -244,20 +240,10 @@ def _get_docker_nautobot_version(context, nautobot_ver=None, python_ver=None): if python_ver is None: python_ver = context.nautobot_ssot.python_ver dockerfile_path = os.path.join(context.nautobot_ssot.compose_dir, "Dockerfile") - base_image = ( - context.run(f"grep --max-count=1 '^FROM ' {dockerfile_path}", hide=True) - .stdout.strip() - .split(" ")[1] - ) - base_image = base_image.replace(r"${NAUTOBOT_VER}", nautobot_ver).replace( - r"${PYTHON_VER}", python_ver - ) - pip_nautobot_ver = context.run( - f"docker run --rm --entrypoint '' {base_image} pip show nautobot", hide=True - ) - match_version = re.search( - r"^Version: (.+)$", pip_nautobot_ver.stdout.strip(), flags=re.MULTILINE - ) + base_image = context.run(f"grep --max-count=1 '^FROM ' {dockerfile_path}", hide=True).stdout.strip().split(" ")[1] + base_image = base_image.replace(r"${NAUTOBOT_VER}", nautobot_ver).replace(r"${PYTHON_VER}", python_ver) + pip_nautobot_ver = context.run(f"docker run --rm --entrypoint '' {base_image} pip show nautobot", hide=True) + match_version = re.search(r"^Version: (.+)$", pip_nautobot_ver.stdout.strip(), flags=re.MULTILINE) if match_version: return match_version.group(1) else: @@ -282,9 +268,7 @@ def _get_docker_nautobot_version(context, nautobot_ver=None, python_ver=None): ), } ) -def lock( - context, check=False, constrain_nautobot_ver=False, constrain_python_ver=False -): +def lock(context, check=False, constrain_nautobot_ver=False, constrain_python_ver=False): """Generate poetry.lock file.""" if constrain_nautobot_ver: docker_nautobot_version = _get_docker_nautobot_version(context) @@ -324,9 +308,7 @@ def restart(context, service=""): def stop(context, service=""): """Stop specified or all services, if service is not specified, remove all containers.""" print("Stopping Nautobot...") - docker_compose( - context, "stop" if service else "down --remove-orphans", service=service - ) + docker_compose(context, "stop" if service else "down --remove-orphans", service=service) @task( @@ -345,9 +327,7 @@ def destroy(context, volumes=True, import_db_file=""): return if not volumes: - raise ValueError( - "Cannot specify `--no-volumes` and `--import-db-file` arguments at the same time." 
- ) + raise ValueError("Cannot specify `--no-volumes` and `--import-db-file` arguments at the same time.") print(f"Importing database file: {import_db_file}...") @@ -364,16 +344,12 @@ def destroy(context, volumes=True, import_db_file=""): "db", ] - container_id = docker_compose( - context, " ".join(command), pty=False, echo=False, hide=True - ).stdout.strip() + container_id = docker_compose(context, " ".join(command), pty=False, echo=False, hide=True).stdout.strip() _await_healthy_container(context, container_id) print("Stopping database container...") context.run(f"docker stop {container_id}", pty=False, echo=False, hide=True) - print( - "Database import complete, you can start Nautobot with the following command:" - ) + print("Database import complete, you can start Nautobot with the following command:") print("invoke start") @@ -549,9 +525,7 @@ def dbshell(context, db_name="", input_file="", output_file="", query=""): if input_file and query: raise ValueError("Cannot specify both, `input_file` and `query` arguments") if output_file and not (input_file or query): - raise ValueError( - "`output_file` argument requires `input_file` or `query` argument" - ) + raise ValueError("`output_file` argument requires `input_file` or `query` argument") env = {} if query: @@ -689,9 +663,7 @@ def backup_db(context, db_name="", output_file="dump.sql", readable=True): docker_compose(context, " ".join(command), pty=False) print(50 * "=") - print( - "The database backup has been successfully completed and saved to the following file:" - ) + print("The database backup has been successfully completed and saved to the following file:") print(output_file) print("You can import this database backup with the following command:") print(f"invoke import-db --input-file '{output_file}'") @@ -860,9 +832,7 @@ def unittest( # noqa: PLR0913 @task def unittest_coverage(context): """Report on code test coverage as measured by 'invoke unittest'.""" - command = ( - "coverage report --skip-covered --include 'nautobot_ssot/*' --omit *migrations*" - ) + command = "coverage report --skip-covered --include 'nautobot_ssot/*' --omit *migrations*" run_command(context, command) From a80a0caf9116dc3480835cd9584fa55f378733d8 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Mon, 9 Sep 2024 20:24:05 -0500 Subject: [PATCH 04/19] =?UTF-8?q?fix:=20=F0=9F=9A=A8=20Correct=20complaint?= =?UTF-8?q?=20from=20pylint?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/tests/bootstrap/test_setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/tests/bootstrap/test_setup.py b/nautobot_ssot/tests/bootstrap/test_setup.py index 7f8d198af..8a56c11e7 100644 --- a/nautobot_ssot/tests/bootstrap/test_setup.py +++ b/nautobot_ssot/tests/bootstrap/test_setup.py @@ -110,10 +110,10 @@ def load_json(path): ] -def is_valid_timezone(tz): +def is_valid_timezone(timezone): """Return whether timezone passed is a valid timezone in pytz.""" try: - pytz.timezone(tz) + pytz.timezone(timezone) return True except pytz.UnknownTimeZoneError: return False From f9285f389da948bf27127b77d0490d9b2ac977ac Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Mon, 9 Sep 2024 20:24:32 -0500 Subject: [PATCH 05/19] =?UTF-8?q?docs:=20=F0=9F=93=9D=20Fix=20documentatio?= =?UTF-8?q?n?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/admin/integrations/index.md | 1 
+ docs/admin/release_notes/version_1.5.md | 2 +- docs/user/integrations/index.md | 1 + docs/user/performance.md | 2 +- mkdocs.yml | 2 +- 5 files changed, 5 insertions(+), 3 deletions(-) diff --git a/docs/admin/integrations/index.md b/docs/admin/integrations/index.md index 1f28b95e8..0353e67b4 100644 --- a/docs/admin/integrations/index.md +++ b/docs/admin/integrations/index.md @@ -3,6 +3,7 @@ This Nautobot app supports the following integrations: - [Cisco ACI](./aci_setup.md) +- [Bootstrap](./bootstrap_setup.md) - [Arista CloudVision](./aristacv_setup.md) - [Device42](./device42_setup.md) - [Infoblox](./infoblox_setup.md) diff --git a/docs/admin/release_notes/version_1.5.md b/docs/admin/release_notes/version_1.5.md index 68f81cf46..736747ea2 100644 --- a/docs/admin/release_notes/version_1.5.md +++ b/docs/admin/release_notes/version_1.5.md @@ -5,4 +5,4 @@ ## Changed - [206](https://github.com/nautobot/nautobot-app-ssot/pull/206) - Update docs pins for py3.7 compatibility by @cmsirbu -- [207][https://github.com/nautobot/nautobot-app-ssot/pull/207] Drop Python 3.7 Support by @jdrew82 +- [207](https://github.com/nautobot/nautobot-app-ssot/pull/207) Drop Python 3.7 Support by @jdrew82 diff --git a/docs/user/integrations/index.md b/docs/user/integrations/index.md index ebd3a8df9..3297227e7 100644 --- a/docs/user/integrations/index.md +++ b/docs/user/integrations/index.md @@ -3,6 +3,7 @@ This Nautobot app supports the following integrations: - [Cisco ACI](./aci.md) +- [Bootstrap](./bootstrap.md) - [Arista CloudVision](./aristacv.md) - [Bootstrap](./bootstrap.md) - [Device42](./device42.md) diff --git a/docs/user/performance.md b/docs/user/performance.md index 60b6d3198..fe2356693 100644 --- a/docs/user/performance.md +++ b/docs/user/performance.md @@ -17,7 +17,7 @@ In brief, the following general steps can be followed: 2. Define a `DiffSync` adapter class for loading initial data from Nautobot and constructing instances of each `DiffSyncModel` class to represent that data. 3. Define a `DiffSync` adapter class for loading initial data from the Data Source or Data Target system and constructing instances of the `DiffSyncModel` classes to represent that data. -4. Develop a Job class, derived from either the `DataSource` or `DataTarget` classes provided by this app, and implement the adapters to populate the `self.source_adapter` and `self.target_adapter` that are used by the built-in implementation of `sync_data`. This `sync_data` method is an opinionated way of running the process including some performance data, more in [next section](#analyze-job-performance), but you could overwrite it completely or any of the key hooks that it calls: +4. Develop a Job class, derived from either the `DataSource` or `DataTarget` classes provided by this app, and implement the adapters to populate the `self.source_adapter` and `self.target_adapter` that are used by the built-in implementation of `sync_data`. This `sync_data` method is an opinionated way of running the process including some performance data, more in [next section](#optimizing-for-execution-time), but you could overwrite it completely or any of the key hooks that it calls: - `self.load_source_adapter`: This is mandatory to be implemented. 
As an example: diff --git a/mkdocs.yml b/mkdocs.yml index 53eb099e4..c40d947b0 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -126,7 +126,7 @@ nav: - "admin/integrations/index.md" - Cisco ACI: "admin/integrations/aci_setup.md" - Arista CloudVision: "admin/integrations/aristacv_setup.md" - - Bootstrap: "admin/integrations/bootstrap.md" + - Bootstrap: "admin/integrations/bootstrap_setup.md" - Device42: "admin/integrations/device42_setup.md" - Infoblox: "admin/integrations/infoblox_setup.md" - IPFabric: "admin/integrations/ipfabric_setup.md" From dd36475f96a6cc4bca9cc975a6971fa1a052cb5e Mon Sep 17 00:00:00 2001 From: bile0026 Date: Tue, 10 Sep 2024 07:53:05 -0500 Subject: [PATCH 06/19] =?UTF-8?q?chore:=20=F0=9F=93=9D=20add=20change=20fr?= =?UTF-8?q?agment?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- changes/{495.added => 541.added} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename changes/{495.added => 541.added} (100%) diff --git a/changes/495.added b/changes/541.added similarity index 100% rename from changes/495.added rename to changes/541.added From b64457be41d03d7e5193a09f4d5b51dadabb0f34 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 10 Sep 2024 07:58:03 -0500 Subject: [PATCH 07/19] =?UTF-8?q?docs:=20=F0=9F=93=9D=20Add=20changelog=20?= =?UTF-8?q?fragment=20for=20documentation=20changes.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- changes/541.documentation | 1 + 1 file changed, 1 insertion(+) create mode 100644 changes/541.documentation diff --git a/changes/541.documentation b/changes/541.documentation new file mode 100644 index 000000000..3aab3dcb6 --- /dev/null +++ b/changes/541.documentation @@ -0,0 +1 @@ +Fixed documentation errors with 1.5 release notes and missing links to integration setup and user sections. 
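The patch that follows casts `time_zone` to `str` when loading Locations. The underlying issue: Nautobot returns a location's time zone as a timezone object, while the Bootstrap YAML supplies a plain string, so a naive comparison reports a difference on every sync. A minimal sketch of the mismatch (illustrative only; `zoneinfo` is assumed here as a stand-in for whatever timezone class the ORM hands back):

```python
# Illustrative sketch, not part of the patch series: why the adapter
# casts time_zone with str() before handing it to DiffSync.
from zoneinfo import ZoneInfo  # stand-in for the object Nautobot's ORM returns

tz_from_nautobot = ZoneInfo("America/Chicago")  # object loaded from the Location model
tz_from_yaml = "America/Chicago"                # plain string parsed from the YAML file

print(tz_from_nautobot == tz_from_yaml)       # False -> DiffSync flags a change every run
print(str(tz_from_nautobot) == tz_from_yaml)  # True  -> normalizing to str removes the noise
```

Normalizing both sides to the IANA key string keeps the comparison stable without touching the stored value.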
From 69923d675926001a6168712aab101665d3eed5ff Mon Sep 17 00:00:00 2001 From: bile0026 Date: Tue, 10 Sep 2024 12:04:01 -0500 Subject: [PATCH 08/19] =?UTF-8?q?fix:=20=F0=9F=90=9B=20adjust=20data=20typ?= =?UTF-8?q?e?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrations/bootstrap/diffsync/adapters/nautobot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/integrations/bootstrap/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/bootstrap/diffsync/adapters/nautobot.py index 0ab1c6e29..89462f131 100755 --- a/nautobot_ssot/integrations/bootstrap/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/bootstrap/diffsync/adapters/nautobot.py @@ -436,7 +436,7 @@ def load_location(self): status=nb_location.status.name, facility=nb_location.facility, asn=nb_location.asn, - time_zone=_time_zone, + time_zone=str(_time_zone), description=nb_location.description, tenant=_tenant, physical_address=nb_location.physical_address, From 435a75847a6977bd5548c06317984aa24d3f5c0f Mon Sep 17 00:00:00 2001 From: bile0026 Date: Wed, 11 Sep 2024 09:31:38 -0500 Subject: [PATCH 09/19] =?UTF-8?q?fix:=20=F0=9F=90=9B=20logging=20debug=20m?= =?UTF-8?q?essages=20obey=20debug=20toggle=20on=20job?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../bootstrap/diffsync/adapters/bootstrap.py | 94 ++++++++++++------ .../bootstrap/diffsync/adapters/nautobot.py | 98 ++++++++++++------- 2 files changed, 126 insertions(+), 66 deletions(-) diff --git a/nautobot_ssot/integrations/bootstrap/diffsync/adapters/bootstrap.py b/nautobot_ssot/integrations/bootstrap/diffsync/adapters/bootstrap.py index 89eaa9eeb..133491cf5 100755 --- a/nautobot_ssot/integrations/bootstrap/diffsync/adapters/bootstrap.py +++ b/nautobot_ssot/integrations/bootstrap/diffsync/adapters/bootstrap.py @@ -209,7 +209,8 @@ def __init__(self, *args, job=None, sync=None, client=None, **kwargs): # noqa: def load_tenant_group(self, bs_tenant_group, branch_vars): """Load TenantGroup objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Bootstrap TenantGroup: {bs_tenant_group}") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap TenantGroup: {bs_tenant_group}") try: self.get(self.tenant_group, bs_tenant_group["name"]) @@ -224,7 +225,8 @@ def load_tenant_group(self, bs_tenant_group, branch_vars): def load_tenant(self, bs_tenant, branch_vars): """Load Tenant objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Bootstrap Tenant: {bs_tenant}") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Tenant: {bs_tenant}") try: self.get(self.tenant, bs_tenant["name"]) @@ -240,7 +242,8 @@ def load_tenant(self, bs_tenant, branch_vars): def load_role(self, bs_role, branch_vars): """Load Role objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Bootstrap Role {bs_role}") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Role {bs_role}") if len(bs_role["content_types"]) > 1: _content_types = bs_role["content_types"] @@ -262,7 +265,8 @@ def load_role(self, bs_role, branch_vars): def load_manufacturer(self, bs_manufacturer, branch_vars): """Load Manufacturer objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Boostrap Manufacturer {bs_manufacturer}") + if self.job.debug: + self.job.logger.debug(f"Loading Boostrap Manufacturer {bs_manufacturer}") try: self.get(self.manufacturer, 
bs_manufacturer["name"]) @@ -276,7 +280,8 @@ def load_manufacturer(self, bs_manufacturer, branch_vars): def load_platform(self, bs_platform, branch_vars): """Load Platform objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Bootstrap Platform {bs_platform}") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Platform {bs_platform}") try: self.get(self.platform, bs_platform["name"]) @@ -294,7 +299,8 @@ def load_platform(self, bs_platform, branch_vars): def load_location_type(self, bs_location_type, branch_vars): """Load LocationType objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Bootstrap LocationType {bs_location_type}") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap LocationType {bs_location_type}") try: self.get(self.location_type, bs_location_type["name"]) @@ -319,7 +325,8 @@ def load_location_type(self, bs_location_type, branch_vars): def load_location(self, bs_location, branch_vars): """Load Location objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Bootstrap Location {bs_location}") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Location {bs_location}") try: self.get(self.location, bs_location["name"]) @@ -356,7 +363,8 @@ def load_location(self, bs_location, branch_vars): def load_team(self, bs_team, branch_vars): """Load Team objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Bootstrap Team {bs_team}") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Team {bs_team}") if "contacts" in bs_team: _contacts = [] @@ -379,7 +387,8 @@ def load_team(self, bs_team, branch_vars): def load_contact(self, bs_contact, branch_vars): """Load Contact objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Boostrap Contact {bs_contact}") + if self.job.debug: + self.job.logger.debug(f"Loading Boostrap Contact {bs_contact}") if "teams" in bs_contact: _teams = [] @@ -401,7 +410,8 @@ def load_contact(self, bs_contact, branch_vars): def load_provider(self, bs_provider, branch_vars): """Load Provider objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Bootstrap Provider {bs_provider}") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Provider {bs_provider}") try: self.get(self.provider, bs_provider["name"]) @@ -420,7 +430,8 @@ def load_provider(self, bs_provider, branch_vars): def load_provider_network(self, bs_provider_network, branch_vars): """Load ProviderNetwork objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Bootstrap ProviderNetwork {bs_provider_network}") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap ProviderNetwork {bs_provider_network}") try: self.get(self.provider_network, bs_provider_network["name"]) @@ -437,7 +448,8 @@ def load_provider_network(self, bs_provider_network, branch_vars): def load_circuit_type(self, bs_circuit_type, branch_vars): """Load CircuitType objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Bootstrap CircuitType {bs_circuit_type} into DiffSync models.") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap CircuitType {bs_circuit_type} into DiffSync models.") try: self.get(self.circuit_type, bs_circuit_type["name"]) @@ -451,7 +463,8 @@ def load_circuit_type(self, bs_circuit_type, branch_vars): def load_circuit(self, bs_circuit, branch_vars): """Load Circuit objects from Bootstrap into DiffSync models.""" - 
self.job.logger.debug(f"Loading Bootstrap Circuit {bs_circuit} into DiffSync models.") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Circuit {bs_circuit} into DiffSync models.") try: self.get(self.circuit, bs_circuit["circuit_id"]) @@ -471,7 +484,8 @@ def load_circuit(self, bs_circuit, branch_vars): def load_circuit_termination(self, bs_circuit_termination, branch_vars): """Load CircuitTermination objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Bootstrap CircuitTermination {bs_circuit_termination} into DiffSync models.") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap CircuitTermination {bs_circuit_termination} into DiffSync models.") _parts = bs_circuit_termination["name"].split("__") _circuit_id = _parts[0] _provider = _parts[1] @@ -509,7 +523,8 @@ def load_circuit_termination(self, bs_circuit_termination, branch_vars): def load_namespace(self, bs_namespace, branch_vars): """Load Namespace objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Bootstrap Namespace {bs_namespace}.") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Namespace {bs_namespace}.") try: self.get(self.namespace, bs_namespace["name"]) except ObjectNotFound: @@ -523,7 +538,8 @@ def load_namespace(self, bs_namespace, branch_vars): def load_rir(self, bs_rir, branch_vars): """Load RiR objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Bootstrap RiR {bs_rir}.") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap RiR {bs_rir}.") try: self.get(self.rir, bs_rir["name"]) except ObjectNotFound: @@ -537,7 +553,8 @@ def load_rir(self, bs_rir, branch_vars): def load_vlan_group(self, bs_vlan_group, branch_vars): """Load VLANGroup objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Bootstrap VLANGroup {bs_vlan_group}.") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap VLANGroup {bs_vlan_group}.") try: self.get(self.vlan_group, bs_vlan_group["name"]) except ObjectNotFound: @@ -551,7 +568,8 @@ def load_vlan_group(self, bs_vlan_group, branch_vars): def load_vlan(self, bs_vlan, branch_vars): """Load VLAN objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Bootstrap VLAN {bs_vlan}.") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap VLAN {bs_vlan}.") try: self.get( self.vlan, @@ -578,7 +596,8 @@ def load_vlan(self, bs_vlan, branch_vars): def load_vrf(self, bs_vrf, branch_vars): """Load VRF objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Bootstrap VRF {bs_vrf}.") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap VRF {bs_vrf}.") try: self.get( self.vrf, @@ -601,7 +620,8 @@ def load_vrf(self, bs_vrf, branch_vars): def load_prefix(self, bs_prefix, branch_vars): """Load Prefix objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Bootstrap Prefix {bs_prefix}.") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Prefix {bs_prefix}.") try: self.get( self.prefix, @@ -651,6 +671,8 @@ def load_prefix(self, bs_prefix, branch_vars): def load_secret(self, bs_secret, branch_vars): """Load Secret objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Secret: {bs_secret}") if bs_secret["provider"] == "environment-variable": params = {"variable": bs_secret["parameters"]["variable"]} elif bs_secret["provider"] == "text-file": @@ -659,8 +681,6 @@ def 
load_secret(self, bs_secret, branch_vars): self.job.logger.warning(f"Secret: {bs_secret} is not formatted correctly in the yaml file.") return - self.job.logger.debug(f"Loading Bootstrap Secret: {bs_secret}, params: {params}") - try: self.get(self.secret, bs_secret["name"]) except ObjectNotFound: @@ -675,7 +695,8 @@ def load_secret(self, bs_secret, branch_vars): def load_secrets_group(self, bs_sg, branch_vars): """Load SecretsGroup objects from Bootstrap into DiffSync models.""" _secrets = [] - self.job.logger.debug(f"Loading Bootstrap SecretsGroup: {bs_sg}") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap SecretsGroup: {bs_sg}") try: self.get(self.secrets_group, bs_sg["name"]) except ObjectNotFound: @@ -691,7 +712,8 @@ def load_secrets_group(self, bs_sg, branch_vars): def load_git_repository(self, git_repo, branch_vars): """Load GitRepository objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Bootstrap GitRepository: {git_repo}") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap GitRepository: {git_repo}") try: self.get(self.git_repository, git_repo["name"]) except ObjectNotFound: @@ -716,7 +738,8 @@ def load_git_repository(self, git_repo, branch_vars): def load_dynamic_group(self, dyn_group): """Load DynamicGroup objects from Bootstrap into DiffSync models.""" - self.job.logger.debug(f"Loading Bootstrap DynamicGroup: {dyn_group}") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap DynamicGroup: {dyn_group}") try: self.get(self.dynamic_group, dyn_group["name"]) except ObjectNotFound: @@ -731,7 +754,8 @@ def load_dynamic_group(self, dyn_group): def load_computed_field(self, comp_field): """Load ComputedField objects from Bootstrap into DiffSync Models.""" - self.job.logger.debug(f"Loading Bootstrap ComputedField: {comp_field}") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap ComputedField: {comp_field}") try: self.get(self.computed_field, comp_field["label"]) except ObjectNotFound: @@ -744,7 +768,8 @@ def load_computed_field(self, comp_field): def load_tag(self, tag): """Load Tag objects from Bootstrap into DiffSync Models.""" - self.job.logger.debug(f"Loading Bootstrap Tag: {tag}") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Tag: {tag}") if len(tag["content_types"]) > 1: _content_types = tag["content_types"] _content_types.sort() @@ -764,7 +789,8 @@ def load_tag(self, tag): def load_graph_ql_query(self, query): """Load GraphQLQuery objects from Bootstrap into DiffSync Models.""" - self.job.logger.debug(f"Loading Bootstrap GraphQLQuery {query}") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap GraphQLQuery {query}") try: self.get(self.graph_ql_query, query["name"]) except ObjectNotFound: @@ -773,7 +799,8 @@ def load_graph_ql_query(self, query): def load_software(self, software): """Load Software objects from Bootstrap into DiffSync Models.""" - self.job.logger.debug(f"Loading Bootstrap Software {software}") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Software {software}") try: self.get( self.software, @@ -811,7 +838,8 @@ def load_software(self, software): def load_software_image(self, software_image): """Load SoftwareImage objects from Bootstrap into DiffSync Models.""" - self.job.logger.debug(f"Loading Bootstrap SoftwareImage {software_image}") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap SoftwareImage {software_image}") try: self.get(self.software_image, software_image["file_name"]) except ObjectNotFound: @@ -831,7 
+859,8 @@ def load_software_image(self, software_image): def load_validated_software(self, validated_software): """Load ValidatedSoftware objects from Bootstrap into DiffSync Models.""" - self.job.logger.debug(f"Loading Bootstrap ValidatedSoftware {validated_software}") + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap ValidatedSoftware {validated_software}") try: self.get( self.validated_software, @@ -897,7 +926,8 @@ def load(self): ) else: repo = repo[0] - self.job.logger.debug(f"Sync the {repo.name} GitRepository.") + if self.job.debug: + self.job.logger.debug(f"Sync the {repo.name} GitRepository.") ensure_git_repository(repository_record=repo) self.job.logger.info(f"Parsing the {repo.name} GitRepository.") os.chdir(f"{repo.filesystem_path}") diff --git a/nautobot_ssot/integrations/bootstrap/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/bootstrap/diffsync/adapters/nautobot.py index 89462f131..7bfd543e9 100755 --- a/nautobot_ssot/integrations/bootstrap/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/bootstrap/diffsync/adapters/nautobot.py @@ -194,7 +194,8 @@ def __init__(self, *args, job=None, sync=None, **kwargs): # noqa: D417 def load_tenant_group(self): """Method to load TenantGroup objects from Nautobot into NautobotTenantGroup DiffSync models.""" for nb_tenant_group in TenantGroup.objects.all(): - self.job.logger.debug(f"Loading Nautobot TenantGroup: {nb_tenant_group}, with ID: {nb_tenant_group.id}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot TenantGroup: {nb_tenant_group}, with ID: {nb_tenant_group.id}") try: self.get(self.tenant_group, nb_tenant_group.name) except ObjectNotFound: @@ -226,7 +227,8 @@ def load_tenant_group(self): def load_tenant(self): """Method to load Tenant objects from Nautobot into NautobotTenant DiffSync models.""" for nb_tenant in Tenant.objects.all(): - self.job.logger.debug(f"Loading Nautobot Tenant: {nb_tenant}, with ID: {nb_tenant.id}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Tenant: {nb_tenant}, with ID: {nb_tenant.id}") _tags = sorted(list(nb_tenant.tags.all().values_list("name", flat=True))) try: self.get(self.tenant, nb_tenant.name) @@ -260,7 +262,8 @@ def load_tenant(self): def load_role(self): """Method to load Role objects from Nautobot into NautobotRole DiffSync models.""" for nb_role in Role.objects.all(): - self.job.logger.debug(f"Loading Nautobot Role: {nb_role}, with ID {nb_role.id}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Role: {nb_role}, with ID {nb_role.id}") try: self.get(self.role, nb_role.name) except ObjectNotFound: @@ -294,7 +297,8 @@ def load_role(self): def load_manufacturer(self): """Method to load Manufacturer objects from Nautobot into NautobotManufacturer DiffSync models.""" for nb_manufacturer in Manufacturer.objects.all(): - self.job.logger.debug(f"Loading Nautobot Manufacturer: {nb_manufacturer}, with ID {nb_manufacturer.id}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Manufacturer: {nb_manufacturer}, with ID {nb_manufacturer.id}") try: self.get(self.manufacturer, nb_manufacturer.name) except ObjectNotFound: @@ -320,7 +324,8 @@ def load_manufacturer(self): def load_platform(self): """Method to load Platform objects from Nautobot into NautobotPlatform DiffSync models.""" for nb_platform in Platform.objects.all(): - self.job.logger.debug(f"Loading Nautobot Platform: {nb_platform}, with ID {nb_platform.id}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Platform: {nb_platform}, 
with ID {nb_platform.id}") try: self.get(self.platform, nb_platform.name) except ObjectNotFound: @@ -354,7 +359,8 @@ def load_platform(self): def load_location_type(self): """Method to load LocationType objects from Nautobot into NautobotLocationType DiffSync models.""" for nb_location_type in LocationType.objects.all(): - self.job.logger.debug(f"Loading Nautobot LocationType: {nb_location_type}, with ID {nb_location_type.id}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot LocationType: {nb_location_type}, with ID {nb_location_type.id}") try: self.get(self.location_type, nb_location_type.name) except ObjectNotFound: @@ -399,7 +405,8 @@ def load_location_type(self): def load_location(self): """Method to load Location objects from Nautobot into NautobotLocation DiffSync models.""" for nb_location in Location.objects.all(): - self.job.logger.debug(f"Loading Nautobot Location: {nb_location}, with ID {nb_location.id}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Location: {nb_location}, with ID {nb_location.id}") try: self.get(self.location, nb_location.name) except ObjectNotFound: @@ -459,7 +466,8 @@ def load_location(self): def load_team(self): """Method to load Team objects from Nautobot into NautobotTeam DiffSync models.""" for nb_team in Team.objects.all(): - self.job.logger.debug(f"Loading Nautobot Team: {nb_team}, with ID: {nb_team.id}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Team: {nb_team}, with ID: {nb_team.id}") try: self.get(self.team, nb_team.name) except ObjectNotFound: @@ -494,7 +502,8 @@ def load_team(self): def load_contact(self): """Method to load Contact Objects from Nautobot into NautobotContact DiffSync models.""" for nb_contact in Contact.objects.all(): - self.job.logger.debug(f"Loading Nautobot contact: {nb_contact}, with ID: {nb_contact.id}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot contact: {nb_contact}, with ID: {nb_contact.id}") try: self.get(self.contact, nb_contact.name) except ObjectNotFound: @@ -528,7 +537,8 @@ def load_contact(self): def load_provider(self): """Method to load Provider objects from Nautobot into NautobotProvider DiffSync models.""" for nb_provider in Provider.objects.all(): - self.job.logger.debug(f"Loading Nautobot Provider: {nb_provider}, with ID {nb_provider.id}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Provider: {nb_provider}, with ID {nb_provider.id}") try: self.get(self.provider, nb_provider.name) except ObjectNotFound: @@ -566,9 +576,10 @@ def load_provider(self): def load_provider_network(self): """Method to load ProviderNetwork objects from Nautobot into NautobotProviderNetwork DiffSync models.""" for nb_provider_network in ProviderNetwork.objects.all(): - self.job.logger.debug( - f"Loading Nautobot ProviderNetwork: {nb_provider_network}, with ID {nb_provider_network.id}" - ) + if self.job.debug: + self.job.logger.debug( + f"Loading Nautobot ProviderNetwork: {nb_provider_network}, with ID {nb_provider_network.id}" + ) try: self.get(self.provider_network, nb_provider_network.name) except ObjectNotFound: @@ -604,7 +615,8 @@ def load_provider_network(self): def load_circuit_type(self): """Method to load CircuitType objects from Nautobot into NautobotCircuitType DiffSync models.""" for nb_circuit_type in CircuitType.objects.all(): - self.job.logger.debug(f"Loading Nautobot CircuitType: {nb_circuit_type}, with ID {nb_circuit_type.id}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot CircuitType: {nb_circuit_type}, with 
ID {nb_circuit_type.id}") try: self.get(self.circuit_type, nb_circuit_type.name) except ObjectNotFound: @@ -630,7 +642,8 @@ def load_circuit_type(self): def load_circuit(self): """Method to load Circuit objects from Nautobot into NautobotCircuit DiffSync models.""" for nb_circuit in Circuit.objects.all(): - self.job.logger.debug(f"Loading Nautobot Circuit: {nb_circuit}, with ID {nb_circuit.id}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Circuit: {nb_circuit}, with ID {nb_circuit.id}") try: self.get(self.circuit, nb_circuit.cid) except ObjectNotFound: @@ -670,9 +683,10 @@ def load_circuit(self): def load_circuit_termination(self): """Method to load CircuitTermination objects from Nautobot into NautobotCircuitTermination DiffSync models.""" for nb_circuit_termination in CircuitTermination.objects.all(): - self.job.logger.debug( - f"Loading Nautobot CircuitTermination {nb_circuit_termination}, with ID: {nb_circuit_termination.id}" - ) + if self.job.debug: + self.job.logger.debug( + f"Loading Nautobot CircuitTermination {nb_circuit_termination}, with ID: {nb_circuit_termination.id}" + ) _term_name = f"{nb_circuit_termination.circuit.cid}__{nb_circuit_termination.circuit.provider.name}__{nb_circuit_termination.term_side}" try: self.get(self.circuit_termination, _term_name) @@ -737,7 +751,8 @@ def load_circuit_termination(self): def load_namespace(self): """Method to load Namespace objects from Nautobot into NautobotNamespace DiffSync models.""" for nb_namespace in Namespace.objects.all(): - self.job.logger.debug(f"Loading Nautobot Namespace {nb_namespace}, with ID: {nb_namespace.id}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Namespace {nb_namespace}, with ID: {nb_namespace.id}") try: self.get(self.namespace, nb_namespace.name) except ObjectNotFound: @@ -760,7 +775,8 @@ def load_namespace(self): def load_rir(self): """Method to load RiR objects from Nautobot into NautobotRiR DiffSync models.""" for nb_rir in RIR.objects.all(): - self.job.logger.debug(f"Loading Nautobot RiR {nb_rir}, with ID {nb_rir.id}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot RiR {nb_rir}, with ID {nb_rir.id}") try: self.get(self.rir, nb_rir.name) except ObjectNotFound: @@ -779,7 +795,8 @@ def load_rir(self): def load_vlan_group(self): """Method to load VLANGroup objects from Nautobot into NautobotVLANGroup DiffSync models.""" for nb_vlan_group in VLANGroup.objects.all(): - self.job.logger.debug(f"Loading Nautobot VLANGroup {nb_vlan_group}, with ID {nb_vlan_group.id}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot VLANGroup {nb_vlan_group}, with ID {nb_vlan_group.id}") try: self.get(self.vlan_group, nb_vlan_group.name) except ObjectNotFound: @@ -802,7 +819,8 @@ def load_vlan_group(self): def load_vlan(self): """Method to load VLAN objects from Nautobot into NautobotVLAN DiffSync models.""" for nb_vlan in VLAN.objects.all(): - self.job.logger.debug(f"Loading Nautobot VLAN {nb_vlan}, with ID {nb_vlan.id}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot VLAN {nb_vlan}, with ID {nb_vlan.id}") try: self.get( self.vlan, @@ -842,7 +860,8 @@ def load_vlan(self): def load_vrf(self): """Method to load VRF objects from Nautobot into NautobotVRF DiffSync models.""" for nb_vrf in VRF.objects.all(): - self.job.logger.debug(f"Loading Nautobot VRF {nb_vrf}, with ID {nb_vrf.id}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot VRF {nb_vrf}, with ID {nb_vrf.id}") try: self.get( self.vrf, @@ -871,7 +890,8 @@ def load_vrf(self): 
def load_prefix(self): """Method to load Prefix objects from Nautobot into NautobotPrefix DiffSync models.""" for nb_prefix in Prefix.objects.all(): - self.job.logger.debug(f"Loading Nautobot Prefix {nb_prefix}, with ID {nb_prefix.id}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Prefix {nb_prefix}, with ID {nb_prefix.id}") try: self.get( self.prefix, @@ -921,7 +941,8 @@ def load_prefix(self): def load_secret(self): """Method to load Secrets objects from Nautobot into NautobotSecrets DiffSync models.""" for nb_secret in Secret.objects.all(): - self.job.logger.debug(f"Loading Nautobot Secret: {nb_secret}, with ID: {nb_secret.id}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Secret: {nb_secret}, with ID: {nb_secret.id}") try: self.get(self.secret, nb_secret.name) except ObjectNotFound: @@ -950,6 +971,8 @@ def load_secrets_group(self): """Method to load SecretsGroup objects from Nautobot into NautobotSecretsGroup DiffSync models.""" _secrets = [] for nb_sg in SecretsGroup.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot SecretsGroup: {nb_sg}") for nb_secret in nb_sg.secrets_group_associations.all(): _secrets.append( { @@ -959,7 +982,6 @@ def load_secrets_group(self): } ) _secrets = sorted(_secrets, key=lambda x: x["name"]) - self.job.logger.debug(f"Loading Nautobot SecretsGroup: {nb_sg}") try: self.get(self.secrets_group, nb_sg.name) except ObjectNotFound: @@ -987,7 +1009,8 @@ def load_secrets_group(self): def load_git_repository(self): """Method to load GitRepository objects from Nautobot into NautobotGitRepository DiffSync models.""" for nb_gr in GitRepository.objects.all(): - self.job.logger.debug(f"Loading Nautobot GitRepository: {nb_gr}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot GitRepository: {nb_gr}") try: self.get(self.git_repository, nb_gr.name) except ObjectNotFound: @@ -1020,7 +1043,8 @@ def load_git_repository(self): def load_dynamic_group(self): """Method to load DynamicGroup objects from Nautobot into NautobotDynamicGroup DiffSync models.""" for nb_dyn_group in DynamicGroup.objects.all(): - self.job.logger.debug(f"Loading Nautobot DynamicGroup {nb_dyn_group}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot DynamicGroup {nb_dyn_group}") try: self.get(self.dynamic_group, nb_dyn_group.name) except ObjectNotFound: @@ -1055,7 +1079,8 @@ def load_dynamic_group(self): def load_computed_field(self): """Method to load ComputedField objects from Nautobot into NautobotComputedField DiffSync models.""" for nb_comp_field in ComputedField.objects.all(): - self.job.logger.debug(f"Loading Nautobot ComputedField {nb_comp_field}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot ComputedField {nb_comp_field}") try: self.get(self.computed_field, nb_comp_field.label) except ObjectNotFound: @@ -1077,7 +1102,8 @@ def load_computed_field(self): def load_tag(self): """Method to load Tag objects from Nautobot into NautobotTag DiffSync Models.""" for nb_tag in Tag.objects.all(): - self.job.logger.debug(f"Loading Nautobot Tag {nb_tag}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Tag {nb_tag}") try: self.get(self.tag, nb_tag.name) except ObjectNotFound: @@ -1111,7 +1137,8 @@ def load_tag(self): def load_graph_ql_query(self): """Method to load GraphQLQuery objects from Nautobot into NautobotGraphQLQuery Models.""" for query in GraphQLQuery.objects.all(): - self.job.logger.debug(f"Loading Nautobot GraphQLQuery {query}") + if self.job.debug: + 
self.job.logger.debug(f"Loading Nautobot GraphQLQuery {query}") try: self.get(self.graph_ql_query, query.name) except ObjectNotFound: @@ -1122,7 +1149,8 @@ def load_graph_ql_query(self): def load_software(self): """Method to load Software objects from Nautobot into NautobotSoftware Models.""" for nb_software in ORMSoftware.objects.all(): - self.job.logger.debug(f"Loading Nautobot SoftwareLCM {nb_software}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot SoftwareLCM {nb_software}") try: self.get( self.software, @@ -1169,7 +1197,8 @@ def load_software(self): def load_software_image(self): """Method to load SoftwareImage objects from Nautobot into NautobotSoftwareImage Models.""" for nb_software_image in ORMSoftwareImage.objects.all(): - self.job.logger.debug(f"Loading Nautobot SoftwareImageLCM {nb_software_image}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot SoftwareImageLCM {nb_software_image}") try: self.get(self.software_image, nb_software_image.image_file_name) except ObjectNotFound: @@ -1207,7 +1236,8 @@ def load_software_image(self): def load_validated_software(self): """Method to load ValidatedSoftware objects from Nautobot into NautobotValidatedSoftware Models.""" for nb_validated_software in ORMValidatedSoftware.objects.all(): - self.job.logger.debug(f"Loading Nautobot ValidatedSoftwareLCM {nb_validated_software}") + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot ValidatedSoftwareLCM {nb_validated_software}") try: _software = ORMSoftware.objects.get( version=nb_validated_software.software.version, From 66ab39c6b32dfefc9fd1f0ad690a1b3de62fe517 Mon Sep 17 00:00:00 2001 From: bile0026 Date: Wed, 11 Sep 2024 09:32:10 -0500 Subject: [PATCH 10/19] =?UTF-8?q?docs:=20=F0=9F=93=9D=20update=20plugin=20?= =?UTF-8?q?options=20for=20combined=20app=20and=20some=20spelling=20fixes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/admin/integrations/bootstrap_setup.md | 40 +++++++++++++--------- docs/user/integrations/bootstrap.md | 2 +- 2 files changed, 24 insertions(+), 18 deletions(-) diff --git a/docs/admin/integrations/bootstrap_setup.md b/docs/admin/integrations/bootstrap_setup.md index e59999a31..dbd4572c6 100644 --- a/docs/admin/integrations/bootstrap_setup.md +++ b/docs/admin/integrations/bootstrap_setup.md @@ -1,28 +1,27 @@ -# NautobotSsotBootstrap +# Bootstrap ## Description -This plugin will sync data from yaml files into Nautobot to create baseline environments. Most items will receive a custom field associated with them called "System of Record", which will be set to "Bootstrap". These items are then the only ones managed by the Bootstrap SSoT App. Other items within the Nautobot instance will not be affected unless there's items with overlapping names. There is currently two exceptions to this and those are the ComputedField, and GraphQLQuery models since they can't have a custom field associated. If you choose to manage ComputedField or GraphQLQuery objects with the Bootstrap SSoT App, make sure to define them all within the yaml file, since any "locally defined" Computed Fields and GraphQL Queries within Nautobot will end up getting deleted when the job runs. If an item exists in Nautobot by it's identifiers but it does not have the "System of Record" custom field on it, the item will be updated with "Bootstrap" (or `SYSTEM_OF_RECORD` environment variable value) when the plugin runs. 
This way no duplicates are created, and the plugin will not delete any items that are not definied in the Bootstrap data but were manually created in Nautobot.
+This App will sync data from yaml files into Nautobot to create baseline environments. Most items will receive a custom field associated with them called "System of Record", which will be set to "Bootstrap". These items are then the only ones managed by the Bootstrap SSoT App. Other items within the Nautobot instance will not be affected unless there are items with overlapping names. There are currently two exceptions to this, and those are the ComputedField and GraphQLQuery models, since they can't have a custom field associated. If you choose to manage ComputedField or GraphQLQuery objects with the Bootstrap SSoT App, make sure to define them all within the yaml file, since any "locally defined" Computed Fields and GraphQL Queries within Nautobot will end up getting deleted when the job runs. If an item exists in Nautobot by its identifiers but it does not have the "System of Record" custom field on it, the item will be updated with "Bootstrap" (or `SYSTEM_OF_RECORD` environment variable value) when the App runs. This way no duplicates are created, and the App will not delete any items that are not defined in the Bootstrap data but were manually created in Nautobot.
 
 ## Installation
 
-Add the plugin to your poetry environment `poetry install nautobot-ssot-bootstrap`, then configure your `nautobot_config.py` to include the app and the settings.
+Add the Nautobot SSoT App to your poetry environment `poetry add nautobot-ssot`, then configure your `nautobot_config.py` to include the app and the settings.
 
 ### nautobot_config.py
 
-The settings here are pretty straightforward, `nautobot_environment_branch` will be loaded from the environment variable `NAUTOBOT_BOOTSTRAP_SSOT_ENVIRONMENT_BRANCH`, or default to develop. The rest of the settings define which models/objects you want to have the plugin sync to Nautobot. There are a couple of caveats to these. For example, for DynamicGroup objects to sync, the filter criteria need to already exist in Nautobot. So, if you are going to have groups that are filtered on platforms/regions/sites/etc make sure not to include DynamicGroup objects in the "models_to_sync" until those items exist. Same for Git Repositories when you want to sync Golden Config-related repositories. The Golden Config plugin needs to be installed, for the `provided_contents` items to be able to be found. This also goes for the Lifecycle Management app with `Software/ValidatedSoftware` models.
+The settings here are pretty straightforward: `bootstrap_nautobot_environment_branch` will be loaded from the environment variable `NAUTOBOT_BOOTSTRAP_SSOT_ENVIRONMENT_BRANCH`, or default to develop. The rest of the settings define which models/objects you want to have the App sync to Nautobot. There are a couple of caveats to these. For example, for DynamicGroup objects to sync, the filter criteria need to already exist in Nautobot. So, if you are going to have groups that are filtered on platforms/regions/sites/etc., make sure not to include DynamicGroup objects in the "models_to_sync" until those items exist. The same goes for Git Repositories when you want to sync Golden Config-related repositories. The Golden Config App needs to be installed for the `provided_contents` items to be found. This also goes for the Lifecycle Management app with `Software/ValidatedSoftware` models.
 
```python -PLUGINS = ["nautobot_ssot", "nautobot_ssot_bootstrap"] +PLUGINS = ["nautobot_ssot"] PLUGINS_CONFIG = { - "nautobot_ssot_bootstrap": { - # What to assign to the System of Record custom field. - "nautobot_environment_branch": os.getenv("NAUTOBOT_BOOTSTRAP_SSOT_ENVIRONMENT_BRANCH", "develop"), - # Which models to import from YAML into Nautobot - "models_to_sync": { + "nautobot_ssot": { + # Other nautobot_ssot settings ommitted. + "bootstrap_nautobot_environment_branch": os.getenv("NAUTOBOT_BOOTSTRAP_SSOT_ENVIRONMENT_BRANCH", "develop"), + "bootstrap_models_to_sync": { "secret": True, "secrets_group": True, "git_repository": True, @@ -30,9 +29,9 @@ PLUGINS_CONFIG = { "computed_field": True, "tag": True, "graph_ql_query": True, - "software": True, - "software_image": True, - "validated_software": True, + "software": False, + "software_image": False, + "validated_software": False, "tenant_group": True, "tenant": True, "role": True, @@ -47,8 +46,15 @@ PLUGINS_CONFIG = { "circuit_type": True, "circuit": True, "circuit_termination": True, + "namespace": True, + "rir": True, + "vlan_group": True, + "vlan": True, + "vrf": True, + "prefix": True, }, - } + "enable_bootstrap": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_BOOTSTRAP", "false")), + } } ``` @@ -56,7 +62,7 @@ PLUGINS_CONFIG = { ### Bootstrap data -Bootstrap data can be stored in 2 fashions. Firstly, it can be stored within the `nautobot_ssot_bootstrap/fixtures` directory, or you may create a Git Repository within an existing Nautobot instance that contains the word `Bootstrap` in the name and provides `config context` data. The data structure is flat files, there is a naming scheme to these files. The first one required is `global_settings.yml`. This contains the main data structures of what data can be loaded `Secrets,SecretsGroups,GitRepository,DynamicGroup,Tag,etc`. You can then create additional `.yml` files with naming of your CI environments, i.e. production, development, etc. This is where the environment variables described below would be matched to pull in additional data from the other yaml files defined in the directory. A simple structure would look something like this: +Bootstrap data can be stored in 2 fashions. Firstly, it can be stored within the `nautobot_ssot_bootstrap/fixtures` directory, or you may create a Git Repository within an existing Nautobot instance that contains the word `Bootstrap` in the name and provides `config context` data. Using local files is not recommended as this requires a fork of the plugin and locally editing the YAML data files in the fixtures folder. The suggested method is to use the Git Datasource. The data structure is flat files, and there is a naming scheme to these files. The first one required is `global_settings.yml`. This contains the main data structures of what data can be loaded `Secrets,SecretsGroups,GitRepository,DynamicGroup,Tag,etc`. You can then create additional `.yml` files with naming of your CI environments, i.e. production, development, etc. This is where the environment variables described below would be matched to pull in additional data from the other yaml files defined in the directory. A simple structure would look something like this: ```text global_settings.yml @@ -67,7 +73,7 @@ staging.yml There are 2 environment variables that control how certain things are loaded in the app. - 1. `NAUTOBOT_BOOTSTRAP_SSOT_LOAD_SOURCE` - defines whether to load from the local `fixtures` folder or a GitRepository already present in Nautobot. + 1. 
`NAUTOBOT_BOOTSTRAP_SSOT_LOAD_SOURCE` - defines whether to load from the local `fixtures` folder or a GitRepository already present in Nautobot. This setting will get overridden if the user selects something other than `env_var` in the job's GUI settings. - Acceptable options are `file` or `git`. 2. `NAUTOBOT_BOOTSTRAP_SSOT_ENVIRONMENT_BRANCH` - Defines the environment and settings you want to import. I.e. production, develop, staging. @@ -76,7 +82,7 @@ There are 2 environment variables that control how certain things are loaded in ### Bootstrap as DataSource Synchronization of data follows this workflow: -1. Load data from Bootstrap yaml file (limited to `models_to_sync`) +1. Load data from Bootstrap YAML file (limited to `models_to_sync`) 2. Load data from Nautobot (limited to `models_to_sync`, and objects that also have the `CustomField` `system_of_record` set to "Bootstrap".) 3. DiffSync determines Creates, Updates, Deletes 4. If an object is being created (an object loaded from Bootstrap was not loaded from Nautobot) Bootstrap will first check to see if an object with the same name exists in Nautobot but does not have the `system_of_record` field set. If it finds an object, it will update it with the Bootstrap values and set the `system_of_record` field to "Bootstrap". diff --git a/docs/user/integrations/bootstrap.md b/docs/user/integrations/bootstrap.md index 669bf722a..4fc532585 100644 --- a/docs/user/integrations/bootstrap.md +++ b/docs/user/integrations/bootstrap.md @@ -2,7 +2,7 @@ ### Data structures -#### global_settings.yml (see '../nautobot_ssot_bootstrap/fixtures/global_settings.yml for examples of supported models) +#### global_settings.yml (see '../bootstrap/fixtures/global_settings.yml for examples of supported models) ```yaml secret: From 0f4436cc22e5b214dedb7fa5d670fe56cc4e8106 Mon Sep 17 00:00:00 2001 From: bile0026 Date: Wed, 11 Sep 2024 09:32:25 -0500 Subject: [PATCH 11/19] =?UTF-8?q?fix:=20=F0=9F=90=9B=20code=20cleanup?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/integrations/bootstrap/constants.py | 3 --- nautobot_ssot/integrations/bootstrap/jobs.py | 8 ++++---- nautobot_ssot/integrations/bootstrap/utils/__init__.py | 4 ---- 3 files changed, 4 insertions(+), 11 deletions(-) delete mode 100644 nautobot_ssot/integrations/bootstrap/constants.py diff --git a/nautobot_ssot/integrations/bootstrap/constants.py b/nautobot_ssot/integrations/bootstrap/constants.py deleted file mode 100644 index eb25bf1a7..000000000 --- a/nautobot_ssot/integrations/bootstrap/constants.py +++ /dev/null @@ -1,3 +0,0 @@ -"""Constants to be used with the nautobot_ssot_bootstrap plugin.""" - -content_model_path_mapping = {} diff --git a/nautobot_ssot/integrations/bootstrap/jobs.py b/nautobot_ssot/integrations/bootstrap/jobs.py index a98535e3a..60d13e0f2 100644 --- a/nautobot_ssot/integrations/bootstrap/jobs.py +++ b/nautobot_ssot/integrations/bootstrap/jobs.py @@ -109,13 +109,13 @@ class BootstrapDataTarget(DataTarget): """bootstrap SSoT Data Target.""" debug = BooleanVar(description="Enable for more verbose debug logging", default=False) - write_destination = ChoiceVar( + read_destination = ChoiceVar( choices=( ("file", "File"), ("git", "Git"), ("env_var", "Environment Variable"), ), - description="Where to load the yaml files from", + description="Where to load the YAML files from", label="Load Source", default="env_var", ) @@ -148,12 +148,12 @@ def load_target_adapter(self): self.target_adapter = 
bootstrap.BootstrapAdapter(job=self, sync=self.sync) self.target_adapter.load() - def run(self, write_destination, dryrun, memory_profiling, debug, *args, **kwargs): # pylint: disable=arguments-differ + def run(self, read_destination, dryrun, memory_profiling, debug, *args, **kwargs): # pylint: disable=arguments-differ """Perform data synchronization.""" self.debug = debug self.dryrun = dryrun self.memory_profiling = memory_profiling - self.write_destination = write_destination + self.read_destination = read_destination super().run(dryrun=self.dryrun, memory_profiling=self.memory_profiling, *args, **kwargs) diff --git a/nautobot_ssot/integrations/bootstrap/utils/__init__.py b/nautobot_ssot/integrations/bootstrap/utils/__init__.py index c0e85e958..0b65fdc96 100644 --- a/nautobot_ssot/integrations/bootstrap/utils/__init__.py +++ b/nautobot_ssot/integrations/bootstrap/utils/__init__.py @@ -11,8 +11,6 @@ from nautobot.extras.models import Contact, Team from nautobot.extras.utils import FeatureQuery, RoleModelsQuery, TaggableClassesQuery -from nautobot_ssot.integrations.bootstrap.constants import content_model_path_mapping - def is_running_tests(): """Check whether running unittests or actual job.""" @@ -45,8 +43,6 @@ def get_sor_field_nautobot_object(nb_object): def lookup_content_type(content_model_path, content_type): """Lookup content type for a GitRepository object.""" - if content_type in content_model_path_mapping: - return content_model_path_mapping[content_type] _choices = get_datasource_content_choices(content_model_path) _found_type = None for _element in _choices: From ce1727b9cbd96cb5c8c28e2c5f1b89599e492a01 Mon Sep 17 00:00:00 2001 From: bile0026 Date: Wed, 11 Sep 2024 09:38:10 -0500 Subject: [PATCH 12/19] =?UTF-8?q?fix:=20=F0=9F=90=9B=20fix=20indentation?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrations/bootstrap/diffsync/adapters/bootstrap.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nautobot_ssot/integrations/bootstrap/diffsync/adapters/bootstrap.py b/nautobot_ssot/integrations/bootstrap/diffsync/adapters/bootstrap.py index 133491cf5..b40c55e3d 100755 --- a/nautobot_ssot/integrations/bootstrap/diffsync/adapters/bootstrap.py +++ b/nautobot_ssot/integrations/bootstrap/diffsync/adapters/bootstrap.py @@ -221,7 +221,7 @@ def load_tenant_group(self, bs_tenant_group, branch_vars): description=bs_tenant_group["description"], system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), ) - self.add(new_tenant_group) + self.add(new_tenant_group) def load_tenant(self, bs_tenant, branch_vars): """Load Tenant objects from Bootstrap into DiffSync models.""" @@ -238,7 +238,7 @@ def load_tenant(self, bs_tenant, branch_vars): tags=bs_tenant["tags"], system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), ) - self.add(new_tenant) + self.add(new_tenant) def load_role(self, bs_role, branch_vars): """Load Role objects from Bootstrap into DiffSync models.""" @@ -785,7 +785,7 @@ def load_tag(self, tag): description=tag["description"], system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), ) - self.add(_new_tag) + self.add(_new_tag) def load_graph_ql_query(self, query): """Load GraphQLQuery objects from Bootstrap into DiffSync Models.""" @@ -795,7 +795,7 @@ def load_graph_ql_query(self, query): self.get(self.graph_ql_query, query["name"]) except ObjectNotFound: _new_graphqlq = self.graph_ql_query(name=query["name"], query=query["query"]) - self.add(_new_graphqlq) + self.add(_new_graphqlq) def 
load_software(self, software):
        """Load Software objects from Bootstrap into DiffSync Models."""

From 9f9636e7ef234e09b22d161481078586b67b965a Mon Sep 17 00:00:00 2001
From: bile0026 
Date: Wed, 11 Sep 2024 15:06:01 -0500
Subject: [PATCH 13/19] =?UTF-8?q?style:=20=F0=9F=93=9D=20update=20spelling?=
 =?UTF-8?q?s?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 docs/admin/integrations/bootstrap_setup.md | 4 ++--
 docs/user/integrations/bootstrap.md        | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/admin/integrations/bootstrap_setup.md b/docs/admin/integrations/bootstrap_setup.md
index dbd4572c6..f2abada04 100644
--- a/docs/admin/integrations/bootstrap_setup.md
+++ b/docs/admin/integrations/bootstrap_setup.md
@@ -3,7 +3,7 @@

 ## Description

-This App will sync data from yaml files into Nautobot to create baseline environments. Most items will receive a custom field associated with them called "System of Record", which will be set to "Bootstrap". These items are then the only ones managed by the Bootstrap SSoT App. Other items within the Nautobot instance will not be affected unless there's items with overlapping names. There is currently two exceptions to this and those are the ComputedField, and GraphQLQuery models since they can't have a custom field associated. If you choose to manage ComputedField or GraphQLQuery objects with the Bootstrap SSoT App, make sure to define them all within the yaml file, since any "locally defined" Computed Fields and GraphQL Queries within Nautobot will end up getting deleted when the job runs. If an item exists in Nautobot by it's identifiers but it does not have the "System of Record" custom field on it, the item will be updated with "Bootstrap" (or `SYSTEM_OF_RECORD` environment variable value) when the App runs. This way no duplicates are created, and the App will not delete any items that are not defined in the Bootstrap data but were manually created in Nautobot.
+This App will sync data from YAML files into Nautobot to create baseline environments. Most items will receive a custom field associated with them called "System of Record", which will be set to "Bootstrap". These items are then the only ones managed by the Bootstrap SSoT App. Other items within the Nautobot instance will not be affected unless there are items with overlapping names. There are currently two exceptions to this: the ComputedField and GraphQLQuery models, since they can't have a custom field associated. If you choose to manage ComputedField or GraphQLQuery objects with the Bootstrap SSoT App, make sure to define them all within the YAML file, since any "locally defined" Computed Fields and GraphQL Queries within Nautobot will end up getting deleted when the job runs. If an item exists in Nautobot by its identifiers but it does not have the "System of Record" custom field on it, the item will be updated with "Bootstrap" (or `SYSTEM_OF_RECORD` environment variable value) when the App runs. This way no duplicates are created, and the App will not delete any items that are not defined in the Bootstrap data but were manually created in Nautobot.

 ## Installation

@@ -62,7 +62,7 @@ PLUGINS_CONFIG = {

 ### Bootstrap data

-Bootstrap data can be stored in 2 fashions. Firstly, it can be stored within the `nautobot_ssot_bootstrap/fixtures` directory, or you may create a Git Repository within an existing Nautobot instance that contains the word `Bootstrap` in the name and provides `config context` data.
Using local files is not recommended as this requires a fork of the plugin and locally editing the YAML data files in the fixtures folder. The suggested method is to use the Git Datasource. The data structure is flat files, and there is a naming scheme to these files. The first one required is `global_settings.yml`. This contains the main data structures of what data can be loaded `Secrets,SecretsGroups,GitRepository,DynamicGroup,Tag,etc`. You can then create additional `.yml` files with naming of your CI environments, i.e. production, development, etc. This is where the environment variables described below would be matched to pull in additional data from the other yaml files defined in the directory. A simple structure would look something like this: +Bootstrap data can be stored in 2 fashions. Firstly, it can be stored within the `nautobot_ssot_bootstrap/fixtures` directory, or you may create a Git Repository within an existing Nautobot instance that contains the word `Bootstrap` in the name and provides `config context` data. Using local files is not recommended as this requires a fork of the plugin and locally editing the YAML data files in the fixtures folder. The suggested method is to use the Git Datasource. The data structure is flat files, and there is a naming scheme to these files. The first one required is `global_settings.yml`. This contains the main data structures of what data can be loaded `Secrets,SecretsGroups,GitRepository,DynamicGroup,Tag,etc`. You can then create additional `.yml` files with naming of your CI environments, i.e. production, development, etc. This is where the environment variables described below would be matched to pull in additional data from the other YAML files defined in the directory. A simple structure would look something like this: ```text global_settings.yml diff --git a/docs/user/integrations/bootstrap.md b/docs/user/integrations/bootstrap.md index 4fc532585..b4c171f58 100644 --- a/docs/user/integrations/bootstrap.md +++ b/docs/user/integrations/bootstrap.md @@ -108,7 +108,7 @@ git_branch: develop ## Content Types -There are a couple models like Tags and Git Repositories that have associated content types. These require a specific format when listing them in the yaml file. The format of these is the `app_label`.`model`, though models can somewhat vary from App to App. Here is a list of some of the most common ones: +There are a couple models like Tags and Git Repositories that have associated content types. These require a specific format when listing them in the YAML file. The format of these is the `app_label`.`model`, though models can somewhat vary from App to App. 
Here is a list of some of the most common ones: ```yaml - "circuits.circuit" From e5e68f94ab7219b7831880f426ce482e14bdbad0 Mon Sep 17 00:00:00 2001 From: bile0026 Date: Thu, 12 Sep 2024 09:44:21 -0500 Subject: [PATCH 14/19] =?UTF-8?q?fix:=20=F0=9F=93=9D=20remove=20duplicate?= =?UTF-8?q?=20entry?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/user/integrations/index.md | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/user/integrations/index.md b/docs/user/integrations/index.md index 3297227e7..ebd3a8df9 100644 --- a/docs/user/integrations/index.md +++ b/docs/user/integrations/index.md @@ -3,7 +3,6 @@ This Nautobot app supports the following integrations: - [Cisco ACI](./aci.md) -- [Bootstrap](./bootstrap.md) - [Arista CloudVision](./aristacv.md) - [Bootstrap](./bootstrap.md) - [Device42](./device42.md) From 38e7281bb0a552364fe9204881e3032a15adf498 Mon Sep 17 00:00:00 2001 From: bile0026 Date: Thu, 12 Sep 2024 10:29:36 -0500 Subject: [PATCH 15/19] =?UTF-8?q?docs:=20=F0=9F=93=9D=20update=20documenta?= =?UTF-8?q?tion?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/admin/integrations/bootstrap_setup.md | 25 ++++++---------------- docs/user/integrations/bootstrap.md | 17 +++++++++++++++ 2 files changed, 24 insertions(+), 18 deletions(-) diff --git a/docs/admin/integrations/bootstrap_setup.md b/docs/admin/integrations/bootstrap_setup.md index f2abada04..9255ed766 100644 --- a/docs/admin/integrations/bootstrap_setup.md +++ b/docs/admin/integrations/bootstrap_setup.md @@ -62,7 +62,13 @@ PLUGINS_CONFIG = { ### Bootstrap data -Bootstrap data can be stored in 2 fashions. Firstly, it can be stored within the `nautobot_ssot_bootstrap/fixtures` directory, or you may create a Git Repository within an existing Nautobot instance that contains the word `Bootstrap` in the name and provides `config context` data. Using local files is not recommended as this requires a fork of the plugin and locally editing the YAML data files in the fixtures folder. The suggested method is to use the Git Datasource. The data structure is flat files, and there is a naming scheme to these files. The first one required is `global_settings.yml`. This contains the main data structures of what data can be loaded `Secrets,SecretsGroups,GitRepository,DynamicGroup,Tag,etc`. You can then create additional `.yml` files with naming of your CI environments, i.e. production, development, etc. This is where the environment variables described below would be matched to pull in additional data from the other YAML files defined in the directory. A simple structure would look something like this: +Bootstrap data can be stored in 2 fashions. + +1. (Reccomended) Bootstrap data can be stored in a Git Repository and referenced in the app as a Git Datasource. A user should create a Git Repository in Nautobot (including any necessary Secrets and SecretsGroups for access) with the word "Bootstrap" in the name, and with a provided content type of `config contexts`. This is how the App will locate the correct repository. The data structure is flat files, and there is a naming scheme to these files. The first one required is `global_settings.yml`. This contains the main data structures of what data can be loaded `Secrets,SecretsGroups,GitRepository,DynamicGroup,Tag,etc`. You can then create additional `.yml` files with naming of your CI environments, i.e. production, development, etc for default values for specific items. 
This is where the environment variables described below would be matched to pull in additional data from the other YAML files defined in the directory. + +2. Bootstrap data can be stored within the `nautobot_ssot/bootstrap/fixtures` directory. Using local files is not recommended as this requires a fork of the plugin and locally editing the YAML data files in the fixtures folder. + +A simple structure would look something like this: ```text global_settings.yml @@ -76,20 +82,3 @@ There are 2 environment variables that control how certain things are loaded in 1. `NAUTOBOT_BOOTSTRAP_SSOT_LOAD_SOURCE` - defines whether to load from the local `fixtures` folder or a GitRepository already present in Nautobot. This setting will get overridden if the user selects something other than `env_var` in the job's GUI settings. - Acceptable options are `file` or `git`. 2. `NAUTOBOT_BOOTSTRAP_SSOT_ENVIRONMENT_BRANCH` - Defines the environment and settings you want to import. I.e. production, develop, staging. - -## Process - -### Bootstrap as DataSource - -Synchronization of data follows this workflow: -1. Load data from Bootstrap YAML file (limited to `models_to_sync`) -2. Load data from Nautobot (limited to `models_to_sync`, and objects that also have the `CustomField` `system_of_record` set to "Bootstrap".) -3. DiffSync determines Creates, Updates, Deletes -4. If an object is being created (an object loaded from Bootstrap was not loaded from Nautobot) Bootstrap will first check to see if an object with the same name exists in Nautobot but does not have the `system_of_record` field set. If it finds an object, it will update it with the Bootstrap values and set the `system_of_record` field to "Bootstrap". -5. If an object needs to be updated it will be updated with the values provided by Bootstrap data. -6. If an object needs to be deleted it will be deleted. - - -### Bootstrap as DataTarget - -NotYetImplemented diff --git a/docs/user/integrations/bootstrap.md b/docs/user/integrations/bootstrap.md index b4c171f58..94b89dade 100644 --- a/docs/user/integrations/bootstrap.md +++ b/docs/user/integrations/bootstrap.md @@ -1,5 +1,22 @@ ## Usage +## Process + +### Bootstrap as DataSource + +Synchronization of data follows this workflow: +1. Load data from Bootstrap YAML file (limited to `models_to_sync`) +2. Load data from Nautobot (limited to `models_to_sync`, and objects that also have the `CustomField` `system_of_record` set to "Bootstrap".) +3. DiffSync determines Creates, Updates, Deletes +4. If an object is being created (an object loaded from Bootstrap was not loaded from Nautobot) Bootstrap will first check to see if an object with the same name exists in Nautobot but does not have the `system_of_record` field set. If it finds an object, it will update it with the Bootstrap values and set the `system_of_record` field to "Bootstrap". +5. If an object needs to be updated it will be updated with the values provided by Bootstrap data. +6. If an object needs to be deleted it will be deleted. 
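+
+The "adopt before create" check in step 4 can be pictured with a short sketch. This is illustrative only, not the App's actual implementation, using `Tenant` as an example model and the `SYSTEM_OF_RECORD` environment variable described in these docs:
+
+```python
+# Illustrative sketch of step 4: adopt an existing, unowned object instead
+# of creating a duplicate. Not the App's actual code.
+import os
+
+from nautobot.tenancy.models import Tenant
+
+SOR = os.getenv("SYSTEM_OF_RECORD", "Bootstrap")
+
+
+def create_or_adopt_tenant(name, description=""):
+    """Create a Tenant, or adopt a same-named one with no system_of_record set."""
+    tenant, created = Tenant.objects.get_or_create(name=name, defaults={"description": description})
+    if not created and tenant.custom_field_data.get("system_of_record"):
+        # Already owned by a system of record; leave it for DiffSync to reconcile.
+        return tenant
+    if not created:
+        # Existing object without a system of record: apply the Bootstrap values.
+        tenant.description = description
+    tenant.custom_field_data["system_of_record"] = SOR
+    tenant.validated_save()
+    return tenant
+```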
+ + +### Bootstrap as DataTarget + +NotYetImplemented + ### Data structures #### global_settings.yml (see '../bootstrap/fixtures/global_settings.yml for examples of supported models) From e210b2c7c26201c814a1ceb563d848b0f62742b7 Mon Sep 17 00:00:00 2001 From: Zach Biles Date: Thu, 19 Sep 2024 17:08:39 -0500 Subject: [PATCH 16/19] Update docs/admin/integrations/bootstrap_setup.md Co-authored-by: Gary Snider <75227981+gsnider2195@users.noreply.github.com> --- docs/admin/integrations/bootstrap_setup.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/admin/integrations/bootstrap_setup.md b/docs/admin/integrations/bootstrap_setup.md index 9255ed766..537c6bd50 100644 --- a/docs/admin/integrations/bootstrap_setup.md +++ b/docs/admin/integrations/bootstrap_setup.md @@ -64,7 +64,7 @@ PLUGINS_CONFIG = { Bootstrap data can be stored in 2 fashions. -1. (Reccomended) Bootstrap data can be stored in a Git Repository and referenced in the app as a Git Datasource. A user should create a Git Repository in Nautobot (including any necessary Secrets and SecretsGroups for access) with the word "Bootstrap" in the name, and with a provided content type of `config contexts`. This is how the App will locate the correct repository. The data structure is flat files, and there is a naming scheme to these files. The first one required is `global_settings.yml`. This contains the main data structures of what data can be loaded `Secrets,SecretsGroups,GitRepository,DynamicGroup,Tag,etc`. You can then create additional `.yml` files with naming of your CI environments, i.e. production, development, etc for default values for specific items. This is where the environment variables described below would be matched to pull in additional data from the other YAML files defined in the directory. +1. (Recommended) Bootstrap data can be stored in a Git Repository and referenced in the app as a Git Datasource. A user should create a Git Repository in Nautobot (including any necessary Secrets and SecretsGroups for access) with the word "Bootstrap" in the name, and with a provided content type of `config contexts`. This is how the App will locate the correct repository. The data structure is flat files, and there is a naming scheme to these files. The first one required is `global_settings.yml`. This contains the main data structures of what data can be loaded `Secrets,SecretsGroups,GitRepository,DynamicGroup,Tag,etc`. You can then create additional `.yml` files with naming of your CI environments, i.e. production, development, etc for default values for specific items. This is where the environment variables described below would be matched to pull in additional data from the other YAML files defined in the directory. 2. Bootstrap data can be stored within the `nautobot_ssot/bootstrap/fixtures` directory. Using local files is not recommended as this requires a fork of the plugin and locally editing the YAML data files in the fixtures folder. 
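
The repository lookup described in option 1 above can be approximated in a few lines of ORM code. A hedged sketch, assuming the match is made on the repository name plus Nautobot's `extras.configcontext` provided-content identifier; the App's actual lookup may differ:

```python
# Sketch only: locate a "Bootstrap" Git repository that provides config
# contexts. Assumes Nautobot's "extras.configcontext" content identifier.
from nautobot.extras.models import GitRepository


def find_bootstrap_repository():
    """Return the first matching repository, or None if none is configured."""
    for repo in GitRepository.objects.filter(name__icontains="bootstrap"):
        if "extras.configcontext" in (repo.provided_contents or []):
            return repo
    return None
```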
From 0b42e7b60368946fa0de8e7950371f90d0dabe66 Mon Sep 17 00:00:00 2001 From: bile0026 Date: Thu, 26 Sep 2024 21:34:27 -0500 Subject: [PATCH 17/19] =?UTF-8?q?fix:=20=F0=9F=90=9B=20fix=20bug=20tagging?= =?UTF-8?q?=20tenants?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrations/bootstrap/diffsync/models/nautobot.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/nautobot_ssot/integrations/bootstrap/diffsync/models/nautobot.py b/nautobot_ssot/integrations/bootstrap/diffsync/models/nautobot.py index 95aa88156..474bfef64 100755 --- a/nautobot_ssot/integrations/bootstrap/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/bootstrap/diffsync/models/nautobot.py @@ -182,9 +182,6 @@ def create(cls, adapter, ids, attrs): ) new_tenant.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) new_tenant.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) - if "tags" in attrs: - for _tag in attrs["tags"]: - new_tenant.tags.add(_tag) new_tenant.validated_save() return super().create(adapter=adapter, ids=ids, attrs=attrs) @@ -396,7 +393,7 @@ def create(cls, adapter, ids, attrs): _parent = ORMLocationType.objects.get(name=attrs["parent"]) except ORMLocationType.DoesNotExist: adapter.job.logger.warning( - f'Could not find LocationType {ids["parent"]} in Nautobot, ensure it exists.' + f'Could not find LocationType {attrs["parent"]} in Nautobot, ensure it exists.' ) _new_location_type = ORMLocationType( name=ids["name"], From e889dc3e2b89297a69f34d2d5cdd68d87f20bd7c Mon Sep 17 00:00:00 2001 From: bile0026 Date: Thu, 3 Oct 2024 15:37:07 -0500 Subject: [PATCH 18/19] =?UTF-8?q?chore:=20=F0=9F=91=B7=20add=20codeowner?= =?UTF-8?q?=20for=20bootstrap=20integration?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/CODEOWNERS | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 7ac44267f..cd890a910 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -2,6 +2,7 @@ * @nautobot/plugin-ssot /nautobot_ssot/integrations/aci/ @chadell @nautobot/plugin-ssot /nautobot_ssot/integrations/aristacv/ @qduk @jdrew82 @nautobot/plugin-ssot +/nautobot_ssot/integrations/bootstrap/ @bile0026 @nautobot/plugin-ssot /nautobot_ssot/integrations/device42/ @jdrew82 @nautobot/plugin-ssot /nautobot_ssot/integrations/infoblox/ @qduk @jdrew82 @nautobot/plugin-ssot /nautobot_ssot/integrations/ipfabric/ @alhogan @nautobot/plugin-ssot From 9b02f0435a4544857e87897e2afbcc21fd63f301 Mon Sep 17 00:00:00 2001 From: bile0026 Date: Thu, 3 Oct 2024 15:37:22 -0500 Subject: [PATCH 19/19] =?UTF-8?q?docs:=20=F0=9F=93=9D=20update=20installat?= =?UTF-8?q?ion=20instructions?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/admin/integrations/bootstrap_setup.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/admin/integrations/bootstrap_setup.md b/docs/admin/integrations/bootstrap_setup.md index 537c6bd50..eea250ae6 100644 --- a/docs/admin/integrations/bootstrap_setup.md +++ b/docs/admin/integrations/bootstrap_setup.md @@ -7,7 +7,11 @@ This App will sync data from YAML files into Nautobot to create baseline environ ## Installation -Add the Nautobot SSoT App to your poetry environment `poetry install nautobot-ssot`, then configure your `nautobot_config.py` to include the app and the settings. 
+Before configuring the integration, please ensure that the `nautobot-ssot` app has been [installed with Bootstrap integration extra dependencies](../install.md#install-guide).
+
+```shell
+pip install nautobot-ssot[bootstrap]
+```

 ### nautobot_config.py
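
For illustration, the settings under this heading could look roughly like the sketch below. The `enable_bootstrap` key is an assumption, inferred from the `enable_<integration>` convention the other SSoT integrations follow; consult the rendered documentation for the authoritative keys:

```python
# nautobot_config.py sketch, illustrative only. "enable_bootstrap" is an
# assumed key name following the plugin's enable_<integration> convention.
import os

PLUGINS = ["nautobot_ssot"]

PLUGINS_CONFIG = {
    "nautobot_ssot": {
        # Toggle the Bootstrap integration via an environment variable.
        "enable_bootstrap": os.getenv("NAUTOBOT_SSOT_ENABLE_BOOTSTRAP", "False") == "True",
    },
}
```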