[PLINT-469] Add support for pagination in octopus integration #19228

Merged
7 changes: 7 additions & 0 deletions octopus_deploy/assets/configuration/spec.yaml
@@ -89,6 +89,13 @@ files:
- name: interval
type: integer
example: {}
- name: paginated_limit
description: |
Sets the number of items API calls should return at a time. Default is 30.
value:
example: 30
type: integer
required: false
- template: instances/default
- template: instances/http
overrides:
62 changes: 44 additions & 18 deletions octopus_deploy/datadog_checks/octopus_deploy/check.py
@@ -58,6 +58,7 @@ def _update_times(self):

def _process_endpoint(self, endpoint, params=None, report_service_check=False):
try:
params = {} if params is None else params
response = self.http.get(f"{self.config.octopus_endpoint}/{endpoint}", params=params)
response.raise_for_status()
if report_service_check:
@@ -73,10 +74,31 @@ def _process_endpoint(self, endpoint, params=None, report_service_check=False):
self.warning("Failed to access endpoint: %s: %s", endpoint, e)
return {}

def _process_paginated_endpoint(self, endpoint, params=None, report_service_check=False):
skip = 0
take = self.config.paginated_limit
num_pages = 1
num_pages_seen = 0
all_items = []
params = {} if params is None else params
while num_pages_seen < num_pages:
params['skip'] = skip
params['take'] = take

response_json = self._process_endpoint(endpoint, params=params, report_service_check=report_service_check)
if response_json == {}:
return response_json
items = response_json.get("Items")
num_pages_seen += 1
num_pages = response_json.get("NumberOfPages", num_pages)
skip += self.config.paginated_limit
all_items = items + all_items
return {"Items": all_items}

def _init_spaces_discovery(self):
self.log.info("Spaces discovery: %s", self.config.spaces)
self._spaces_discovery = Discovery(
lambda: self._process_endpoint("api/spaces", report_service_check=True).get('Items', []),
lambda: self._process_paginated_endpoint("api/spaces", report_service_check=True).get('Items', []),
limit=self.config.spaces.limit,
include=normalize_discover_config_include(self.config.spaces),
exclude=self.config.spaces.exclude,
@@ -88,9 +110,9 @@ def _init_default_project_groups_discovery(self, space_id):
self.log.info("Default Project Groups discovery: %s", self.config.project_groups)
if space_id not in self._default_project_groups_discovery:
self._default_project_groups_discovery[space_id] = Discovery(
lambda: self._process_endpoint(f"api/{space_id}/projectgroups", report_service_check=True).get(
'Items', []
),
lambda: self._process_paginated_endpoint(
f"api/{space_id}/projectgroups", report_service_check=True
).get('Items', []),
limit=self.config.project_groups.limit,
include=normalize_discover_config_include(self.config.project_groups),
exclude=self.config.project_groups.exclude,
@@ -102,9 +124,9 @@ def _init_project_groups_discovery(self, space_id, project_groups_config):
self.log.info("Project Groups discovery: %s", project_groups_config)
if space_id not in self._project_groups_discovery:
self._project_groups_discovery[space_id] = Discovery(
lambda: self._process_endpoint(f"api/{space_id}/projectgroups", report_service_check=True).get(
'Items', []
),
lambda: self._process_paginated_endpoint(
f"api/{space_id}/projectgroups", report_service_check=True
).get('Items', []),
limit=project_groups_config.limit,
include=normalize_discover_config_include(project_groups_config),
exclude=project_groups_config.exclude,
@@ -118,8 +140,9 @@ def _init_default_projects_discovery(self, space_id, project_group_id):
self._default_projects_discovery[space_id] = {}
if project_group_id not in self._default_projects_discovery[space_id]:
self._default_projects_discovery[space_id][project_group_id] = Discovery(
lambda: self._process_endpoint(
f"api/{space_id}/projectgroups/{project_group_id}/projects", report_service_check=True
lambda: self._process_paginated_endpoint(
f"api/{space_id}/projectgroups/{project_group_id}/projects",
report_service_check=True,
).get('Items', []),
limit=self.config.projects.limit,
include=normalize_discover_config_include(self.config.projects),
@@ -134,8 +157,9 @@ def _init_projects_discovery(self, space_id, project_group_id, projects_config):
self._projects_discovery[space_id] = {}
if project_group_id not in self._projects_discovery[space_id]:
self._projects_discovery[space_id][project_group_id] = Discovery(
lambda: self._process_endpoint(
f"api/{space_id}/projectgroups/{project_group_id}/projects", report_service_check=True
lambda: self._process_paginated_endpoint(
f"api/{space_id}/projectgroups/{project_group_id}/projects",
report_service_check=True,
).get('Items', []),
limit=projects_config.limit,
include=normalize_discover_config_include(projects_config),
@@ -152,7 +176,7 @@ def _process_spaces(self):
else:
spaces = [
(None, space.get("Name"), space, None)
for space in self._process_endpoint("api/spaces", report_service_check=True).get('Items', [])
for space in self._process_paginated_endpoint("api/spaces", report_service_check=True).get('Items', [])
]
self.log.debug("Monitoring %s spaces", len(spaces))
for _, _, space, space_config in spaces:
@@ -178,7 +202,9 @@ def _process_project_groups(self, space_id, space_name, project_groups_config):
else:
project_groups = [
(None, project_group.get("Name"), project_group, None)
for project_group in self._process_endpoint(f"api/{space_id}/projectgroups").get('Items', [])
for project_group in self._process_paginated_endpoint(f"api/{space_id}/projectgroups").get(
'Items', []
)
]
self.log.debug("Monitoring %s Project Groups", len(project_groups))
for _, _, project_group, project_group_config in project_groups:
@@ -209,7 +235,7 @@ def _process_projects(self, space_id, space_name, project_group_id, project_grou
else:
projects = [
(None, project.get("Name"), project, None)
for project in self._process_endpoint(
for project in self._process_paginated_endpoint(
f"api/{space_id}/projectgroups/{project_group_id}/projects"
).get('Items', [])
]
@@ -230,7 +256,7 @@ def _process_projects(self, space_id, space_name, project_group_id, project_grou
def _process_queued_and_running_tasks(self, space_id, space_name, project_id, project_name):
self.log.debug("Collecting running and queued tasks for project %s", project_name)
params = {'project': project_id, 'states': ["Queued", "Executing"]}
response_json = self._process_endpoint(f"api/{space_id}/tasks", params)
response_json = self._process_paginated_endpoint(f"api/{space_id}/tasks", params)
self._process_tasks(space_id, space_name, project_name, response_json.get('Items', []))

def _process_completed_tasks(self, space_id, space_name, project_id, project_name):
Expand All @@ -240,7 +266,7 @@ def _process_completed_tasks(self, space_id, space_name, project_id, project_nam
'fromCompletedDate': self._from_completed_time,
'toCompletedDate': self._to_completed_time,
}
response_json = self._process_endpoint(f"api/{space_id}/tasks", params)
response_json = self._process_paginated_endpoint(f"api/{space_id}/tasks", params)
self._process_tasks(space_id, space_name, project_name, response_json.get('Items', []))

def _calculate_task_times(self, task):
Expand Down Expand Up @@ -302,7 +328,7 @@ def _process_tasks(self, space_id, space_name, project_name, tasks_json):
def _collect_server_nodes_metrics(self):
self.log.debug("Collecting server node metrics.")
url = "api/octopusservernodes"
response_json = self._process_endpoint(url)
response_json = self._process_paginated_endpoint(url)
server_nodes = response_json.get('Items', [])

for server_node in server_nodes:
@@ -345,7 +371,7 @@ def _collect_new_events(self, space_id, space_name):
'to': self._to_completed_time,
'eventCategories': list(EVENT_TO_ALERT_TYPE.keys()),
}
events = self._process_endpoint(url, params=params).get('Items', [])
events = self._process_paginated_endpoint(url, params=params).get('Items', [])
tags = self._base_tags + [f"space_name:{space_name}"]

for event in events:
@@ -52,6 +52,10 @@ def instance_min_collection_interval():
return 15


def instance_paginated_limit():
return 30


def instance_persist_connections():
return False

@@ -110,6 +110,7 @@ class InstanceConfig(BaseModel):
min_collection_interval: Optional[float] = None
ntlm_domain: Optional[str] = None
octopus_endpoint: str
paginated_limit: Optional[int] = None
password: Optional[str] = None
persist_connections: Optional[bool] = None
project_groups: Optional[ProjectGroups] = None
@@ -113,6 +113,11 @@ instances:
# <OPTION_1>: <VALUE_1>
# <OPTION_2>: <VALUE_2>

## @param paginated_limit - integer - optional - default: 30
## Sets the number of items API calls should return at a time. Default is 30.
#
# paginated_limit: 30

## @param tags - list of strings - optional
## A list of tags to attach to every metric and service check emitted by this instance.
##
15 changes: 13 additions & 2 deletions octopus_deploy/tests/conftest.py
@@ -1,6 +1,7 @@
# (C) Datadog, Inc. 2024-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import copy
import json
import os
from pathlib import Path
@@ -18,6 +19,14 @@
from .constants import COMPOSE_FILE, INSTANCE, LAB_INSTANCE, USE_OCTOPUS_LAB


# https://docs.python.org/3/library/unittest.mock-examples.html#coping-with-mutable-arguments
class CopyingMock(mock.MagicMock):
def __call__(self, /, *args, **kwargs):
args = copy.deepcopy(args)
kwargs = copy.deepcopy(kwargs)
return super().__call__(*args, **kwargs)


@pytest.fixture(scope='session')
def dd_environment():
if USE_OCTOPUS_LAB:
@@ -117,6 +126,8 @@ def mock_http_get(request, monkeypatch, mock_http_call):
elapsed_total_seconds = param.pop('elapsed_total_seconds', {})

def get(url, *args, **kwargs):
args = copy.deepcopy(args)
kwargs = copy.deepcopy(kwargs)
method = 'GET'
url = get_url_path(url)
if http_error and url in http_error:
@@ -128,8 +139,8 @@ def get(url, *args, **kwargs):
mock_elapsed = mock.MagicMock(total_seconds=mock.MagicMock(return_value=elapsed_total_seconds.get(url, 0.0)))
mock_json = mock.MagicMock(return_value=mock_http_call(method, url, headers=headers, params=params))
mock_status_code = mock.MagicMock(return_value=200)
return mock.MagicMock(elapsed=mock_elapsed, json=mock_json, status_code=mock_status_code)
return CopyingMock(elapsed=mock_elapsed, json=mock_json, status_code=mock_status_code)

mock_get = mock.MagicMock(side_effect=get)
mock_get = CopyingMock(side_effect=get)
monkeypatch.setattr('requests.get', mock_get)
return mock_get
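The CopyingMock above matters because the new pagination loop reuses and mutates a single params dict between requests; a plain MagicMock stores references to its call arguments, so every recorded call would show the final skip/take values. A minimal sketch of the difference (illustrative only, run outside the fixture):

from unittest import mock
import copy

class CopyingMock(mock.MagicMock):                      # same idea as the fixture's helper
    def __call__(self, /, *args, **kwargs):
        return super().__call__(*copy.deepcopy(args), **copy.deepcopy(kwargs))

plain, copying = mock.MagicMock(), CopyingMock()
params = {"skip": 0, "take": 30}
plain(params)
copying(params)
params["skip"] = 30                                      # the check mutates params in place

assert plain.call_args_list[0] == mock.call({"skip": 30, "take": 30})   # first call looks mutated
assert copying.call_args_list[0] == mock.call({"skip": 0, "take": 30})  # snapshot preserved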
@@ -1,6 +1,6 @@
{
"ItemType": "Event",
"TotalResults": 1,
"TotalResults": 0,
"ItemsPerPage": 0,
"NumberOfPages": 1,
"LastPageNumber": 0,
@@ -0,0 +1,16 @@
{
"ItemType": "Event",
"TotalResults": 0,
"ItemsPerPage": 0,
"NumberOfPages": 1,
"LastPageNumber": 0,
"Items": [],
"Links": {
"Self": "/api/events?from=2024-09-23 14:45:00.123000+00:00&to=2024-09-23 14:45:00.123000+00:00&eventCategories=MachineUnhealthy,MachineUnavailable,MachineHealthy,CertificateExpired,DeploymentFailed,DeploymentSucceeded,LoginFailed,MachineAdded,MachineDeleted",
"Template": "/api/events{?skip,regarding,regardingAny,user,users,projects,projectGroups,environments,eventGroups,eventCategories,eventAgents,tags,tenants,from,to,internal,fromAutoId,toAutoId,documentTypes,asCsv,take,ids,spaces,includeSystem,excludeDifference}",
"Page.All": "/api/events?from=2024-09-23 14:45:00.123000+00:00&to=2024-09-23 14:45:00.123000+00:00&eventCategories=MachineUnhealthy,MachineUnavailable,MachineHealthy,CertificateExpired,DeploymentFailed,DeploymentSucceeded,LoginFailed,MachineAdded,MachineDeleted",
"Page.Next": "/api/events?from=2024-09-23 14:45:00.123000+00:00&to=2024-09-23 14:45:00.123000+00:00&eventCategories=MachineUnhealthy,MachineUnavailable,MachineHealthy,CertificateExpired,DeploymentFailed,DeploymentSucceeded,LoginFailed,MachineAdded,MachineDeleted",
"Page.Current": "/api/events?from=2024-09-23 14:45:00.123000+00:00&to=2024-09-23 14:45:00.123000+00:00&eventCategories=MachineUnhealthy,MachineUnavailable,MachineHealthy,CertificateExpired,DeploymentFailed,DeploymentSucceeded,LoginFailed,MachineAdded,MachineDeleted",
"Page.Last": "/api/events?from=2024-09-23 14:45:00.123000+00:00&to=2024-09-23 14:45:00.123000+00:00&eventCategories=MachineUnhealthy,MachineUnavailable,MachineHealthy,CertificateExpired,DeploymentFailed,DeploymentSucceeded,LoginFailed,MachineAdded,MachineDeleted"
}
}
@@ -0,0 +1,108 @@
{
"ItemType": "Event",
"TotalResults": 3,
"ItemsPerPage": 2,
"NumberOfPages": 2,
"LastPageNumber": 1,
"Items": [
{
"Id": "Events-4873",
"RelatedDocumentIds": [
"Deployments-206",
"Projects-1",
"Releases-164",
"Environments-1",
"ServerTasks-118976",
"Channels-61"
],
"Category": "MachineUnhealthy",
"UserId": "users-system",
"Username": "system",
"IsService": false,
"IdentityEstablishedWith": "",
"UserAgent": "Server",
"Occurred": "2024-09-23T14:45:38.109+00:00",
"Message": "Machine test is unhealthy",
"MessageHtml": "Machine <a href='#/machines/Machines-1'>test</a> is unhealthy",
"MessageReferences": [
{
"ReferencedDocumentId": "Machines-1",
"StartIndex": 65,
"Length": 3
}
],
"Comments": null,
"Details": null,
"ChangeDetails": {
"DocumentContext": null,
"Differences": null
},
"IpAddress": null,
"SpaceId": "Spaces-1",
"Links": {
"Self": "/api/events/Events-4873"
}
},
{
"Id": "Events-4865",
"RelatedDocumentIds": [
"Deployments-204",
"Projects-1",
"Releases-163",
"Environments-1",
"ServerTasks-118973",
"Channels-61"
],
"Category": "DeploymentFailed",
"UserId": "users-system",
"Username": "system",
"IsService": false,
"IdentityEstablishedWith": "",
"UserAgent": "Server",
"Occurred": "2024-09-23T14:45:12.391+00:00",
"Message": "Deploy to dev failed for new-project-from-group release 0.0.2 to dev",
"MessageHtml": "<a href='#/deployments/Deployments-204'>Deploy to dev</a> failed for <a href='#/projects/Projects-61'>new-project-from-group</a> release <a href='#/releases/Releases-163'>0.0.2</a> to <a href='#/environments/Environments-1'>dev</a>",
"MessageReferences": [
{
"ReferencedDocumentId": "Deployments-204",
"StartIndex": 0,
"Length": 13
},
{
"ReferencedDocumentId": "Projects-61",
"StartIndex": 25,
"Length": 22
},
{
"ReferencedDocumentId": "Releases-163",
"StartIndex": 56,
"Length": 5
},
{
"ReferencedDocumentId": "Environments-1",
"StartIndex": 65,
"Length": 3
}
],
"Comments": null,
"Details": null,
"ChangeDetails": {
"DocumentContext": null,
"Differences": null
},
"IpAddress": null,
"SpaceId": "Spaces-1",
"Links": {
"Self": "/api/events/Events-4865"
}
}
],
"Links": {
"Self": "/api/events?from=2024-09-23 14:45:00.123000+00:00&to=2024-09-23 14:45:15.123000+00:00&eventCategories=MachineUnhealthy,MachineUnavailable,MachineHealthy,CertificateExpired,DeploymentFailed,DeploymentSucceeded,LoginFailed,MachineAdded,MachineDeleted",
"Template": "/api/events{?skip,regarding,regardingAny,user,users,projects,projectGroups,environments,eventGroups,eventCategories,eventAgents,tags,tenants,from,to,internal,fromAutoId,toAutoId,documentTypes,asCsv,take,ids,spaces,includeSystem,excludeDifference}",
"Page.All": "/api/events?from=2024-09-23 14:45:00.123000+00:00&to=2024-09-23 14:45:15.123000+00:00&eventCategories=MachineUnhealthy,MachineUnavailable,MachineHealthy,CertificateExpired,DeploymentFailed,DeploymentSucceeded,LoginFailed,MachineAdded,MachineDeleted",
"Page.Next": "/api/events?from=2024-09-23 14:45:00.123000+00:00&to=2024-09-23 14:45:15.123000+00:00&eventCategories=MachineUnhealthy,MachineUnavailable,MachineHealthy,CertificateExpired,DeploymentFailed,DeploymentSucceeded,LoginFailed,MachineAdded,MachineDeleted",
"Page.Current": "/api/events?from=2024-09-23 14:45:00.123000+00:00&to=2024-09-23 14:45:15.123000+00:00&eventCategories=MachineUnhealthy,MachineUnavailable,MachineHealthy,CertificateExpired,DeploymentFailed,DeploymentSucceeded,LoginFailed,MachineAdded,MachineDeleted",
"Page.Last": "/api/events?from=2024-09-23 14:45:00.123000+00:00&to=2024-09-23 14:45:15.123000+00:00&eventCategories=MachineUnhealthy,MachineUnavailable,MachineHealthy,CertificateExpired,DeploymentFailed,DeploymentSucceeded,LoginFailed,MachineAdded,MachineDeleted"
}
}
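Illustrative arithmetic for this fixture (not part of the change): it reports TotalResults 3 with ItemsPerPage 2, so NumberOfPages is 2 and a check configured with paginated_limit: 2 would walk it in two requests.

import math

total_results, items_per_page = 3, 2                 # values reported by the fixture above
number_of_pages = math.ceil(total_results / items_per_page)
requests = [
    {"skip": page * items_per_page, "take": items_per_page}
    for page in range(number_of_pages)
]
assert number_of_pages == 2
assert requests == [{"skip": 0, "take": 2}, {"skip": 2, "take": 2}]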