diff --git a/octopus_deploy/assets/configuration/spec.yaml b/octopus_deploy/assets/configuration/spec.yaml index 51fff1090d038..ddc9211d6ba79 100644 --- a/octopus_deploy/assets/configuration/spec.yaml +++ b/octopus_deploy/assets/configuration/spec.yaml @@ -17,15 +17,30 @@ files: example: http://localhost:80/api type: string required: true - - name: space - display_priority: 7 + - name: spaces + display_priority: 5 description: | - Space to monitor + Filter your integration by spaces. value: - example: Default - type: string - enabled: true - required: true + type: object + properties: + - name: limit + description: | + Maximum number of spaces to be processed. + type: integer + - name: include + type: array + items: + anyOf: + - type: string + - type: object + - name: exclude + type: array + items: + type: string + - name: interval + type: integer + example: {} - name: project_groups display_priority: 5 description: | @@ -50,6 +65,30 @@ files: - name: interval type: integer example: {} + - name: projects + display_priority: 5 + description: | + Filter your integration by projects. + value: + type: object + properties: + - name: limit + description: | + Maximum number of projects to be processed. + type: integer + - name: include + type: array + items: + anyOf: + - type: string + - type: object + - name: exclude + type: array + items: + type: string + - name: interval + type: integer + example: {} - template: instances/default - template: instances/http overrides: diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index eec3b057579b5..1c141df6da16d 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -1,239 +1,285 @@ # (C) Datadog, Inc. 
# (C) Datadog, Inc. 2024-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)

import datetime
from collections.abc import Iterable

from requests.exceptions import ConnectionError, HTTPError, InvalidURL, Timeout

from datadog_checks.base import AgentCheck
from datadog_checks.base.errors import CheckException
from datadog_checks.base.utils.discovery.discovery import Discovery
from datadog_checks.base.utils.time import get_current_datetime
from datadog_checks.octopus_deploy.config_models.instance import ProjectGroups, Projects

from .config_models import ConfigMixin


class OctopusDeployCheck(AgentCheck, ConfigMixin):
    """Collect space, project-group, project, deployment-task, and server-node
    metrics from the Octopus Deploy REST API, honoring the optional
    spaces/project_groups/projects include/exclude Discovery filters."""

    __NAMESPACE__ = 'octopus_deploy'

    def __init__(self, name, init_config, instances):
        super(OctopusDeployCheck, self).__init__(name, init_config, instances)
        # Completed-task query window [from, to); advanced every run so each
        # completed task is reported exactly once.
        self._from_completed_time = None
        self._to_completed_time = None
        self.current_datetime = None
        # Discovery caches keyed by space id (and project-group id for
        # projects) so include/exclude filter state persists across runs.
        self._spaces_discovery = None
        self._default_project_groups_discovery = {}
        self._project_groups_discovery = {}
        self._default_projects_discovery = {}
        self._projects_discovery = {}
        self._base_tags = self.instance.get("tags", [])

    def check(self, _):
        self._update_times()
        self._process_spaces()
        self._collect_server_nodes_metrics()

    def _update_times(self):
        """Slide the completed-task window forward to the current run time."""
        self.current_datetime = get_current_datetime()
        # On the first run there is no previous upper bound: start the window
        # "now" so tasks completed before the check started are not reported.
        self._from_completed_time = (
            self._to_completed_time if self._to_completed_time is not None else self.current_datetime
        )
        self._to_completed_time = self.current_datetime

    def _process_endpoint(self, endpoint, params=None, report_service_check=False):
        """GET `endpoint` relative to the configured base URL and return its JSON.

        When report_service_check is True, emit the api.can_connect gauge and
        abort the run with CheckException on connection failure; otherwise a
        failure is only logged as a warning and {} is returned.
        """
        try:
            response = self.http.get(f"{self.config.octopus_endpoint}/{endpoint}", params=params)
            response.raise_for_status()
            if report_service_check:
                self.gauge('api.can_connect', 1, tags=self._base_tags)
            return response.json()
        except (Timeout, HTTPError, InvalidURL, ConnectionError) as e:
            if report_service_check:
                self.gauge('api.can_connect', 0, tags=self._base_tags)
                raise CheckException(
                    f"Could not connect to octopus API {self.config.octopus_endpoint} octopus_endpoint: {e}"
                ) from e
            else:
                self.warning("Failed to access endpoint: %s: %s", endpoint, e)
                return {}

    def _init_spaces_discovery(self):
        """Build the Discovery filter for spaces from the instance config."""
        self.log.info("Spaces discovery: %s", self.config.spaces)
        self._spaces_discovery = Discovery(
            lambda: self._process_endpoint("api/spaces", report_service_check=True).get('Items', []),
            limit=self.config.spaces.limit,
            include=normalize_discover_config_include(self.config.spaces),
            exclude=self.config.spaces.exclude,
            interval=self.config.spaces.interval,
            key=lambda space: space.get("Name"),
        )

    def _init_default_project_groups_discovery(self, space_id):
        """Build the per-space project-group Discovery from the top-level config."""
        self.log.info("Default Project Groups discovery: %s", self.config.project_groups)
        if space_id not in self._default_project_groups_discovery:
            self._default_project_groups_discovery[space_id] = Discovery(
                lambda: self._process_endpoint(f"api/{space_id}/projectgroups", report_service_check=True).get(
                    'Items', []
                ),
                limit=self.config.project_groups.limit,
                include=normalize_discover_config_include(self.config.project_groups),
                exclude=self.config.project_groups.exclude,
                interval=self.config.project_groups.interval,
                key=lambda project_group: project_group.get("Name"),
            )

    def _init_project_groups_discovery(self, space_id, project_groups_config):
        """Build the per-space project-group Discovery from a space-level override."""
        self.log.info("Project Groups discovery: %s", project_groups_config)
        if space_id not in self._project_groups_discovery:
            self._project_groups_discovery[space_id] = Discovery(
                lambda: self._process_endpoint(f"api/{space_id}/projectgroups", report_service_check=True).get(
                    'Items', []
                ),
                limit=project_groups_config.limit,
                include=normalize_discover_config_include(project_groups_config),
                exclude=project_groups_config.exclude,
                interval=project_groups_config.interval,
                key=lambda project_group: project_group.get("Name"),
            )

    def _init_default_projects_discovery(self, space_id, project_group_id):
        """Build the per-project-group project Discovery from the top-level config."""
        self.log.info("Default Projects discovery: %s", self.config.projects)
        if space_id not in self._default_projects_discovery:
            self._default_projects_discovery[space_id] = {}
        if project_group_id not in self._default_projects_discovery[space_id]:
            self._default_projects_discovery[space_id][project_group_id] = Discovery(
                lambda: self._process_endpoint(
                    f"api/{space_id}/projectgroups/{project_group_id}/projects", report_service_check=True
                ).get('Items', []),
                limit=self.config.projects.limit,
                include=normalize_discover_config_include(self.config.projects),
                exclude=self.config.projects.exclude,
                interval=self.config.projects.interval,
                key=lambda project: project.get("Name"),
            )

    def _init_projects_discovery(self, space_id, project_group_id, projects_config):
        """Build the per-project-group project Discovery from a group-level override."""
        self.log.info("Projects discovery: %s", projects_config)
        if space_id not in self._projects_discovery:
            self._projects_discovery[space_id] = {}
        if project_group_id not in self._projects_discovery[space_id]:
            self._projects_discovery[space_id][project_group_id] = Discovery(
                lambda: self._process_endpoint(
                    f"api/{space_id}/projectgroups/{project_group_id}/projects", report_service_check=True
                ).get('Items', []),
                limit=projects_config.limit,
                include=normalize_discover_config_include(projects_config),
                exclude=projects_config.exclude,
                interval=projects_config.interval,
                key=lambda project: project.get("Name"),
            )

    def _process_spaces(self):
        """Emit space.count per monitored space and recurse into project groups."""
        if self.config.spaces:
            if self._spaces_discovery is None:
                self._init_spaces_discovery()
            spaces = list(self._spaces_discovery.get_items())
        else:
            # No filter configured: monitor every space, mirroring the
            # (pattern, name, item, config) tuples Discovery yields.
            spaces = [
                (None, space.get("Name"), space, None)
                for space in self._process_endpoint("api/spaces", report_service_check=True).get('Items', [])
            ]
        self.log.debug("Monitoring %s spaces", len(spaces))
        for _, _, space, space_config in spaces:
            space_id = space.get("Id")
            space_name = space.get("Name")
            tags = self._base_tags + [f'space_id:{space_id}', f'space_name:{space_name}']
            self.gauge("space.count", 1, tags=tags)
            self.log.debug("Processing space %s", space_name)
            self._process_project_groups(
                space_id, space_name, space_config.get("project_groups") if space_config else None
            )

    def _process_project_groups(self, space_id, space_name, project_groups_config):
        """Emit project_group.count per group in a space and recurse into projects.

        A space-level project_groups override takes precedence over the
        top-level config; with neither, every group is monitored.
        """
        if project_groups_config:
            self._init_project_groups_discovery(space_id, ProjectGroups(**project_groups_config))
            project_groups = list(self._project_groups_discovery[space_id].get_items())
        else:
            if self.config.project_groups:
                self._init_default_project_groups_discovery(space_id)
                project_groups = list(self._default_project_groups_discovery[space_id].get_items())
            else:
                project_groups = [
                    (None, project_group.get("Name"), project_group, None)
                    for project_group in self._process_endpoint(f"api/{space_id}/projectgroups").get('Items', [])
                ]
        self.log.debug("Monitoring %s Project Groups", len(project_groups))
        for _, _, project_group, project_group_config in project_groups:
            project_group_id = project_group.get("Id")
            project_group_name = project_group.get("Name")
            tags = self._base_tags + [
                f'space_name:{space_name}',
                f'project_group_id:{project_group_id}',
                f'project_group_name:{project_group_name}',
            ]
            self.gauge("project_group.count", 1, tags=tags)
            self._process_projects(
                space_id,
                space_name,
                project_group_id,
                project_group_name,
                project_group_config.get("projects") if project_group_config else None,
            )

    def _process_projects(self, space_id, space_name, project_group_id, project_group_name, projects_config):
        """Emit project.count per project in a group and collect its tasks."""
        if projects_config:
            self._init_projects_discovery(space_id, project_group_id, Projects(**projects_config))
            projects = list(self._projects_discovery[space_id][project_group_id].get_items())
        else:
            if self.config.projects:
                self._init_default_projects_discovery(space_id, project_group_id)
                projects = list(self._default_projects_discovery[space_id][project_group_id].get_items())
            else:
                projects = [
                    (None, project.get("Name"), project, None)
                    for project in self._process_endpoint(
                        f"api/{space_id}/projectgroups/{project_group_id}/projects"
                    ).get('Items', [])
                ]
        self.log.debug("Monitoring %s Projects", len(projects))
        for _, _, project, _ in projects:
            project_id = project.get("Id")
            project_name = project.get("Name")
            tags = self._base_tags + [
                f'space_name:{space_name}',
                f'project_group_name:{project_group_name}',
                f'project_id:{project_id}',
                f'project_name:{project_name}',
            ]
            self.gauge("project.count", 1, tags=tags)
            self._process_queued_and_running_tasks(space_id, space_name, project_id, project_name)
            self._process_completed_tasks(space_id, space_name, project_id, project_name)

    def _process_queued_and_running_tasks(self, space_id, space_name, project_id, project_name):
        """Collect metrics for a project's currently queued or executing tasks."""
        self.log.debug("Collecting running and queued tasks for project %s", project_name)
        params = {'project': project_id, 'states': ["Queued", "Executing"]}
        response_json = self._process_endpoint(f"api/{space_id}/tasks", params)
        self._process_tasks(space_name, project_name, response_json.get('Items', []))

    def _process_completed_tasks(self, space_id, space_name, project_id, project_name):
        """Collect metrics for tasks completed inside the current run window."""
        self.log.debug("Collecting completed tasks for project %s", project_name)
        params = {
            'project': project_id,
            'fromCompletedDate': self._from_completed_time,
            'toCompletedDate': self._to_completed_time,
        }
        response_json = self._process_endpoint(f"api/{space_id}/tasks", params)
        self._process_tasks(space_name, project_name, response_json.get('Items', []))

    def _calculate_task_times(self, task):
        """Return (queued_time, executing_time, completed_time) in seconds.

        -1 is the sentinel for "not applicable yet": a task that has not
        started has no executing/completed time, and a started-but-unfinished
        task has no completed time. Timestamps are ISO-8601 strings from the
        API; completed_time measures how long ago the task finished.
        """
        task_queue_time = task.get("QueueTime")
        task_start_time = task.get("StartTime")
        task_completed_time = task.get("CompletedTime")
        if task_start_time:
            queued_time = (
                datetime.datetime.fromisoformat(task_start_time) - datetime.datetime.fromisoformat(task_queue_time)
            ).total_seconds()
            if task_completed_time:
                executing_time = (
                    datetime.datetime.fromisoformat(task_completed_time)
                    - datetime.datetime.fromisoformat(task_start_time)
                ).total_seconds()
                completed_time = (
                    self.current_datetime - datetime.datetime.fromisoformat(task_completed_time)
                ).total_seconds()
            else:
                executing_time = (
                    self.current_datetime - datetime.datetime.fromisoformat(task_start_time)
                ).total_seconds()
                completed_time = -1
        else:
            queued_time = (self.current_datetime - datetime.datetime.fromisoformat(task_queue_time)).total_seconds()
            executing_time = -1
            completed_time = -1
        return queued_time, executing_time, completed_time

    def _process_tasks(self, space_name, project_name, tasks_json):
        """Emit deployment.* gauges for each task returned by the tasks endpoint."""
        self.log.debug("Discovered %s tasks for project %s", len(tasks_json), project_name)
        for task in tasks_json:
            task_id = task.get("Id")
            tags = self._base_tags + [
                f'space_name:{space_name}',
                f'project_name:{project_name}',
                f'task_id:{task_id}',
                f'task_name:{task.get("Name")}',
                f'task_state:{task.get("State")}',
            ]
            self.log.debug("Processing task id %s for project %s", task_id, project_name)
            queued_time, executing_time, completed_time = self._calculate_task_times(task)
            self.gauge("deployment.count", 1, tags=tags)
            self.gauge("deployment.queued_time", queued_time, tags=tags)
            if executing_time != -1:
                self.gauge("deployment.executing_time", executing_time, tags=tags)
            # BUG FIX: this gauge was guarded by `executing_time != -1`, which
            # emitted the -1 sentinel as a real metric for tasks that had
            # started but not yet completed. Guard on completed_time itself.
            if completed_time != -1:
                self.gauge("deployment.completed_time", completed_time, tags=tags)

    def _collect_server_nodes_metrics(self):
        """Emit server_node.* gauges for every Octopus server node."""
        self.log.debug("Collecting server node metrics.")
        url = "api/octopusservernodes"
        response_json = self._process_endpoint(url)
        server_nodes = response_json.get('Items', [])
        for server_node in server_nodes:
            node_id = server_node.get("Id")
            node_name = server_node.get("Name")
            maintenance_mode = int(server_node.get("IsInMaintenanceMode", False))
            max_tasks = int(server_node.get("MaxConcurrentTasks", 0))
            server_tags = [f"server_node_id:{node_id}", f"server_node_name:{node_name}"]
            self.gauge("server_node.count", 1, tags=self._base_tags + server_tags)
            self.gauge("server_node.in_maintenance_mode", maintenance_mode, tags=self._base_tags + server_tags)
            self.gauge("server_node.max_concurrent_tasks", max_tasks, tags=self._base_tags + server_tags)


# Discovery class requires 'include' to be a dict, so this function is needed to normalize the config
def normalize_discover_config_include(config):
    """Normalize a config model's `include` into the dict shape Discovery expects.

    String entries map to None (no per-item config); mapping entries are
    flattened key-by-key with copied values. Returns {} when there is nothing
    to include.
    """
    normalized_config = {}
    include_list = (
        list(getattr(config, 'include', [])) if isinstance(getattr(config, 'include', None), Iterable) else []
    )
    if len(include_list) == 0:
        return {}
    for entry in include_list:
        if isinstance(entry, str):
            normalized_config[entry] = None
        elif hasattr(entry, 'items'):
            for key, value in entry.items():
                normalized_config[key] = value.copy()
    return normalized_config
persist_connections: Optional[bool] = None project_groups: Optional[ProjectGroups] = None + projects: Optional[Projects] = None proxy: Optional[Proxy] = None read_timeout: Optional[float] = None request_size: Optional[float] = None service: Optional[str] = None skip_proxy: Optional[bool] = None - space: str + spaces: Optional[Spaces] = None tags: Optional[tuple[str, ...]] = None timeout: Optional[float] = None tls_ca_cert: Optional[str] = None diff --git a/octopus_deploy/datadog_checks/octopus_deploy/constants.py b/octopus_deploy/datadog_checks/octopus_deploy/constants.py deleted file mode 100644 index 6bc726d3be640..0000000000000 --- a/octopus_deploy/datadog_checks/octopus_deploy/constants.py +++ /dev/null @@ -1,23 +0,0 @@ -# (C) Datadog, Inc. 2024-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -API_UP_METRIC = "api.can_connect" -SPACE_COUNT_METRIC = "space.count" -PROJECT_GROUP_COUNT_METRIC = "project_group.count" -PROJECT_COUNT_METRIC = "project.count" -DEPLOY_PREFIX = "deployment" -DEPLOY_COUNT_METRIC = f"{DEPLOY_PREFIX}.count" -DEPLOY_DURATION_METRIC = f"{DEPLOY_PREFIX}.duration" -DEPLOY_QUEUE_TIME_METRIC = f"{DEPLOY_PREFIX}.queue_time" -DEPLOY_SUCCESS_METRIC = f"{DEPLOY_PREFIX}.succeeded" -DEPLOY_RERUN_METRIC = f"{DEPLOY_PREFIX}.can_rerun" -DEPLOY_WARNINGS_METRIC = f"{DEPLOY_PREFIX}.has_warnings_or_errors" - -SERVER_PREFIX = "server_node" -SERVER_COUNT_METRIC = f"{SERVER_PREFIX}.count" -SERVER_MAINTENANCE_MODE_METRIC = f"{SERVER_PREFIX}.in_maintenance_mode" -SERVER_MAX_TASKS_METRIC = f"{SERVER_PREFIX}.max_concurrent_tasks" - - -DEPLOY_SUCCESS_STATE = "Success" diff --git a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example index 25d32dd938c3c..ba891f699d5a6 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example +++ b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example @@ -51,11 +51,6 @@ 
instances: # - octopus_endpoint: http://localhost:80/api - ## @param space - string - required - ## Space to monitor - # - space: Default - ## @param headers - mapping - optional ## Headers to use for every request. An Authorization header including the Octopus Deploy API key token is required ## for authentication for the REST API. @@ -64,11 +59,21 @@ instances: headers: X-Octopus-ApiKey: + ## @param spaces - mapping - optional + ## Filter your integration by spaces. + # + # spaces: {} + ## @param project_groups - mapping - optional ## Filter your integration by project groups and projects. # # project_groups: {} + ## @param projects - mapping - optional + ## Filter your integration by projects. + # + # projects: {} + ## @param auth_token - mapping - optional ## This allows for the use of authentication information from dynamic sources. ## Both a reader and writer must be configured. diff --git a/octopus_deploy/datadog_checks/octopus_deploy/error.py b/octopus_deploy/datadog_checks/octopus_deploy/error.py deleted file mode 100644 index 0fc6840589693..0000000000000 --- a/octopus_deploy/datadog_checks/octopus_deploy/error.py +++ /dev/null @@ -1,23 +0,0 @@ -# (C) Datadog, Inc. 2024-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from functools import wraps - -import requests - - -def handle_error(f): - @wraps(f) - def wrapper(check, *args, **kwargs): - try: - result = f(check, *args, **kwargs) - return result - except requests.exceptions.RequestException as e: - check.log.info( - "Encountered a RequestException in '%s' [%s]: %s", - f.__name__, - type(e), - e, - ) - - return wrapper diff --git a/octopus_deploy/datadog_checks/octopus_deploy/project_groups.py b/octopus_deploy/datadog_checks/octopus_deploy/project_groups.py deleted file mode 100644 index 1c1a561a0df1e..0000000000000 --- a/octopus_deploy/datadog_checks/octopus_deploy/project_groups.py +++ /dev/null @@ -1,19 +0,0 @@ -# (C) Datadog, Inc. 
2024-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from datadog_checks.base.utils.time import get_current_datetime - - -class ProjectGroup: - def __init__(self, project_group_json): - self.id = project_group_json.get("Id") - self.name = project_group_json.get("Name") - self.projects = None - - -class Project: - def __init__(self, project_json, project_group): - self.id = project_json.get("Id") - self.name = project_json.get("Name") - self.project_group = project_group - self.last_task_time = get_current_datetime() diff --git a/octopus_deploy/metadata.csv b/octopus_deploy/metadata.csv index 7d38572c8996d..a41002aa00758 100644 --- a/octopus_deploy/metadata.csv +++ b/octopus_deploy/metadata.csv @@ -1,11 +1,9 @@ metric_name,metric_type,interval,unit_name,per_unit_name,description,orientation,integration,short_name,curated_metric,sample_tags octopus_deploy.api.can_connect,gauge,,,,Whether or not the check can connect to the Octopus Deploy API.,-1,octopus_deploy,octopus_deploy api,, -octopus_deploy.deployment.can_rerun,gauge,,,,Whether or not the deployment can be rerun.,-1,octopus_deploy,octopus_deploy deploy rerun,, +octopus_deploy.deployment.completed_time,gauge,,second,,Duration of deployment.,-1,octopus_deploy,octopus_deploy deploy dur,, octopus_deploy.deployment.count,gauge,,,,Number of deployments monitored.,-1,octopus_deploy,octopus_deploy deploy count,, -octopus_deploy.deployment.duration,gauge,,second,,Duration of deployment.,-1,octopus_deploy,octopus_deploy deploy dur,, -octopus_deploy.deployment.has_warnings_or_errors,gauge,,,,Whether or not the deployment can be rerun.,-1,octopus_deploy,octopus_deploy warnings,, -octopus_deploy.deployment.queue_time,gauge,,second,,Time deployment was in queue.,-1,octopus_deploy,octopus_deploy deploy queue,, -octopus_deploy.deployment.succeeded,gauge,,,,Whether or not the deployment succeeded.,-1,octopus_deploy,octopus_deploy deploy success,, 
+octopus_deploy.deployment.executing_time,gauge,,second,,How long the deployment has been executing.,-1,octopus_deploy,octopus_deploy deploy dur,, +octopus_deploy.deployment.queued_time,gauge,,second,,Time deployment was in queue.,-1,octopus_deploy,octopus_deploy deploy queue,, octopus_deploy.project.count,gauge,,,,Number of projects discovered.,-1,octopus_deploy,octopus_deploy projects count,, octopus_deploy.project_group.count,gauge,,,,Number of project groups discovered.,-1,octopus_deploy,octopus_deploy project group count,, octopus_deploy.server_node.count,gauge,,,,Number of Octopus server nodes discovered.,-1,octopus_deploy,octopus_deploy server count,, diff --git a/octopus_deploy/tests/conftest.py b/octopus_deploy/tests/conftest.py index 3aaf2a4d80725..47ae09e002512 100644 --- a/octopus_deploy/tests/conftest.py +++ b/octopus_deploy/tests/conftest.py @@ -13,6 +13,7 @@ from datadog_checks.dev import docker_run from datadog_checks.dev.conditions import CheckDockerLogs, CheckEndpoints from datadog_checks.dev.fs import get_here +from datadog_checks.dev.http import MockResponse from .constants import COMPOSE_FILE, INSTANCE, LAB_INSTANCE, USE_OCTOPUS_LAB @@ -26,7 +27,7 @@ def dd_environment(): endpoint = INSTANCE["octopus_endpoint"] conditions = [ CheckDockerLogs(identifier='octopus-api', patterns=['server running']), - CheckEndpoints(f'{endpoint}/spaces'), + CheckEndpoints(f'{endpoint}/api/spaces'), ] with docker_run(compose_file, conditions=conditions): yield INSTANCE @@ -34,7 +35,7 @@ def dd_environment(): @pytest.fixture def instance(): - return {'octopus_endpoint': 'http://localhost:80/api', 'space': 'Default'} + return INSTANCE def get_json_value_from_file(file_path): @@ -55,9 +56,7 @@ def process_files(dir, response_parent): for file in dir.rglob('*'): if file.is_file() and file.stem != ".slash": relative_dir_path = ( - "/" - + (str(file.parent.relative_to(dir)) if str(file.parent.relative_to(dir)) != "." 
else "") - + ("/" if (file.parent / ".slash").is_file() else "") + "/" + str(file.parent.relative_to(dir)) + ("/" if (file.parent / ".slash").is_file() else "") ) if relative_dir_path not in response_parent: response_parent[relative_dir_path] = {} @@ -70,14 +69,24 @@ def process_dir(dir, response_parent): def create_responses_tree(): root_dir_path = os.path.join(get_here(), 'fixtures') - method_subdirs = [d for d in Path(root_dir_path).iterdir() if d.is_dir() and d.name == 'GET'] + method_subdirs = [d for d in Path(root_dir_path).iterdir() if d.is_dir() and d.name in ['GET', 'POST']] for method_subdir in method_subdirs: process_dir(method_subdir, responses_map) def method(method, url, file='response', headers=None, params=None): filename = file request_path = url - + request_path = request_path.replace('?', '/') + if params: + param_string = "" + for key, val in params.items(): + if type(val) is list: + val_string = ','.join(f'{str(val_item)}' for val_item in val) + else: + val_string = str(val) + param_string += ("/" if param_string else "") + f'{key}={val_string}' + request_path = '{}/{}'.format(url, param_string) + print(request_path) response = responses_map.get(method, {}).get(request_path, {}).get(filename) return response @@ -88,7 +97,6 @@ def method(method, url, file='response', headers=None, params=None): @pytest.fixture def mock_http_call(mock_responses): def call(method, url, file='response', headers=None, params=None): - response = mock_responses(method, url, file=file, headers=headers, params=params) if response is not None: return response @@ -105,25 +113,22 @@ def call(method, url, file='response', headers=None, params=None): def mock_http_get(request, monkeypatch, mock_http_call): param = request.param if hasattr(request, 'param') and request.param is not None else {} http_error = param.pop('http_error', {}) + data = param.pop('mock_data', {}) + elapsed_total_seconds = param.pop('elapsed_total_seconds', {}) def get(url, *args, **kwargs): method = 
'GET' url = get_url_path(url) - request_path = url.replace('?', '/') + if http_error and url in http_error: + return http_error[url] + if data and url in data: + return MockResponse(json_data=data[url], status_code=200) + headers = kwargs.get('headers') params = kwargs.get('params') - if params: - param_string = '/'.join(f'{key}={str(val)}' for key, val in params.items()) - request_path = f'{url}/{param_string}' - - request_path = request_path.replace(" ", "") - if http_error and request_path in http_error: - return http_error[request_path] - + mock_elapsed = mock.MagicMock(total_seconds=mock.MagicMock(return_value=elapsed_total_seconds.get(url, 0.0))) + mock_json = mock.MagicMock(return_value=mock_http_call(method, url, headers=headers, params=params)) mock_status_code = mock.MagicMock(return_value=200) - headers = kwargs.get('headers') - - mock_json = mock.MagicMock(return_value=mock_http_call(method, request_path, headers=headers)) - return mock.MagicMock(json=mock_json, status_code=mock_status_code) + return mock.MagicMock(elapsed=mock_elapsed, json=mock_json, status_code=mock_status_code) mock_get = mock.MagicMock(side_effect=get) monkeypatch.setattr('requests.get', mock_get) diff --git a/octopus_deploy/tests/constants.py b/octopus_deploy/tests/constants.py index 10bc8288b0523..23095bff383b3 100644 --- a/octopus_deploy/tests/constants.py +++ b/octopus_deploy/tests/constants.py @@ -13,11 +13,10 @@ OCTOPUS_SPACE = os.environ.get('OCTOPUS_SPACE', 'Default') COMPOSE_FILE = os.path.join(get_here(), 'docker', 'docker-compose.yaml') -INSTANCE = {'octopus_endpoint': 'http://localhost:80/api', 'space': 'Default'} +INSTANCE = {'octopus_endpoint': 'http://localhost:80'} LAB_INSTANCE = { 'octopus_endpoint': OCTOPUS_LAB_ENDPOINT, - 'space': OCTOPUS_SPACE, 'headers': {'X-Octopus-ApiKey': OCTOPUS_API_KEY}, } @@ -27,913 +26,14 @@ ALL_METRICS = [ + "octopus_deploy.space.count", "octopus_deploy.project_group.count", "octopus_deploy.project.count", 
"octopus_deploy.deployment.count", - "octopus_deploy.deployment.duration", - "octopus_deploy.deployment.has_warnings_or_errors", - "octopus_deploy.deployment.queue_time", - "octopus_deploy.deployment.succeeded", - "octopus_deploy.deployment.can_rerun", + "octopus_deploy.deployment.queued_time", + "octopus_deploy.deployment.executing_time", + "octopus_deploy.deployment.completed_time", "octopus_deploy.server_node.count", "octopus_deploy.server_node.in_maintenance_mode", "octopus_deploy.server_node.max_concurrent_tasks", ] - -PROJECT_GROUP_ALL_METRICS = [ - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", "space_name:Default"], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:hello", "project_group_id:ProjectGroups-3", "space_name:Default"], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:test-group", "project_group_id:ProjectGroups-2", "space_name:Default"], - 'count': 1, - }, -] - -PROJECT_GROUP_ONLY_TEST_GROUP_METRICS = [ - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", "space_name:Default"], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:hello", "project_group_id:ProjectGroups-3", "space_name:Default"], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:test-group", "project_group_id:ProjectGroups-2", "space_name:Default"], - 'count': 1, - }, -] - -PROJECT_GROUP_NO_METRICS = [ - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", "space_name:Default"], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:hello", 
"project_group_id:ProjectGroups-3", "space_name:Default"], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:test-group", "project_group_id:ProjectGroups-2", "space_name:Default"], - 'count': 0, - }, -] -PROJECT_GROUP_NO_TEST_GROUP_METRICS = [ - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", "space_name:Default"], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:hello", "project_group_id:ProjectGroups-3", "space_name:Default"], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:test-group", "project_group_id:ProjectGroups-2", "space_name:Default"], - 'count': 0, - }, -] - -PROJECT_ALL_METRICS = [ - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:test-group", - "project_group_id:ProjectGroups-2", - "space_name:Default", - "project_name:hi", - "project_id:Projects-4", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:my-project", - "project_id:Projects-2", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - ], - 'count': 1, - }, -] - -PROJECT_ONLY_TEST_GROUP_METRICS = [ - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:test-group", - "project_group_id:ProjectGroups-2", - 
"space_name:Default", - "project_name:hi", - "project_id:Projects-4", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:my-project", - "project_id:Projects-2", - ], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - ], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - ], - 'count': 0, - }, -] - -PROJECT_ONLY_DEFAULT_GROUP_METRICS = [ - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:test-group", - "project_group_id:ProjectGroups-2", - "space_name:Default", - "project_name:hi", - "project_id:Projects-4", - ], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:my-project", - "project_id:Projects-2", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - ], - 'count': 1, - }, -] - -PROJECT_ONLY_TEST_METRICS = [ - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:test-group", - "project_group_id:ProjectGroups-2", - 
"space_name:Default", - "project_name:hi", - "project_id:Projects-4", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:my-project", - "project_id:Projects-2", - ], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - ], - 'count': 1, - }, -] - -PROJECT_ONLY_HI_METRICS = [ - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:test-group", - "project_group_id:ProjectGroups-2", - "space_name:Default", - "project_name:hi", - "project_id:Projects-4", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:my-project", - "project_id:Projects-2", - ], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - ], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - ], - 'count': 0, - }, -] - -PROJECT_ONLY_HI_MY_PROJECT_METRICS = [ - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:test-group", - "project_group_id:ProjectGroups-2", - 
"space_name:Default", - "project_name:hi", - "project_id:Projects-4", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:my-project", - "project_id:Projects-2", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - ], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - ], - 'count': 0, - }, -] - -PROJECT_EXCLUDE_TEST_API_METRICS = [ - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:test-group", - "project_group_id:ProjectGroups-2", - "space_name:Default", - "project_name:hi", - "project_id:Projects-4", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:my-project", - "project_id:Projects-2", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - ], - 'count': 0, - }, -] - -PROJECT_NO_METRICS = [ - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:test-group", - "project_group_id:ProjectGroups-2", - 
"space_name:Default", - "project_name:hi", - "project_id:Projects-4", - ], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:my-project", - "project_id:Projects-2", - ], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - ], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - ], - 'count': 0, - }, -] - - -DEPLOYMENT_METRICS = [ - { - 'name': 'octopus_deploy.deployment.duration', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - 'value': 2.073, - }, - { - 'name': 'octopus_deploy.deployment.queue_time', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - 'value': 0.639, - }, - { - 'name': 'octopus_deploy.deployment.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 2, - 'value': 1, - }, - { - 'name': 'octopus_deploy.deployment.succeeded', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - 
"project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 2, - 'value': 1, - }, - { - 'name': 'octopus_deploy.deployment.can_rerun', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 2, - 'value': 0, - }, - { - 'name': 'octopus_deploy.deployment.has_warnings_or_errors', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 2, - 'value': 0, - }, - { - 'name': 'octopus_deploy.deployment.duration', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - 'value': 2.134, - }, - { - 'name': 'octopus_deploy.deployment.queue_time', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - 'value': 0.67, - }, - { - 'name': 'octopus_deploy.deployment.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - 'value': 1, - }, - { - 'name': 'octopus_deploy.deployment.succeeded', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - 'value': 0, - }, - { - 'name': 
'octopus_deploy.deployment.can_rerun', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - 'value': 0, - }, - { - 'name': 'octopus_deploy.deployment.has_warnings_or_errors', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - 'value': 1, - }, - { - 'name': 'octopus_deploy.deployment.duration', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - 'value': 6.267, - }, - { - 'name': 'octopus_deploy.deployment.queue_time', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - 'value': 0.631, - }, - { - 'name': 'octopus_deploy.deployment.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - 'value': 1, - }, - { - 'name': 'octopus_deploy.deployment.succeeded', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - 'value': 1, - }, - { - 'name': 'octopus_deploy.deployment.can_rerun', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - 
"space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - 'value': 0, - }, - { - 'name': 'octopus_deploy.deployment.has_warnings_or_errors', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - 'value': 0, - }, - { - 'name': 'octopus_deploy.deployment.duration', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - 'value': 3.192, - }, - { - 'name': 'octopus_deploy.deployment.queue_time', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - 'value': 0.613, - }, -] - - -DEPLOYMENT_METRICS_NO_PROJECT_1 = [ - { - 'name': 'octopus_deploy.deployment.duration', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 2, - }, - { - 'name': 'octopus_deploy.deployment.queue_time', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 2, - }, - { - 'name': 'octopus_deploy.deployment.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - 
"task_state:Success", - ], - 'count': 2, - }, - { - 'name': 'octopus_deploy.deployment.succeeded', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 2, - }, - { - 'name': 'octopus_deploy.deployment.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.deployment.succeeded', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.deployment.can_rerun', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.deployment.has_warnings_or_errors', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.deployment.duration', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.deployment.queue_time', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - 
"project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - }, -] - -SERVER_NODES_METRICS = [ - { - 'name': 'octopus_deploy.server_node.count', - 'tags': [ - "space_name:Default", - "server_node_name:octopus-i8932-79236734bc234-09h234n", - "server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n", - ], - 'count': 1, - 'value': 1, - }, - { - 'name': 'octopus_deploy.server_node.in_maintenance_mode', - 'tags': [ - "space_name:Default", - "server_node_name:octopus-i8932-79236734bc234-09h234n", - "server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n", - ], - 'count': 1, - 'value': 0, - }, - { - 'name': 'octopus_deploy.server_node.max_concurrent_tasks', - 'tags': [ - "space_name:Default", - "server_node_name:octopus-i8932-79236734bc234-09h234n", - "server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n", - ], - 'count': 1, - 'value': 5, - }, -] diff --git a/octopus_deploy/tests/docker/Caddyfile b/octopus_deploy/tests/docker/Caddyfile index 2316a2a225c0c..541b9e3f4d112 100644 --- a/octopus_deploy/tests/docker/Caddyfile +++ b/octopus_deploy/tests/docker/Caddyfile @@ -4,117 +4,116 @@ } :8080 { root * /usr/share/caddy/ - @get_tasks_project_1 { + @get_spaces { method GET - path /api/Spaces-1/tasks* - expression {uri}.contains('?') - expression {uri}.contains('Projects-1') + path /api/spaces } - route @get_tasks_project_1 { - rewrite * /GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json + route @get_spaces { + rewrite * "/GET/api/spaces/response.json" file_server } - @get_tasks_project_2 { + @get_projectgroups { method GET - path /api/Spaces-1/tasks* - expression {uri}.contains('?') - expression {uri}.contains('Projects-2') + path /api/Spaces-1/projectgroups } - route @get_tasks_project_2 { - rewrite * /GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json + route 
@get_projectgroups { + rewrite * /GET/api/Spaces-1/projectgroups/response.json file_server } - @get_tasks_project_3 { + @get_projectgroups_1_projects { method GET - path /api/Spaces-1/tasks* - expression {uri}.contains('?') - expression {uri}.contains('Projects-3') + path /api/Spaces-1/projectgroups/ProjectGroups-1/projects } - route @get_tasks_project_3 { - rewrite * /GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json + route @get_projectgroups_1_projects { + rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-1/projects/response.json file_server } - @get_tasks_project_4 { + @get_projectgroups_2_projects { method GET - path /api/Spaces-1/tasks* - expression {uri}.contains('?') - expression {uri}.contains('Projects-4') + path /api/Spaces-1/projectgroups/ProjectGroups-2/projects } - route @get_tasks_project_4 { - rewrite * /GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json + route @get_projectgroups_2_projects { + rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-2/projects/response.json file_server } - @get_spaces { + @get_projectgroups_3_projects { method GET - path /api/spaces + path /api/Spaces-1/projectgroups/ProjectGroups-3/projects } - route @get_spaces { - rewrite * /GET/api/spaces/response.json + route @get_projectgroups_3_projects { + rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-3/projects/response.json file_server } - @get_spaces_1 { + @get_running_tasks_project_1 { method GET - path /api/Spaces-1 + path /api/Spaces-1/tasks + expression {query.project}.contains("Projects-1") && {query.states}.contains("Queued") && {query.states}.contains("Executing") } - route @get_spaces_1 { - rewrite * /GET/api/Spaces-1/response.json + route @get_running_tasks_project_1 { + rewrite * /GET/api/Spaces-1/tasks/project=Projects-1/states=Queued,Executing/response.json file_server } - @get_projectgroups { + @get_running_tasks_project_2 { method 
GET - path /api/Spaces-1/projectgroups + path /api/Spaces-1/tasks + expression {query.project}.contains("Projects-2") && {query.states}.contains("Queued") && {query.states}.contains("Executing") } - route @get_projectgroups { - rewrite * /GET/api/Spaces-1/projectgroups/response.json + route @get_running_tasks_project_2 { + rewrite * /GET/api/Spaces-1/tasks/project=Projects-2/states=Queued,Executing/response.json file_server } - - @get_projectgroups_1 { + @get_running_tasks_project_3 { method GET - path /api/Spaces-1/projectgroups/ProjectGroups-1 + path /api/Spaces-1/tasks + expression {query.project}.contains("Projects-3") && {query.states}.contains("Queued") && {query.states}.contains("Executing") } - route @get_projectgroups_1 { - rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-1/response.json + route @get_running_tasks_project_3 { + rewrite * /GET/api/Spaces-1/tasks/project=Projects-3/states=Queued,Executing/response.json file_server } - @get_projectgroups_2 { + @get_running_tasks_project_4 { method GET - path /api/Spaces-1/projectgroups/ProjectGroups-2 + path /api/Spaces-1/tasks + expression {query.project}.contains("Projects-4") && {query.states}.contains("Queued") && {query.states}.contains("Executing") } - route @get_projectgroups_2 { - rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-2/response.json + route @get_running_tasks_project_4 { + rewrite * /GET/api/Spaces-1/tasks/project=Projects-4/states=Queued,Executing/response.json file_server } - @get_projectgroups_3 { + @get_completed_tasks_project_1 { method GET - path /api/Spaces-1/projectgroups/ProjectGroups-3 + path /api/Spaces-1/tasks* + expression {query.project}.contains("Projects-1") && {query}.contains("fromCompletedDate") && {query}.contains("toCompletedDate") } - route @get_projectgroups_3 { - rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-3/response.json + route @get_completed_tasks_project_1 { + rewrite * 
"/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json" file_server } - @get_projectgroups_1_projects { + @get_completed_tasks_project_2 { method GET - path /api/Spaces-1/projectgroups/ProjectGroups-1/projects + path /api/Spaces-1/tasks* + expression {query.project}.contains("Projects-2") && {query}.contains("fromCompletedDate") && {query}.contains("toCompletedDate") } - route @get_projectgroups_1_projects { - rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-1/projects/response.json + route @get_completed_tasks_project_2 { + rewrite * "/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json" file_server } - @get_projectgroups_2_projects { + @get_completed_tasks_project_3 { method GET - path /api/Spaces-1/projectgroups/ProjectGroups-2/projects + path /api/Spaces-1/tasks* + expression {query.project}.contains("Projects-3") && {query}.contains("fromCompletedDate") && {query}.contains("toCompletedDate") } - route @get_projectgroups_2_projects { - rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-2/projects/response.json + route @get_completed_tasks_project_3 { + rewrite * "/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json" file_server } - @get_projectgroups_3_projects { + @get_completed_tasks_project_4 { method GET - path /api/Spaces-1/projectgroups/ProjectGroups-3/projects + path /api/Spaces-1/tasks* + expression {query.project}.contains("Projects-4") && {query}.contains("fromCompletedDate") && {query}.contains("toCompletedDate") } - route @get_projectgroups_3_projects { - rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-3/projects/response.json + route @get_completed_tasks_project_4 { + rewrite * 
"/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json" file_server } @get_octopusservernodes { @@ -125,6 +124,6 @@ rewrite * /GET/api/octopusservernodes/response.json file_server } - + file_server browse } diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json new file mode 100644 index 0000000000000..163d82fa86e94 --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json @@ -0,0 +1,35 @@ +{ + "ItemType": "Task", + "TotalResults": 1, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "TotalCounts": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 12, + "Queued": 0, + "Success": 1779, + "TimedOut": 0, + "Interrupted": 0 + }, + "TotalCountsInOtherSpaces": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 0, + "Queued": 0, + "Success": 0, + "TimedOut": 0, + "Interrupted": 0 + }, + "Items": [], + "Links": { + "Self": "/api/tasks?skip=0&take=30", + "Template": "/api/tasks{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "Page.All": "/api/tasks?skip=0&take=2147483647", + "Page.Current": "/api/tasks?skip=0&take=30", + "Page.Last": "/api/tasks?skip=0&take=30" + } +} \ No newline at end of file diff --git 
a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json similarity index 100% rename from octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json rename to octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/states=Queued,Executing/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/states=Queued,Executing/response.json new file mode 100644 index 0000000000000..3657788aaaedc --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/states=Queued,Executing/response.json @@ -0,0 +1,35 @@ +{ + "ItemType": "Task", + "TotalResults": 0, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "TotalCounts": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 2, + "Queued": 0, + "Success": 1, + "TimedOut": 0, + "Interrupted": 0 + }, + "TotalCountsInOtherSpaces": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 0, + "Queued": 0, + "Success": 0, + "TimedOut": 0, + "Interrupted": 0 + }, + "Items": [], + "Links": { + "Self": "/api/tasks?skip=0&take=30", + "Template": "/api/tasks{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "Page.All": "/api/tasks?skip=0&take=2147483647", + "Page.Current": 
"/api/tasks?skip=0&take=30", + "Page.Last": "/api/tasks?skip=0&take=30" + } +} \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json similarity index 100% rename from octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json rename to octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json new file mode 100644 index 0000000000000..79bccd61a6f32 --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json @@ -0,0 +1,35 @@ +{ + "ItemType": "Task", + "TotalResults": 0, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "TotalCounts": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 12, + "Queued": 0, + "Success": 1783, + "TimedOut": 0, + "Interrupted": 0 + }, + "TotalCountsInOtherSpaces": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 0, + "Queued": 0, + "Success": 0, + "TimedOut": 0, + "Interrupted": 0 + }, + "Items": [], + "Links": { + "Self": "/api/tasks?skip=0&take=30", + 
"Template": "/api/tasks{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "Page.All": "/api/tasks?skip=0&take=2147483647", + "Page.Current": "/api/tasks?skip=0&take=30", + "Page.Last": "/api/tasks?skip=0&take=30" + } +} \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/states=Queued,Executing/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/states=Queued,Executing/response.json new file mode 100644 index 0000000000000..49bdeab0175bb --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/states=Queued,Executing/response.json @@ -0,0 +1,79 @@ +{ + "ItemType": "Task", + "TotalResults": 1, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "TotalCounts": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 1, + "Failed": 2, + "Queued": 0, + "Success": 1, + "TimedOut": 0, + "Interrupted": 0 + }, + "TotalCountsInOtherSpaces": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 0, + "Queued": 0, + "Success": 0, + "TimedOut": 0, + "Interrupted": 0 + }, + "Items": [ + { + "Id": "ServerTasks-118048", + "SpaceId": "Spaces-1", + "EstimatedRemainingQueueDurationSeconds": 0, + "Name": "Deploy", + "Description": "Deploy hello release 0.0.36 to staging", + "Arguments": { + "DeploymentId": "Deployments-111" + }, + "State": "Executing", + "Completed": "Executing...", + "QueueTime": "2024-09-23T14:42:00.123+00:00", + "QueueTimeExpiry": null, + "StartTime": "2024-09-23T14:42:30.123+00:00", + "LastUpdatedTime": "2024-11-05T18:59:25.395+00:00", + "CompletedTime": null, + "ServerNode": "OctopusServerNodes-50c3dfbarc82", + "Duration": "18 seconds", + "ErrorMessage": "", + "HasBeenPickedUpByProcessor": true, + 
"IsCompleted": false, + "FinishedSuccessfully": false, + "HasPendingInterruptions": false, + "CanRerun": false, + "HasWarningsOrErrors": false, + "UnmetPreconditions": null, + "ProjectId": "Projects-2", + "Links": { + "Self": "/api/tasks/ServerTasks-118048", + "Web": "/app#/Spaces-1/tasks/ServerTasks-118048", + "Raw": "/api/tasks/ServerTasks-118048/raw", + "Rerun": "/api/tasks/rerun/ServerTasks-118048", + "Cancel": "/api/tasks/ServerTasks-118048/cancel", + "State": "/api/tasks/ServerTasks-118048/state", + "BlockedBy": "/api/tasks/ServerTasks-118048/blockedby", + "QueuedBehind": "/api/tasks/ServerTasks-118048/queued-behind{?skip,take}", + "Details": "/api/tasks/ServerTasks-118048/details{?verbose,tail,ranges}", + "StatusMessages": "/api/tasks/ServerTasks-118048/status/messages", + "Prioritize": "/api/tasks/ServerTasks-118048/prioritize", + "Artifacts": "/api/Spaces-1/artifacts?regarding=ServerTasks-118048", + "Interruptions": "/api/Spaces-1/interruptions?regarding=ServerTasks-118048" + } + } + ], + "Links": { + "Self": "/api/tasks?skip=0&take=30", + "Template": "/api/tasks{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "Page.All": "/api/tasks?skip=0&take=2147483647", + "Page.Current": "/api/tasks?skip=0&take=30", + "Page.Last": "/api/tasks?skip=0&take=30" + } + } \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json new file mode 100644 index 0000000000000..9c020bbbd218d --- /dev/null +++ 
b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json @@ -0,0 +1,35 @@ +{ + "ItemType": "Task", + "TotalResults": 3, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "TotalCounts": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 13, + "Queued": 0, + "Success": 1783, + "TimedOut": 0, + "Interrupted": 0 + }, + "TotalCountsInOtherSpaces": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 0, + "Queued": 0, + "Success": 0, + "TimedOut": 0, + "Interrupted": 0 + }, + "Items": [], + "Links": { + "Self": "/api/tasks?skip=0&take=30", + "Template": "/api/tasks{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "Page.All": "/api/tasks?skip=0&take=2147483647", + "Page.Current": "/api/tasks?skip=0&take=30", + "Page.Last": "/api/tasks?skip=0&take=30" + } +} \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json similarity index 92% rename from octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json rename to octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json index fcefac4dceb19..042a162b333cc 100644 --- 
a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json @@ -35,11 +35,11 @@ }, "State": "Failed", "Completed": "Monday, 23 September 2024 3:10:03 PM +00:00", - "QueueTime": "2024-09-23T15:09:56.364+00:00", + "QueueTime": "2024-09-23T14:42:30.123+00:00", "QueueTimeExpiry": null, - "StartTime": "2024-09-23T15:09:56.995+00:00", + "StartTime": "2024-09-23T14:44:20.123+00:00", "LastUpdatedTime": "2024-09-23T15:10:03.262+00:00", - "CompletedTime": "2024-09-23T15:10:03.262+00:00", + "CompletedTime": "2024-09-23T14:45:10.123+00:00", "ServerNode": "OctopusServerNodes-50c3dfbarc82", "Duration": "6 seconds", "ErrorMessage": "The deployment failed because one or more steps failed. Please see the deployment log for details.", @@ -77,11 +77,11 @@ }, "State": "Success", "Completed": "Monday, 23 September 2024 3:00:28 PM +00:00", - "QueueTime": "2024-09-23T15:00:25.468+00:00", + "QueueTime": "2024-09-23T14:42:50.123+00:00", "QueueTimeExpiry": null, - "StartTime": "2024-09-23T15:00:26.138+00:00", + "StartTime": "2024-09-23T14:44:20.123+00:00", "LastUpdatedTime": "2024-09-23T15:00:28.272+00:00", - "CompletedTime": "2024-09-23T15:00:28.272+00:00", + "CompletedTime": "2024-09-23T14:45:14.123+00:00", "ServerNode": "OctopusServerNodes-50c3dfbarc82", "Duration": "2 seconds", "ErrorMessage": "", @@ -119,11 +119,11 @@ }, "State": "Success", "Completed": "Monday, 23 September 2024 3:00:21 PM +00:00", - "QueueTime": "2024-09-23T15:00:19.040+00:00", + "QueueTime": "2024-09-23T14:44:02.123+00:00", "QueueTimeExpiry": null, - "StartTime": "2024-09-23T15:00:19.679+00:00", + "StartTime": "2024-09-23T14:44:20.123+00:00", "LastUpdatedTime": "2024-09-23T15:00:21.752+00:00", - "CompletedTime": "2024-09-23T15:00:21.752+00:00", + 
"CompletedTime": "2024-09-23T14:45:01.123+00:00", "ServerNode": "OctopusServerNodes-50c3dfbarc82", "Duration": "2 seconds", "ErrorMessage": "", diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/states=Queued,Executing/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/states=Queued,Executing/response.json new file mode 100644 index 0000000000000..dca5a6d5a33a1 --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/states=Queued,Executing/response.json @@ -0,0 +1,79 @@ +{ + "ItemType": "Task", + "TotalResults": 1, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "TotalCounts": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 1, + "Failed": 2, + "Queued": 1, + "Success": 1879, + "TimedOut": 0, + "Interrupted": 0 + }, + "TotalCountsInOtherSpaces": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 0, + "Queued": 0, + "Success": 0, + "TimedOut": 0, + "Interrupted": 0 + }, + "Items": [ + { + "Id": "ServerTasks-118055", + "SpaceId": "Spaces-1", + "EstimatedRemainingQueueDurationSeconds": 0, + "Name": "Deploy", + "Description": "Deploy test release 0.0.41 to dev", + "Arguments": { + "DeploymentId": "Deployments-118" + }, + "State": "Queued", + "Completed": "Queued...", + "QueueTime": "2024-09-23T14:44:00.123+00:00", + "QueueTimeExpiry": null, + "StartTime": null, + "LastUpdatedTime": "2024-11-05T19:13:49.523+00:00", + "CompletedTime": null, + "ServerNode": null, + "Duration": "9 seconds", + "ErrorMessage": "", + "HasBeenPickedUpByProcessor": false, + "IsCompleted": false, + "FinishedSuccessfully": false, + "HasPendingInterruptions": false, + "CanRerun": false, + "HasWarningsOrErrors": false, + "UnmetPreconditions": null, + "ProjectId": "Projects-3", + "Links": { + "Self": "/api/tasks/ServerTasks-118055", + "Web": "/app#/Spaces-1/tasks/ServerTasks-118055", + "Raw": "/api/tasks/ServerTasks-118055/raw", + "Rerun": 
"/api/tasks/rerun/ServerTasks-118055", + "Cancel": "/api/tasks/ServerTasks-118055/cancel", + "State": "/api/tasks/ServerTasks-118055/state", + "BlockedBy": "/api/tasks/ServerTasks-118055/blockedby", + "QueuedBehind": "/api/tasks/ServerTasks-118055/queued-behind{?skip,take}", + "Details": "/api/tasks/ServerTasks-118055/details{?verbose,tail,ranges}", + "StatusMessages": "/api/tasks/ServerTasks-118055/status/messages", + "Prioritize": "/api/tasks/ServerTasks-118055/prioritize", + "Artifacts": "/api/Spaces-1/artifacts?regarding=ServerTasks-118055", + "Interruptions": "/api/Spaces-1/interruptions?regarding=ServerTasks-118055" + } + } + ], + "Links": { + "Self": "/api/tasks?skip=0&take=30", + "Template": "/api/tasks{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "Page.All": "/api/tasks?skip=0&take=2147483647", + "Page.Current": "/api/tasks?skip=0&take=30", + "Page.Last": "/api/tasks?skip=0&take=30" + } +} \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json similarity index 100% rename from octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json rename to octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-23 
14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json new file mode 100644 index 0000000000000..aa05b4987d9be --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json @@ -0,0 +1,35 @@ +{ + "ItemType": "Task", + "TotalResults": 0, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "TotalCounts": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 3, + "Queued": 0, + "Success": 1763, + "TimedOut": 0, + "Interrupted": 0 + }, + "TotalCountsInOtherSpaces": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 0, + "Queued": 0, + "Success": 0, + "TimedOut": 0, + "Interrupted": 0 + }, + "Items": [], + "Links": { + "Self": "/api/tasks?skip=0&take=30", + "Template": "/api/tasks{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "Page.All": "/api/tasks?skip=0&take=2147483647", + "Page.Current": "/api/tasks?skip=0&take=30", + "Page.Last": "/api/tasks?skip=0&take=30" + } +} \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/states=Queued,Executing/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/states=Queued,Executing/response.json new file mode 100644 index 0000000000000..3657788aaaedc --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/states=Queued,Executing/response.json @@ -0,0 +1,35 @@ +{ + "ItemType": "Task", + 
"TotalResults": 0, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "TotalCounts": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 2, + "Queued": 0, + "Success": 1, + "TimedOut": 0, + "Interrupted": 0 + }, + "TotalCountsInOtherSpaces": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 0, + "Queued": 0, + "Success": 0, + "TimedOut": 0, + "Interrupted": 0 + }, + "Items": [], + "Links": { + "Self": "/api/tasks?skip=0&take=30", + "Template": "/api/tasks{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "Page.All": "/api/tasks?skip=0&take=2147483647", + "Page.Current": "/api/tasks?skip=0&take=30", + "Page.Last": "/api/tasks?skip=0&take=30" + } +} \ No newline at end of file diff --git a/octopus_deploy/tests/test_e2e.py b/octopus_deploy/tests/test_e2e.py index 54d144a6c40a1..fb516fbeb9ca3 100644 --- a/octopus_deploy/tests/test_e2e.py +++ b/octopus_deploy/tests/test_e2e.py @@ -10,6 +10,8 @@ def test_e2e(dd_agent_check, instance): aggregator = dd_agent_check(instance) - aggregator.assert_metric('octopus_deploy.api.can_connect', 1, tags=['space_name:Default']) + aggregator.assert_metric('octopus_deploy.api.can_connect', 1, tags=[]) for metric in ALL_METRICS: aggregator.assert_metric(metric) + aggregator.assert_no_duplicate_all() + aggregator.assert_all_metrics_covered() diff --git a/octopus_deploy/tests/test_unit.py b/octopus_deploy/tests/test_unit.py index a6c49864d6d79..272bf0888cd64 100644 --- a/octopus_deploy/tests/test_unit.py +++ b/octopus_deploy/tests/test_unit.py @@ -2,8 +2,9 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -import copy +import datetime import logging +from contextlib import nullcontext as does_not_raise import mock import pytest @@ -12,246 +13,859 
@@ from datadog_checks.dev.utils import get_metadata_metrics from datadog_checks.octopus_deploy import OctopusDeployCheck -from .constants import ( - ALL_METRICS, - DEPLOYMENT_METRICS, - DEPLOYMENT_METRICS_NO_PROJECT_1, - MOCKED_TIMESTAMPS, - PROJECT_ALL_METRICS, - PROJECT_GROUP_ALL_METRICS, - PROJECT_GROUP_NO_METRICS, - PROJECT_GROUP_NO_TEST_GROUP_METRICS, - PROJECT_GROUP_ONLY_TEST_GROUP_METRICS, - PROJECT_NO_METRICS, - PROJECT_ONLY_HI_METRICS, - PROJECT_ONLY_HI_MY_PROJECT_METRICS, - SERVER_NODES_METRICS, +from .constants import ALL_METRICS + +MOCKED_TIME1 = datetime.datetime.fromisoformat("2024-09-23T14:45:00.123+00:00") +MOCKED_TIME2 = MOCKED_TIME1 + datetime.timedelta(seconds=15) + + +@pytest.mark.parametrize( + ('mock_http_get', 'expected_exception', 'can_connect'), + [ + pytest.param( + { + 'http_error': { + '/api/spaces': MockResponse(status_code=500), + } + }, + pytest.raises(Exception, match=r'Could not connect to octopus API.*'), + 0, + id='http error', + ), + pytest.param( + { + 'mock_data': { + '/api/spaces': {"Items": []}, + } + }, + does_not_raise(), + 1, + id='http ok', + ), + ], + indirect=['mock_http_get'], ) +@pytest.mark.usefixtures('mock_http_get') +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_can_connect(get_current_datetime, dd_run_check, aggregator, expected_exception, can_connect): + instance = {'octopus_endpoint': 'http://localhost:80'} + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + get_current_datetime.return_value = MOCKED_TIME1 + + with expected_exception: + dd_run_check(check) + + aggregator.assert_metric('octopus_deploy.api.can_connect', can_connect) @pytest.mark.usefixtures('mock_http_get') -@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) -def test_check(get_current_datetime, dd_run_check, aggregator, instance): +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_all_metrics_covered( + 
get_current_datetime, + dd_run_check, + aggregator, +): + instance = {'octopus_endpoint': 'http://localhost:80'} check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + get_current_datetime.return_value = MOCKED_TIME1 + dd_run_check(check) aggregator.assert_metric('octopus_deploy.api.can_connect', 1) + for metric in ALL_METRICS: aggregator.assert_metric(metric) + aggregator.assert_all_metrics_covered() aggregator.assert_metrics_using_metadata(get_metadata_metrics()) @pytest.mark.parametrize( - ('mock_http_get, message'), + ('mock_http_get'), [ pytest.param( - {'http_error': {'/api/spaces': MockResponse(status_code=500)}}, - 'HTTPError: 500 Server Error: None for url: None', - id='500', - ), - pytest.param( - {'http_error': {'/api/spaces': MockResponse(status_code=404)}}, - 'HTTPError: 404 Client Error: None for url: None', - id='404', + { + 'mock_data': { + '/api/spaces': {"Items": []}, + } + }, + id='empty spaces', ), ], indirect=['mock_http_get'], ) @pytest.mark.usefixtures('mock_http_get') -def test_emits_critical_service_check_when_service_is_down(dd_run_check, aggregator, instance, message): +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_empty_spaces(get_current_datetime, dd_run_check, aggregator): + instance = {'octopus_endpoint': 'http://localhost:80'} check = OctopusDeployCheck('octopus_deploy', {}, [instance]) - with pytest.raises(Exception, match=message): - dd_run_check(check) + get_current_datetime.return_value = MOCKED_TIME1 - aggregator.assert_metric('octopus_deploy.api.can_connect', 0) - aggregator.assert_all_metrics_covered() + dd_run_check(check) + + aggregator.assert_metric('octopus_deploy.space.count', count=0) @pytest.mark.usefixtures('mock_http_get') -def test_space_invalid(dd_run_check, aggregator, instance): - invalid_space_instance = copy.deepcopy(instance) - invalid_space_instance['space'] = 'test' - check = OctopusDeployCheck('octopus_deploy', {}, [invalid_space_instance]) - with 
pytest.raises(Exception, match=r'Space ID not found for provided space name test, does it exist'): - dd_run_check(check) +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_one_space(get_current_datetime, dd_run_check, aggregator): + instance = {'octopus_endpoint': 'http://localhost:80'} + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + get_current_datetime.return_value = MOCKED_TIME1 - aggregator.assert_metric('octopus_deploy.api.can_connect', 1) - aggregator.assert_all_metrics_covered() + dd_run_check(check) + + aggregator.assert_metric('octopus_deploy.space.count', 1, tags=['space_id:Spaces-1', 'space_name:Default']) @pytest.mark.usefixtures('mock_http_get') -@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) -def test_space_cached(get_current_datetime, dd_run_check, aggregator, instance): +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_project_groups(get_current_datetime, dd_run_check, aggregator): + instance = {'octopus_endpoint': 'http://localhost:80'} check = OctopusDeployCheck('octopus_deploy', {}, [instance]) - check._get_space_id = mock.MagicMock() - check.space_id = "Spaces-1" + get_current_datetime.return_value = MOCKED_TIME1 + dd_run_check(check) - assert check._get_space_id.call_count == 0 - aggregator.assert_metric('octopus_deploy.api.can_connect', 1) + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + 1, + tags=['project_group_id:ProjectGroups-1', 'project_group_name:Default Project Group', 'space_name:Default'], + ) + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + 1, + tags=['project_group_id:ProjectGroups-2', 'project_group_name:test-group', 'space_name:Default'], + ) + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + 1, + tags=['project_group_id:ProjectGroups-3', 'project_group_name:hello', 'space_name:Default'], + ) + + 
+@pytest.mark.usefixtures('mock_http_get') +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_projects(get_current_datetime, dd_run_check, aggregator): + instance = {'octopus_endpoint': 'http://localhost:80'} + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + get_current_datetime.return_value = MOCKED_TIME1 + + dd_run_check(check) + + aggregator.assert_metric( + 'octopus_deploy.project.count', + 1, + tags=[ + 'project_id:Projects-1', + 'project_name:test-api', + 'project_group_name:Default Project Group', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.project.count', + 1, + tags=[ + 'project_id:Projects-2', + 'project_name:my-project', + 'project_group_name:Default Project Group', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.project.count', + 1, + tags=[ + 'project_id:Projects-3', + 'project_name:test', + 'project_group_name:Default Project Group', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.project.count', + 1, + tags=['project_id:Projects-4', 'project_name:hi', 'project_group_name:test-group', 'space_name:Default'], + ) + + +@pytest.mark.usefixtures('mock_http_get') +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_queued_or_running_tasks(get_current_datetime, dd_run_check, aggregator): + instance = {'octopus_endpoint': 'http://localhost:80'} + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + get_current_datetime.return_value = MOCKED_TIME1 + + dd_run_check(check) + + aggregator.assert_metric( + 'octopus_deploy.deployment.count', + 1, + tags=[ + 'task_id:ServerTasks-118048', + 'task_name:Deploy', + 'task_state:Executing', + 'project_name:my-project', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.queued_time', + 30, + tags=[ + 'task_id:ServerTasks-118048', + 'task_name:Deploy', + 'task_state:Executing', + 
'project_name:my-project', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.executing_time', + 150, + tags=[ + 'task_id:ServerTasks-118048', + 'task_name:Deploy', + 'task_state:Executing', + 'project_name:my-project', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.completed_time', + 0, + count=0, + tags=[ + 'task_id:ServerTasks-118048', + 'task_name:Deploy', + 'task_state:Executing', + 'project_name:my-project', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.count', + 1, + tags=[ + 'task_id:ServerTasks-118055', + 'task_name:Deploy', + 'task_state:Queued', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.queued_time', + 60, + tags=[ + 'task_id:ServerTasks-118055', + 'task_name:Deploy', + 'task_state:Queued', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.executing_time', + 0, + count=0, + tags=[ + 'task_id:ServerTasks-118055', + 'task_name:Deploy', + 'task_state:Queued', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.completed_time', + 0, + count=0, + tags=[ + 'task_id:ServerTasks-118055', + 'task_name:Deploy', + 'task_state:Queued', + 'project_name:test', + 'space_name:Default', + ], + ) + + +@pytest.mark.usefixtures('mock_http_get') +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_completed_tasks(get_current_datetime, dd_run_check, aggregator): + instance = {'octopus_endpoint': 'http://localhost:80'} + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + + get_current_datetime.return_value = MOCKED_TIME1 + dd_run_check(check) + metrics = aggregator.metrics('octopus_deploy.deployment.count') + for metric in metrics: + assert not ('project_name:test-api' in metric.tags and 'task_state:Success' in 
metric.tags) + assert not ('project_name:test' in metric.tags and 'task_state:Success' in metric.tags) + + get_current_datetime.return_value = MOCKED_TIME2 + dd_run_check(check) + + aggregator.assert_metric( + 'octopus_deploy.deployment.count', + 1, + tags=[ + 'task_id:ServerTasks-1847', + 'task_name:Deploy', + 'task_state:Failed', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.queued_time', + 110, + tags=[ + 'task_id:ServerTasks-1847', + 'task_name:Deploy', + 'task_state:Failed', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.executing_time', + 50, + tags=[ + 'task_id:ServerTasks-1847', + 'task_name:Deploy', + 'task_state:Failed', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.completed_time', + 5, + tags=[ + 'task_id:ServerTasks-1847', + 'task_name:Deploy', + 'task_state:Failed', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.count', + 1, + tags=[ + 'task_id:ServerTasks-1846', + 'task_name:Deploy', + 'task_state:Success', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.queued_time', + 90, + tags=[ + 'task_id:ServerTasks-1846', + 'task_name:Deploy', + 'task_state:Success', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.executing_time', + 54, + tags=[ + 'task_id:ServerTasks-1846', + 'task_name:Deploy', + 'task_state:Success', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.completed_time', + 1, + tags=[ + 'task_id:ServerTasks-1846', + 'task_name:Deploy', + 'task_state:Success', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.count', + tags=[ + 
'task_id:ServerTasks-1845', + 'task_name:Deploy', + 'task_state:Success', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.queued_time', + 18, + tags=[ + 'task_id:ServerTasks-1845', + 'task_name:Deploy', + 'task_state:Success', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.executing_time', + 41, + tags=[ + 'task_id:ServerTasks-1845', + 'task_name:Deploy', + 'task_state:Success', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.completed_time', + 14, + tags=[ + 'task_id:ServerTasks-1845', + 'task_name:Deploy', + 'task_state:Success', + 'project_name:test', + 'space_name:Default', + ], + ) @pytest.mark.parametrize( - 'project_groups_config, expected_metrics', + ('mock_http_get'), [ - pytest.param(None, PROJECT_GROUP_ALL_METRICS, id="default"), - pytest.param( - {'include': []}, - PROJECT_GROUP_ALL_METRICS, - id="empty include", - ), - pytest.param( - {'include': ['test-group']}, - PROJECT_GROUP_ONLY_TEST_GROUP_METRICS, - id="include", - ), - pytest.param( - {'include': ['test-group'], 'limit': 1}, - PROJECT_GROUP_ONLY_TEST_GROUP_METRICS, - id="within limit", - ), pytest.param( - {'include': ['test-group'], 'limit': 0}, - PROJECT_GROUP_NO_METRICS, - id="limit hit", - ), - pytest.param( - {'include': ['test-group'], 'exclude': ['test-group']}, - PROJECT_GROUP_NO_METRICS, - id="excluded", - ), - pytest.param( - {'include': ['.*'], 'exclude': ['test-group']}, - PROJECT_GROUP_NO_TEST_GROUP_METRICS, - id="one excluded", - ), - pytest.param( - {'include': ['.*'], 'exclude': ['testing']}, - PROJECT_GROUP_ALL_METRICS, - id="excluded invalid", + { + 'mock_data': { + '/api/spaces': { + "Items": [ + { + "Id": "Spaces-1", + "Name": "First", + }, + { + "Id": "Spaces-2", + "Name": "Second", + }, + ] + }, + '/api/Spaces-1/projectgroups': {"Items": []}, + '/api/Spaces-2/projectgroups': 
{"Items": []}, + } + }, + id='empty spaces', ), ], + indirect=['mock_http_get'], ) @pytest.mark.usefixtures('mock_http_get') -@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) -def test_project_groups_discovery( - get_current_datetime, dd_run_check, aggregator, instance, project_groups_config, expected_metrics -): - instance = copy.deepcopy(instance) - instance['project_groups'] = project_groups_config +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_discovery_spaces(get_current_datetime, dd_run_check, aggregator): + instance = { + 'octopus_endpoint': 'http://localhost:80', + 'spaces': { + 'include': ['Second'], + }, + } check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + + get_current_datetime.return_value = MOCKED_TIME1 dd_run_check(check) - for metric in expected_metrics: - aggregator.assert_metric(metric["name"], count=metric["count"], tags=metric["tags"]) + + aggregator.assert_metric('octopus_deploy.space.count', tags=['space_name:Default', 'space_name:First'], count=0) + aggregator.assert_metric('octopus_deploy.space.count', tags=['space_id:Spaces-2', 'space_name:Second']) @pytest.mark.usefixtures('mock_http_get') -@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) -def test_project_groups_discovery_error(get_current_datetime, dd_run_check, instance): - instance = copy.deepcopy(instance) - instance['project_groups'] = {'include': None} +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_discovery_default_project_groups(get_current_datetime, dd_run_check, aggregator): + instance = { + 'octopus_endpoint': 'http://localhost:80', + 'project_groups': { + 'include': ['hello'], + }, + } check = OctopusDeployCheck('octopus_deploy', {}, [instance]) - with pytest.raises(Exception, match=r'Setting `include` must be an array'): - dd_run_check(check) + + 
get_current_datetime.return_value = MOCKED_TIME1 + dd_run_check(check) + + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + tags=['project_group_id:ProjectGroups-1', 'project_group_name:Default Project Group', 'space_name:Default'], + count=0, + ) + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + tags=['project_group_id:ProjectGroups-2', 'project_group_name:test-group', 'space_name:Default'], + count=0, + ) + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + 1, + tags=['project_group_id:ProjectGroups-3', 'project_group_name:hello', 'space_name:Default'], + ) + + +@pytest.mark.usefixtures('mock_http_get') +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_discovery_space_project_groups(get_current_datetime, dd_run_check, aggregator): + instance = { + 'octopus_endpoint': 'http://localhost:80', + 'spaces': { + 'include': [ + { + 'Default': { + 'project_groups': { + 'include': ['hello'], + } + } + } + ], + }, + } + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + + get_current_datetime.return_value = MOCKED_TIME1 + dd_run_check(check) + + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + tags=['project_group_id:ProjectGroups-1', 'project_group_name:Default Project Group', 'space_name:Default'], + count=0, + ) + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + tags=['project_group_id:ProjectGroups-2', 'project_group_name:test-group', 'space_name:Default'], + count=0, + ) + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + 1, + tags=['project_group_id:ProjectGroups-3', 'project_group_name:hello', 'space_name:Default'], + ) + + +@pytest.mark.usefixtures('mock_http_get') +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_discovery_default_projects(get_current_datetime, dd_run_check, aggregator): + instance = { + 'octopus_endpoint': 'http://localhost:80', + 'projects': { + 'include': 
['test-api'], + }, + } + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + + get_current_datetime.return_value = MOCKED_TIME1 + dd_run_check(check) + + aggregator.assert_metric( + 'octopus_deploy.project.count', + 1, + tags=[ + 'project_id:Projects-1', + 'project_name:test-api', + 'project_group_name:Default Project Group', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.project.count', + tags=[ + 'project_id:Projects-2', + 'project_name:my-project', + 'project_group_name:Default Project Group', + 'space_name:Default', + ], + count=0, + ) + aggregator.assert_metric( + 'octopus_deploy.project.count', + tags=[ + 'project_name:test', + 'project_name:test', + 'project_group_name:Default Project Group', + 'space_name:Default', + ], + count=0, + ) + aggregator.assert_metric( + 'octopus_deploy.project.count', + tags=['project_id:Projects-4', 'project_name:hi', 'project_group_name:test-group', 'space_name:Default'], + count=0, + ) + + +@pytest.mark.usefixtures('mock_http_get') +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_discovery_space_project_group_projects(get_current_datetime, dd_run_check, aggregator): + instance = { + 'octopus_endpoint': 'http://localhost:80', + 'spaces': { + 'include': [ + { + 'Default': { + 'project_groups': { + 'include': [ + { + 'hello': { + 'projects': { + 'include': ['.*'], + }, + } + } + ], + }, + } + } + ], + }, + } + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + + get_current_datetime.return_value = MOCKED_TIME1 + dd_run_check(check) + + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + tags=['project_group_id:ProjectGroups-1', 'project_group_name:Default Project Group', 'space_name:Default'], + count=0, + ) + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + tags=['project_group_id:ProjectGroups-2', 'project_group_name:test-group', 'space_name:Default'], + count=0, + ) + aggregator.assert_metric( + 
'octopus_deploy.project_group.count', + 1, + tags=['project_group_id:ProjectGroups-3', 'project_group_name:hello', 'space_name:Default'], + ) @pytest.mark.parametrize( - 'project_groups_config, expected_metrics', + ('instance'), [ - pytest.param(None, PROJECT_ALL_METRICS, id="default"), - pytest.param( - {'include': [{'test-group': {'projects': {'include': ['hi']}}}]}, - PROJECT_ONLY_HI_METRICS, - id="include", - ), - pytest.param( - {'include': [{'.*': {'projects': {'include': ['.*'], 'limit': 1}}}]}, - PROJECT_ONLY_HI_MY_PROJECT_METRICS, - id="1 limit", - ), - pytest.param( - {'include': [{'.*': {'projects': {'include': ['.*'], 'limit': 0}}}]}, - PROJECT_NO_METRICS, - id="limit hit", - ), pytest.param( { - 'exclude': ['Default.*'], - 'include': [{'test-group': {'projects': {'include': ['.*']}}}], + 'octopus_endpoint': 'http://localhost:80', + 'spaces': { + 'include': ['Default'], + }, + 'project_groups': { + 'include': ['Default Project Group'], + }, + 'projects': { + 'include': ['.*'], + }, }, - PROJECT_ONLY_HI_METRICS, - id="excluded default", + id='all default', ), pytest.param( - {'include': [{'.*': {'projects': {'include': ['.*'], 'exclude': ['.*']}}}]}, - PROJECT_NO_METRICS, - id="all excluded", + { + 'octopus_endpoint': 'http://localhost:80', + 'spaces': { + 'include': [ + { + 'Default': { + 'project_groups': { + 'include': ['Default Project Group'], + }, + } + } + ], + }, + 'projects': { + 'include': ['.*'], + }, + }, + id='with project groups', ), pytest.param( - {'include': [{'.*': {'projects': {'include': ['.*'], 'exclude': ['heyhey']}}}]}, - PROJECT_ALL_METRICS, - id="excluded invalud", + { + 'octopus_endpoint': 'http://localhost:80', + 'spaces': { + 'include': [ + { + 'Default': { + 'project_groups': { + 'include': [ + { + 'Default Project Group': { + 'projects': { + 'include': ['.*'], + }, + } + } + ], + }, + } + } + ], + }, + }, + id='with projects', ), ], ) @pytest.mark.usefixtures('mock_http_get') 
-@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) -def test_projects_discovery( - get_current_datetime, dd_run_check, aggregator, instance, project_groups_config, expected_metrics -): - instance = copy.deepcopy(instance) - instance['project_groups'] = project_groups_config +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_run_twice(get_current_datetime, dd_run_check, aggregator, instance): check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + get_current_datetime.return_value = MOCKED_TIME1 + + dd_run_check(check) + + aggregator.assert_metric('octopus_deploy.space.count') + aggregator.assert_metric('octopus_deploy.project_group.count') + aggregator.assert_metric('octopus_deploy.project.count') + + get_current_datetime.return_value = MOCKED_TIME2 dd_run_check(check) - for metric in expected_metrics: - aggregator.assert_metric(metric["name"], count=metric["count"], tags=metric["tags"]) + + aggregator.assert_metric('octopus_deploy.space.count') + aggregator.assert_metric('octopus_deploy.project_group.count') + aggregator.assert_metric('octopus_deploy.project.count') @pytest.mark.usefixtures('mock_http_get') -@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) -def test_deployment_metrics(get_current_datetime, dd_run_check, aggregator, instance, caplog): - caplog.set_level(logging.DEBUG) +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_empty_include(get_current_datetime, dd_run_check, aggregator): + instance = { + 'octopus_endpoint': 'http://localhost:80', + 'spaces': { + 'include': [], + }, + } check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + get_current_datetime.return_value = MOCKED_TIME1 + dd_run_check(check) - for metric in DEPLOYMENT_METRICS: - aggregator.assert_metric(metric["name"], count=metric["count"], value=metric["value"], tags=metric["tags"]) + 
aggregator.assert_metric('octopus_deploy.space.count', count=0) @pytest.mark.parametrize( - ('mock_http_get, message'), + ('mock_http_get', 'expected_log'), [ pytest.param( { 'http_error': { - '/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23' - '14:45:58.888492+00:00': MockResponse(status_code=404) + '/api/Spaces-1/tasks': MockResponse(status_code=500), } }, - 'Encountered a RequestException in \'_get_new_tasks_for_project\'', - id='404', + 'Failed to access endpoint: api/Spaces-1/tasks: 500 Server Error: None for url: None', + id='http error', ), ], indirect=['mock_http_get'], ) @pytest.mark.usefixtures('mock_http_get') -@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) -def test_exception_when_getting_tasks(get_current_datetime, dd_run_check, aggregator, instance, message, caplog): +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_tasks_endpoint_unavailable(get_current_datetime, dd_run_check, expected_log, caplog): + instance = {'octopus_endpoint': 'http://localhost:80'} check = OctopusDeployCheck('octopus_deploy', {}, [instance]) - caplog.set_level(logging.INFO) + get_current_datetime.return_value = MOCKED_TIME1 + caplog.set_level(logging.WARNING) dd_run_check(check) - assert message in caplog.text - - for metric in PROJECT_GROUP_ALL_METRICS + PROJECT_ALL_METRICS + DEPLOYMENT_METRICS_NO_PROJECT_1: - aggregator.assert_metric(metric["name"], count=metric["count"], tags=metric["tags"]) + assert expected_log in caplog.text @pytest.mark.usefixtures('mock_http_get') -@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) -def test_octopus_server_node_metrics(get_current_datetime, dd_run_check, aggregator, instance): +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_server_node_metrics(get_current_datetime, dd_run_check, aggregator): + instance = {'octopus_endpoint': 
'http://localhost:80'} check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + get_current_datetime.return_value = MOCKED_TIME1 dd_run_check(check) + aggregator.assert_metric( + "octopus_deploy.server_node.count", + 1, + count=1, + tags=[ + 'server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n', + 'server_node_name:octopus-i8932-79236734bc234-09h234n', + ], + ) + aggregator.assert_metric( + "octopus_deploy.server_node.max_concurrent_tasks", + 5, + count=1, + tags=[ + 'server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n', + 'server_node_name:octopus-i8932-79236734bc234-09h234n', + ], + ) + aggregator.assert_metric( + "octopus_deploy.server_node.in_maintenance_mode", + 0, + count=1, + tags=[ + 'server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n', + 'server_node_name:octopus-i8932-79236734bc234-09h234n', + ], + ) - for metric in SERVER_NODES_METRICS: - aggregator.assert_metric(metric["name"], count=metric["count"], value=metric["value"], tags=metric["tags"]) + +@pytest.mark.parametrize( + ('mock_http_get', 'expected_log'), + [ + pytest.param( + { + 'http_error': { + '/api/octopusservernodes': MockResponse(status_code=500), + } + }, + 'Failed to access endpoint: api/octopusservernodes: 500 Server Error: None for url: None', + id='http error', + ), + ], + indirect=['mock_http_get'], +) +@pytest.mark.usefixtures('mock_http_get') +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_server_node_endpoint_failed(get_current_datetime, dd_run_check, aggregator, expected_log, caplog): + instance = {'octopus_endpoint': 'http://localhost:80'} + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + get_current_datetime.return_value = MOCKED_TIME1 + caplog.set_level(logging.WARNING) + dd_run_check(check) + assert expected_log in caplog.text + aggregator.assert_metric( + "octopus_deploy.server_node.count", + 1, + count=0, + tags=[ + 
'server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n', + 'server_node_name:octopus-i8932-79236734bc234-09h234n', + ], + ) + aggregator.assert_metric( + "octopus_deploy.server_node.max_concurrent_tasks", + 5, + count=0, + tags=[ + 'server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n', + 'server_node_name:octopus-i8932-79236734bc234-09h234n', + ], + ) + aggregator.assert_metric( + "octopus_deploy.server_node.in_maintenance_mode", + 5, + count=0, + tags=[ + 'server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n', + 'server_node_name:octopus-i8932-79236734bc234-09h234n', + ], + )