From 27c6f72958d754eb9b94dbd765377deb7b540a07 Mon Sep 17 00:00:00 2001
From: Mohammad Rafi
Date: Thu, 6 Jun 2024 17:36:18 -0400
Subject: [PATCH 1/8] using psutil.oneshot for faster performance

---
 process/assets/configuration/spec.yaml    |   7 +
 .../process/config_models/defaults.py     |   4 +
 .../process/config_models/instance.py     |   1 +
 .../process/data/conf.yaml.example        |   6 +
 process/datadog_checks/process/process.py | 122 ++++++++++--------
 process/hatch.toml                        |   2 +
 process/tests/test_bench.py               |  36 ++++++
 process/tests/test_process.py             |  19 +++
 8 files changed, 141 insertions(+), 56 deletions(-)
 create mode 100644 process/tests/test_bench.py

diff --git a/process/assets/configuration/spec.yaml b/process/assets/configuration/spec.yaml
index eaaa18888be33..5b53852e97a0d 100644
--- a/process/assets/configuration/spec.yaml
+++ b/process/assets/configuration/spec.yaml
@@ -140,4 +140,11 @@ files:
       value:
         type: integer
         example: 120
+    - name: use_oneshot
+      description: |
+        If set to `true`, the check uses `psutil.Process().oneshot()` to collect and cache process metrics.
+        This can help speed up the check completion.
+      value:
+        type: boolean
+        example: false
     - template: instances/default
diff --git a/process/datadog_checks/process/config_models/defaults.py b/process/datadog_checks/process/config_models/defaults.py
index b58dad3ff440f..05adfb09a369c 100644
--- a/process/datadog_checks/process/config_models/defaults.py
+++ b/process/datadog_checks/process/config_models/defaults.py
@@ -50,3 +50,7 @@ def instance_pid_cache_duration():
 
 def instance_try_sudo():
     return False
+
+
+def instance_use_oneshot():
+    return False
diff --git a/process/datadog_checks/process/config_models/instance.py b/process/datadog_checks/process/config_models/instance.py
index 68fb748b2851b..acaa1342585c0 100644
--- a/process/datadog_checks/process/config_models/instance.py
+++ b/process/datadog_checks/process/config_models/instance.py
@@ -51,6 +51,7 @@ class InstanceConfig(BaseModel):
     tags: Optional[tuple[str, ...]] = None
     thresholds: Optional[MappingProxyType[str, Any]] = None
     try_sudo: Optional[bool] = None
+    use_oneshot: Optional[bool] = None
     user: Optional[str] = None
 
     @model_validator(mode='before')
diff --git a/process/datadog_checks/process/data/conf.yaml.example b/process/datadog_checks/process/data/conf.yaml.example
index 15cdd22667c27..4622ee76f3c44 100644
--- a/process/datadog_checks/process/data/conf.yaml.example
+++ b/process/datadog_checks/process/data/conf.yaml.example
@@ -136,6 +136,12 @@ instances:
     #
     # pid_cache_duration: 120
 
+    ## @param use_oneshot - boolean - optional - default: false
+    ## If set to `true`, the check uses `psutil.Process().oneshot()` to collect and cache process metrics.
+    ## This can help speed up the check completion.
+    #
+    # use_oneshot: false
+
     ## @param tags - list of strings - optional
     ## A list of tags to attach to every metric and service check emitted by this instance.
     ##
diff --git a/process/datadog_checks/process/process.py b/process/datadog_checks/process/process.py
index d6b7a8e5cdc8f..e5a7041721a82 100644
--- a/process/datadog_checks/process/process.py
+++ b/process/datadog_checks/process/process.py
@@ -77,6 +77,7 @@ def __init__(self, name, init_config, instances):
         self.collect_children = is_affirmative(self.instance.get('collect_children', False))
         self.user = self.instance.get('user', False)
         self.try_sudo = self.instance.get('try_sudo', False)
+        self.use_oneshot = self.instance.get('use_oneshot', False)
 
         # ad stands for access denied
         # We cache the PIDs getting this error and don't iterate on them more often than `access_denied_cache_duration``
@@ -293,65 +294,74 @@ def get_process_state(self, name, pids):
 
             p = self.process_cache[name][pid]
 
-            meminfo = self.psutil_wrapper(p, 'memory_info', ['rss', 'vms'])
-            st['rss'].append(meminfo.get('rss'))
-            st['vms'].append(meminfo.get('vms'))
+            if self.use_oneshot:
+                with p.oneshot():
+                    st = self.run_psutil_methods(pid, p, st, new_process)
+            else:
+                st = self.run_psutil_methods(pid, p, st, new_process)
 
-            mem_percent = self.psutil_wrapper(p, 'memory_percent')
-            st['mem_pct'].append(mem_percent)
+        return st
 
-            # will fail on win32 and solaris
-            shared_mem = self.psutil_wrapper(p, 'memory_info', ['shared']).get('shared')
-            if shared_mem is not None and meminfo.get('rss') is not None:
-                st['real'].append(meminfo['rss'] - shared_mem)
-            else:
-                st['real'].append(None)
-
-            ctxinfo = self.psutil_wrapper(p, 'num_ctx_switches', ['voluntary', 'involuntary'])
-            st['ctx_swtch_vol'].append(ctxinfo.get('voluntary'))
-            st['ctx_swtch_invol'].append(ctxinfo.get('involuntary'))
-
-            st['thr'].append(self.psutil_wrapper(p, 'num_threads'))
-
-            cpu_percent = self.psutil_wrapper(p, 'cpu_percent')
-            cpu_count = psutil.cpu_count()
-            if not new_process:
-                # psutil returns `0.` for `cpu_percent` the
-                # first time it's sampled on a process,
-                # so save the value only on non-new processes
-                st['cpu'].append(cpu_percent)
-                if cpu_count > 0 and cpu_percent is not None:
-                    st['cpu_norm'].append(cpu_percent / cpu_count)
-                else:
-                    self.log.debug('could not calculate the normalized cpu pct, cpu_count: %s', cpu_count)
-            st['open_fd'].append(self.psutil_wrapper(p, 'num_fds'))
-            st['open_handle'].append(self.psutil_wrapper(p, 'num_handles'))
-
-            ioinfo = self.psutil_wrapper(p, 'io_counters', ['read_count', 'write_count', 'read_bytes', 'write_bytes'])
-            st['r_count'].append(ioinfo.get('read_count'))
-            st['w_count'].append(ioinfo.get('write_count'))
-            st['r_bytes'].append(ioinfo.get('read_bytes'))
-            st['w_bytes'].append(ioinfo.get('write_bytes'))
-
-            pagefault_stats = self.get_pagefault_stats(pid)
-            if pagefault_stats is not None:
-                (minflt, cminflt, majflt, cmajflt) = pagefault_stats
-                st['minflt'].append(minflt)
-                st['cminflt'].append(cminflt)
-                st['majflt'].append(majflt)
-                st['cmajflt'].append(cmajflt)
-            else:
-                st['minflt'].append(None)
-                st['cminflt'].append(None)
-                st['majflt'].append(None)
-                st['cmajflt'].append(None)
-
-            # calculate process run time
-            create_time = self.psutil_wrapper(p, 'create_time')
-            if create_time is not None:
-                now = time.time()
-                run_time = now - create_time
-                st['run_time'].append(run_time)
+    def run_psutil_methods(self, pid, p, st, new_process):
+        meminfo = self.psutil_wrapper(p, 'memory_info', ['rss', 'vms'])
+        st['rss'].append(meminfo.get('rss'))
+        st['vms'].append(meminfo.get('vms'))
+
+        mem_percent = self.psutil_wrapper(p, 'memory_percent')
+        st['mem_pct'].append(mem_percent)
+
+        # will fail on win32 and solaris
+        shared_mem = self.psutil_wrapper(p, 'memory_info', ['shared']).get('shared')
+        if shared_mem is not None and meminfo.get('rss') is not None:
+            st['real'].append(meminfo['rss'] - shared_mem)
+        else:
+            st['real'].append(None)
+
+        ctxinfo = self.psutil_wrapper(p, 'num_ctx_switches', ['voluntary', 'involuntary'])
+        st['ctx_swtch_vol'].append(ctxinfo.get('voluntary'))
+        st['ctx_swtch_invol'].append(ctxinfo.get('involuntary'))
+
+        st['thr'].append(self.psutil_wrapper(p, 'num_threads'))
+
+        cpu_percent = self.psutil_wrapper(p, 'cpu_percent')
+        cpu_count = psutil.cpu_count()
+        if not new_process:
+            # psutil returns `0.` for `cpu_percent` the
+            # first time it's sampled on a process,
+            # so save the value only on non-new processes
+            st['cpu'].append(cpu_percent)
+            if cpu_count > 0 and cpu_percent is not None:
+                st['cpu_norm'].append(cpu_percent / cpu_count)
+            else:
+                self.log.debug('could not calculate the normalized cpu pct, cpu_count: %s', cpu_count)
+        st['open_fd'].append(self.psutil_wrapper(p, 'num_fds'))
+        st['open_handle'].append(self.psutil_wrapper(p, 'num_handles'))
+
+        ioinfo = self.psutil_wrapper(p, 'io_counters', ['read_count', 'write_count', 'read_bytes', 'write_bytes'])
+        st['r_count'].append(ioinfo.get('read_count'))
+        st['w_count'].append(ioinfo.get('write_count'))
+        st['r_bytes'].append(ioinfo.get('read_bytes'))
+        st['w_bytes'].append(ioinfo.get('write_bytes'))
+
+        pagefault_stats = self.get_pagefault_stats(pid)
+        if pagefault_stats is not None:
+            (minflt, cminflt, majflt, cmajflt) = pagefault_stats
+            st['minflt'].append(minflt)
+            st['cminflt'].append(cminflt)
+            st['majflt'].append(majflt)
+            st['cmajflt'].append(cmajflt)
+        else:
+            st['minflt'].append(None)
+            st['cminflt'].append(None)
+            st['majflt'].append(None)
+            st['cmajflt'].append(None)
+
+        # calculate process run time
+        create_time = self.psutil_wrapper(p, 'create_time')
+        if create_time is not None:
+            now = time.time()
+            run_time = now - create_time
+            st['run_time'].append(run_time)
 
         return st
diff --git a/process/hatch.toml b/process/hatch.toml
index e4697f09874ad..b185708d8116e 100644
--- a/process/hatch.toml
+++ b/process/hatch.toml
@@ -2,3 +2,5 @@
 
 [[envs.default.matrix]]
 python = ["2.7", "3.11"]
+
+[envs.bench]
\ No newline at end of file
diff --git a/process/tests/test_bench.py b/process/tests/test_bench.py
new file mode 100644
index 0000000000000..16fd3ed4ebedf
--- /dev/null
+++ b/process/tests/test_bench.py
@@ -0,0 +1,36 @@
+# (C) Datadog, Inc. 2018-present
+# All rights reserved
+# Licensed under a 3-clause BSD style license (see LICENSE)
+
+from datadog_checks.process import ProcessCheck
+
+from . import common
+
+
+def test_run(benchmark, dd_run_check):
+    instance = {
+        'name': 'py',
+        'search_string': ['python'],
+        'exact_match': False,
+        'ignored_denied_access': True,
+        'thresholds': {'warning': [1, 10], 'critical': [1, 100]},
+    }
+    process = ProcessCheck(common.CHECK_NAME, {}, [instance])
+    dd_run_check(process)
+
+    benchmark(dd_run_check, process)
+
+
+def test_run_oneshot(benchmark, dd_run_check):
+    instance = {
+        'name': 'py',
+        'search_string': ['python'],
+        'exact_match': False,
+        'ignored_denied_access': True,
+        'use_oneshot': True,
+        'thresholds': {'warning': [1, 10], 'critical': [1, 100]},
+    }
+    process = ProcessCheck(common.CHECK_NAME, {}, [instance])
+    dd_run_check(process)
+
+    benchmark(dd_run_check, process)
diff --git a/process/tests/test_process.py b/process/tests/test_process.py
index 233e98a26e118..c545df9a4bfa8 100644
--- a/process/tests/test_process.py
+++ b/process/tests/test_process.py
@@ -45,6 +45,17 @@ def is_running(self):
     def children(self, recursive=False):
         return []
 
+    # https://stackoverflow.com/questions/5093382/object-becomes-none-when-using-a-context-manager
+    def oneshot(self):
+        class MockOneShot(object):
+            def __enter__(self):
+                return self
+
+            def __exit__(self, type, value, traceback):
+                pass
+
+        return MockOneShot()
+
 
 class NamedMockProcess(object):
     def __init__(self, name):
@@ -211,6 +222,14 @@ def test_check_filter_user(mock_process, reset_process_list_cache, aggregator, d
     aggregator.assert_metric('system.processes.number', value=2, tags=generate_expected_tags(instance))
 
 
+@patch('psutil.Process', return_value=MockProcess())
+def test_use_oneshot(mock_process, reset_process_list_cache, aggregator, dd_run_check):
+    instance = {'name': 'foo', 'pid': 1, 'use_oneshot': True}
+    process = ProcessCheck(common.CHECK_NAME, {}, [instance])
+    dd_run_check(process)
+    aggregator.assert_metric('system.processes.number', value=1, tags=generate_expected_tags(instance))
+
+
 def test_check_missing_pid(aggregator, dd_run_check):
     instance = {'name': 'foo', 'pid_file': '/foo/bar/baz'}
     process = ProcessCheck(common.CHECK_NAME, {}, [instance])

From 9e98a171d54e11715fe2137a1bf8ce716e65657d Mon Sep 17 00:00:00 2001
From: Mohammad Rafi
Date: Wed, 12 Jun 2024 11:17:56 -0400
Subject: [PATCH 2/8] adding debug line

---
 process/datadog_checks/process/process.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/process/datadog_checks/process/process.py b/process/datadog_checks/process/process.py
index e5a7041721a82..6fb9642cdf494 100644
--- a/process/datadog_checks/process/process.py
+++ b/process/datadog_checks/process/process.py
@@ -295,6 +295,7 @@ def get_process_state(self, name, pids):
             p = self.process_cache[name][pid]
 
             if self.use_oneshot:
+                self.log.debug("Using psutil Process.oneshot()")
                 with p.oneshot():
                     st = self.run_psutil_methods(pid, p, st, new_process)
             else:

From 1f3d709477cfb69bb369dcf61c3b60901553f4d9 Mon Sep 17 00:00:00 2001
From: Mohammad Rafi
Date: Wed, 12 Jun 2024 12:08:51 -0400
Subject: [PATCH 3/8] setting use_oneshot to default true

---
 process/assets/configuration/spec.yaml    |  2 +-
 .../process/config_models/defaults.py     |  2 +-
 .../process/data/conf.yaml.example        |  4 ++--
 process/datadog_checks/process/process.py |  2 +-
 process/tests/test_bench.py               |  1 +
 process/tests/test_process.py             | 42 +++++++++++++++++++
 6 files changed, 48 insertions(+), 5 deletions(-)

diff --git a/process/assets/configuration/spec.yaml b/process/assets/configuration/spec.yaml
index 5b53852e97a0d..af74d54a29c37 100644
--- a/process/assets/configuration/spec.yaml
+++ b/process/assets/configuration/spec.yaml
@@ -146,5 +146,5 @@ files:
         This can help speed up the check completion.
       value:
         type: boolean
-        example: false
+        example: true
     - template: instances/default
diff --git a/process/datadog_checks/process/config_models/defaults.py b/process/datadog_checks/process/config_models/defaults.py
index 05adfb09a369c..e45539f47a6d5 100644
--- a/process/datadog_checks/process/config_models/defaults.py
+++ b/process/datadog_checks/process/config_models/defaults.py
@@ -53,4 +53,4 @@ def instance_try_sudo():
 
 
 def instance_use_oneshot():
-    return False
+    return True
diff --git a/process/datadog_checks/process/data/conf.yaml.example b/process/datadog_checks/process/data/conf.yaml.example
index 4622ee76f3c44..7edb35e068f1e 100644
--- a/process/datadog_checks/process/data/conf.yaml.example
+++ b/process/datadog_checks/process/data/conf.yaml.example
@@ -136,11 +136,11 @@ instances:
     #
     # pid_cache_duration: 120
 
-    ## @param use_oneshot - boolean - optional - default: false
+    ## @param use_oneshot - boolean - optional - default: true
     ## If set to `true`, the check uses `psutil.Process().oneshot()` to collect and cache process metrics.
     ## This can help speed up the check completion.
     #
-    # use_oneshot: false
+    # use_oneshot: true
 
     ## @param tags - list of strings - optional
     ## A list of tags to attach to every metric and service check emitted by this instance.
diff --git a/process/datadog_checks/process/process.py b/process/datadog_checks/process/process.py
index 6fb9642cdf494..467f40b5e9708 100644
--- a/process/datadog_checks/process/process.py
+++ b/process/datadog_checks/process/process.py
@@ -77,7 +77,7 @@ def __init__(self, name, init_config, instances):
         self.collect_children = is_affirmative(self.instance.get('collect_children', False))
         self.user = self.instance.get('user', False)
         self.try_sudo = self.instance.get('try_sudo', False)
-        self.use_oneshot = self.instance.get('use_oneshot', False)
+        self.use_oneshot = is_affirmative(self.instance.get('use_oneshot', True))
 
         # ad stands for access denied
         # We cache the PIDs getting this error and don't iterate on them more often than `access_denied_cache_duration``
diff --git a/process/tests/test_bench.py b/process/tests/test_bench.py
index 16fd3ed4ebedf..0d42123529b42 100644
--- a/process/tests/test_bench.py
+++ b/process/tests/test_bench.py
@@ -13,6 +13,7 @@ def test_run(benchmark, dd_run_check):
         'search_string': ['python'],
         'exact_match': False,
         'ignored_denied_access': True,
+        'use_oneshot': False,
         'thresholds': {'warning': [1, 10], 'critical': [1, 100]},
     }
     process = ProcessCheck(common.CHECK_NAME, {}, [instance])
diff --git a/process/tests/test_process.py b/process/tests/test_process.py
index c545df9a4bfa8..c2878968e8ecd 100644
--- a/process/tests/test_process.py
+++ b/process/tests/test_process.py
@@ -256,6 +256,48 @@ def test_check_real_process(aggregator, dd_run_check):
         'exact_match': False,
         'ignored_denied_access': True,
         'thresholds': {'warning': [1, 10], 'critical': [1, 100]},
+        'use_oneshot': False,
+    }
+    process = ProcessCheck(common.CHECK_NAME, {}, [instance])
+    expected_tags = generate_expected_tags(instance)
+    dd_run_check(process)
+    for mname in common.PROCESS_METRIC:
+        # cases where we don't actually expect some metrics here:
+        # - if io_counters() is not available
+        # - if memory_info_ex() is not available
+        # - first run so no `cpu.pct`
+        if (
+            (not _PSUTIL_IO_COUNTERS and '.io' in mname)
+            or (not _PSUTIL_MEM_SHARED and 'mem.real' in mname)
+            or mname == 'system.processes.cpu.pct'
+        ):
+            continue
+
+        if Platform.is_windows():
+            metric = common.UNIX_TO_WINDOWS_MAP.get(mname, mname)
+        else:
+            metric = mname
+        aggregator.assert_metric(metric, at_least=1, tags=expected_tags)
+
+    aggregator.assert_service_check('process.up', count=1, tags=expected_tags + ['process:py'])
+
+    # this requires another run
+    dd_run_check(process)
+    aggregator.assert_metric('system.processes.cpu.pct', count=1, tags=expected_tags)
+    aggregator.assert_metric('system.processes.cpu.normalized_pct', count=1, tags=expected_tags)
+
+
+def test_check_real_process_oneshot(aggregator, dd_run_check):
+    "Check that we detect python running (at least this process)"
+    from datadog_checks.base.utils.platform import Platform
+
+    instance = {
+        'name': 'py',
+        'search_string': ['python'],
+        'exact_match': False,
+        'ignored_denied_access': True,
+        'thresholds': {'warning': [1, 10], 'critical': [1, 100]},
+        'use_oneshot': True,
     }
     process = ProcessCheck(common.CHECK_NAME, {}, [instance])
     expected_tags = generate_expected_tags(instance)

From 75404ed44d806a48a4764b824d54439f677c5f43 Mon Sep 17 00:00:00 2001
From: Mohammad Rafi
Date: Wed, 12 Jun 2024 12:19:34 -0400
Subject: [PATCH 4/8] adding changelog

---
 process/changelog.d/17817.added | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 process/changelog.d/17817.added

diff --git a/process/changelog.d/17817.added b/process/changelog.d/17817.added
new file mode 100644
index 0000000000000..e4d66f3b10541
--- /dev/null
+++ b/process/changelog.d/17817.added
@@ -0,0 +1 @@
+adding `use_oneshot` parameter to speed up check runs with `psutil.Process().oneshot()`
\ No newline at end of file

From 62d263e37c4ec0ad6bacc733299b869108b7aa5a Mon Sep 17 00:00:00 2001
From: Mohammad Rafi
Date: Thu, 13 Jun 2024 10:11:40 -0400
Subject: [PATCH 5/8] removing and adjusting tests

---
 process/tests/test_bench.py   |  1 -
 process/tests/test_process.py | 55 ++---------------------------------
 2 files changed, 3 insertions(+), 53 deletions(-)

diff --git a/process/tests/test_bench.py b/process/tests/test_bench.py
index 0d42123529b42..2bbff428cefa3 100644
--- a/process/tests/test_bench.py
+++ b/process/tests/test_bench.py
@@ -1,7 +1,6 @@
 # (C) Datadog, Inc. 2018-present
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
-
 from datadog_checks.process import ProcessCheck
 
 from . import common
diff --git a/process/tests/test_process.py b/process/tests/test_process.py
index c2878968e8ecd..1ac310997e50b 100644
--- a/process/tests/test_process.py
+++ b/process/tests/test_process.py
@@ -222,14 +222,6 @@ def test_check_filter_user(mock_process, reset_process_list_cache, aggregator, d
     aggregator.assert_metric('system.processes.number', value=2, tags=generate_expected_tags(instance))
 
 
-@patch('psutil.Process', return_value=MockProcess())
-def test_use_oneshot(mock_process, reset_process_list_cache, aggregator, dd_run_check):
-    instance = {'name': 'foo', 'pid': 1, 'use_oneshot': True}
-    process = ProcessCheck(common.CHECK_NAME, {}, [instance])
-    dd_run_check(process)
-    aggregator.assert_metric('system.processes.number', value=1, tags=generate_expected_tags(instance))
-
-
 def test_check_missing_pid(aggregator, dd_run_check):
     instance = {'name': 'foo', 'pid_file': '/foo/bar/baz'}
     process = ProcessCheck(common.CHECK_NAME, {}, [instance])
@@ -245,49 +237,8 @@ def test_check_missing_process(aggregator, dd_run_check, caplog):
     aggregator.assert_service_check('process.up', count=1, status=process.CRITICAL)
     assert "Unable to find process named ['fooprocess', '/usr/bin/foo'] among processes" in caplog.text
 
-
-def test_check_real_process(aggregator, dd_run_check):
-    "Check that we detect python running (at least this process)"
-    from datadog_checks.base.utils.platform import Platform
-
-    instance = {
-        'name': 'py',
-        'search_string': ['python'],
-        'exact_match': False,
-        'ignored_denied_access': True,
-        'thresholds': {'warning': [1, 10], 'critical': [1, 100]},
-        'use_oneshot': False,
-    }
-    process = ProcessCheck(common.CHECK_NAME, {}, [instance])
-    expected_tags = generate_expected_tags(instance)
-    dd_run_check(process)
-    for mname in common.PROCESS_METRIC:
-        # cases where we don't actually expect some metrics here:
-        # - if io_counters() is not available
-        # - if memory_info_ex() is not available
-        # - first run so no `cpu.pct`
-        if (
-            (not _PSUTIL_IO_COUNTERS and '.io' in mname)
-            or (not _PSUTIL_MEM_SHARED and 'mem.real' in mname)
-            or mname == 'system.processes.cpu.pct'
-        ):
-            continue
-
-        if Platform.is_windows():
-            metric = common.UNIX_TO_WINDOWS_MAP.get(mname, mname)
-        else:
-            metric = mname
-        aggregator.assert_metric(metric, at_least=1, tags=expected_tags)
-
-    aggregator.assert_service_check('process.up', count=1, tags=expected_tags + ['process:py'])
-
-    # this requires another run
-    dd_run_check(process)
-    aggregator.assert_metric('system.processes.cpu.pct', count=1, tags=expected_tags)
-    aggregator.assert_metric('system.processes.cpu.normalized_pct', count=1, tags=expected_tags)
-
-
-def test_check_real_process_oneshot(aggregator, dd_run_check):
+@pytest.mark.parametrize("oneshot",[True,False])
+def test_check_real_process(aggregator, dd_run_check, oneshot):
     "Check that we detect python running (at least this process)"
     from datadog_checks.base.utils.platform import Platform
 
@@ -297,7 +248,7 @@ def test_check_real_process_oneshot(aggregator, dd_run_check):
         'exact_match': False,
         'ignored_denied_access': True,
         'thresholds': {'warning': [1, 10], 'critical': [1, 100]},
-        'use_oneshot': True,
+        'use_oneshot': oneshot,
     }
     process = ProcessCheck(common.CHECK_NAME, {}, [instance])
     expected_tags = generate_expected_tags(instance)

From dd85dc0ac88ce8f700635c237c4892ec4f06f634 Mon Sep 17 00:00:00 2001
From: Mohammad Rafi
Date: Thu, 13 Jun 2024 10:18:37 -0400
Subject: [PATCH 6/8] linter adjustments

---
 process/tests/test_process.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/process/tests/test_process.py b/process/tests/test_process.py
index 1ac310997e50b..a1dbd67184c13 100644
--- a/process/tests/test_process.py
+++ b/process/tests/test_process.py
@@ -237,7 +237,8 @@ def test_check_missing_process(aggregator, dd_run_check, caplog):
     aggregator.assert_service_check('process.up', count=1, status=process.CRITICAL)
     assert "Unable to find process named ['fooprocess', '/usr/bin/foo'] among processes" in caplog.text
 
-@pytest.mark.parametrize("oneshot",[True,False])
+
+@pytest.mark.parametrize("oneshot", [True, False])
 def test_check_real_process(aggregator, dd_run_check, oneshot):
     "Check that we detect python running (at least this process)"
     from datadog_checks.base.utils.platform import Platform

From b970c7cf535d7591b3608cd23ee59cf98c7aba30 Mon Sep 17 00:00:00 2001
From: Mohammad Rafi <36865458+mrafi97@users.noreply.github.com>
Date: Thu, 13 Jun 2024 14:30:26 -0400
Subject: [PATCH 7/8] Update test_bench.py license

Co-authored-by: Steven Yuen
---
 process/tests/test_bench.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/process/tests/test_bench.py b/process/tests/test_bench.py
index 2bbff428cefa3..0abee689c09f0 100644
--- a/process/tests/test_bench.py
+++ b/process/tests/test_bench.py
@@ -1,4 +1,4 @@
-# (C) Datadog, Inc. 2018-present
+# (C) Datadog, Inc. 2024-present
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
 from datadog_checks.process import ProcessCheck

From 6bb8ac31217d7fafeba657b503f2de13acc7f2b3 Mon Sep 17 00:00:00 2001
From: Branden Clark
Date: Fri, 21 Jun 2024 12:59:27 -0400
Subject: [PATCH 8/8] Apply suggestions from code review

Co-authored-by: Kyle Neale
---
 process/changelog.d/17817.added | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/process/changelog.d/17817.added b/process/changelog.d/17817.added
index e4d66f3b10541..bef7b39462633 100644
--- a/process/changelog.d/17817.added
+++ b/process/changelog.d/17817.added
@@ -1 +1 @@
-adding `use_oneshot` parameter to speed up check runs with `psutil.Process().oneshot()`
\ No newline at end of file
+Add `use_oneshot` parameter to speed up check runs with `psutil.Process().oneshot()`
\ No newline at end of file