feat: #2955 Allow to exclude some statistics from aggregation #2956

Open · wants to merge 2 commits into base: master
9 changes: 9 additions & 0 deletions locust/argument_parser.py
@@ -703,6 +703,15 @@ def setup_parser_arguments(parser):
action="store_true",
help="Prints the final stats in JSON format to stdout. Useful for parsing the results in other programs/scripts. Use together with --headless and --skip-log for an output only with the json data.",
)
stats_group.add_argument(
"--exclude-from-aggregation",
type=str,
metavar="<str>",
default=None,
dest="exclude_from_aggregation",
env_var="LOCUST_EXCLUDE_FROM_AGGREGATION",
help='Exclude requests whose method or name matches this regular expression from the "Aggregated" stats.',
)

log_group = parser.add_argument_group("Logging options")
log_group.add_argument(
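With the option above in place, the exclusion pattern can be supplied on the command line or through LOCUST_EXCLUDE_FROM_AGGREGATION. A minimal sketch of what the parsed value looks like, assuming this PR's argument definition; the pattern "^/health" is only an illustrative value:

from locust.argument_parser import parse_options

# Hypothetical invocation: exclude every request whose method or name
# matches the regular expression "^/health" from the "Aggregated" stats.
options = parse_options(args=["--exclude-from-aggregation", "^/health"])
print(options.exclude_from_aggregation)  # "^/health"
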
9 changes: 7 additions & 2 deletions locust/env.py
@@ -1,6 +1,7 @@
from __future__ import annotations

from operator import methodcaller
from re import Pattern
from typing import Callable, TypeVar

from configargparse import Namespace
@@ -27,6 +28,7 @@ def __init__(
tags: list[str] | None = None,
locustfile: str | None = None,
exclude_tags: list[str] | None = None,
exclude_from_aggregation: str | Pattern[str] | None = None,
events: Events | None = None,
host: str | None = None,
reset_stats=False,
@@ -69,7 +71,8 @@ def __init__(
"""If set, only tasks that are tagged by tags in this list will be executed. Leave this as None to use the one from parsed_options"""
self.exclude_tags = exclude_tags
"""If set, only tasks that aren't tagged by tags in this list will be executed. Leave this as None to use the one from parsed_options"""
self.stats = RequestStats()
self.exclude_from_aggregation = exclude_from_aggregation
self.stats = RequestStats(exclude_from_aggregation=exclude_from_aggregation)
"""Reference to RequestStats instance"""
self.host = host
"""Base URL of the target system"""
@@ -154,7 +157,9 @@ def create_worker_runner(self, master_host: str, master_port: int) -> WorkerRunner:
"""
# Create a new RequestStats with use_response_times_cache set to False to save some memory
# and CPU cycles, since the response_times_cache is not needed for Worker nodes
self.stats = RequestStats(use_response_times_cache=False)
self.stats = RequestStats(
use_response_times_cache=False, exclude_from_aggregation=self.exclude_from_aggregation
)
return self._create_runner(
WorkerRunner,
master_host=master_host,
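The Environment change simply forwards the pattern into the RequestStats instance it creates (and into the one recreated for worker runners). A minimal sketch of that pass-through, assuming this PR is applied; the method and request names are illustrative:

from locust.env import Environment

env = Environment(exclude_from_aggregation=r"^CUSTOM$")

# A matching method is still tracked in its own entry, but skipped in the "Aggregated" total.
env.stats.log_request("CUSTOM", "/internal", 42, 128)
# A non-matching request is counted both per entry and in the total.
env.stats.log_request("GET", "/home", 42, 128)

print(env.stats.total.num_requests)  # 1
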
1 change: 1 addition & 0 deletions locust/main.py
@@ -89,6 +89,7 @@ def create_environment(
available_user_classes=available_user_classes,
available_shape_classes=available_shape_classes,
available_user_tasks=available_user_tasks,
exclude_from_aggregation=options.exclude_from_aggregation,
)


17 changes: 15 additions & 2 deletions locust/stats.py
@@ -5,6 +5,7 @@
import json
import logging
import os
import re
import signal
import time
from abc import abstractmethod
@@ -13,6 +14,7 @@
from copy import copy
from html import escape
from itertools import chain
from re import Pattern
from types import FrameType
from typing import TYPE_CHECKING, Any, Callable, NoReturn, Protocol, TypedDict, TypeVar, cast

@@ -184,14 +186,17 @@ class RequestStats:
Class that holds the request statistics. Accessible in a User from self.environment.stats
"""

def __init__(self, use_response_times_cache=True):
def __init__(self, use_response_times_cache=True, exclude_from_aggregation: str | Pattern[str] | None = None):
"""
:param use_response_times_cache: The value of use_response_times_cache will be set for each StatsEntry()
when they are created. Setting it to False saves some memory and CPU
cycles which we can do on Worker nodes where the response_times_cache
is not needed.
:param exclude_from_aggregation: Requests whose method or name matches this regular expression are
excluded from the "Aggregated" stats. By default, all requests are included.
"""
self.use_response_times_cache = use_response_times_cache
self.exclude_from_aggregation = exclude_from_aggregation
self.entries: dict[tuple[str, str], StatsEntry] = EntriesDict(self)
self.errors: dict[str, StatsError] = {}
self.total = StatsEntry(self, "Aggregated", None, use_response_times_cache=self.use_response_times_cache)
@@ -217,8 +222,16 @@ def last_request_timestamp(self):
def start_time(self):
return self.total.start_time

def exclude_from_total(self, method: str, name: str):
if self.exclude_from_aggregation:
found_in_method = re.search(self.exclude_from_aggregation, method)
found_in_name = re.search(self.exclude_from_aggregation, name)
return found_in_method or found_in_name
return False

def log_request(self, method: str, name: str, response_time: int, content_length: int) -> None:
self.total.log(response_time, content_length)
if not self.exclude_from_total(method, name):
self.total.log(response_time, content_length)
self.entries[(name, method)].log(response_time, content_length)

def log_error(self, method: str, name: str, error: Exception | str | None) -> None:
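The core of the change is exclude_from_total(): re.search is applied to both the method and the name, so a match on either keeps the request out of the "Aggregated" entry while its own per-request entry is still updated. A minimal sketch of the resulting behaviour, assuming this PR; the pattern and request names are illustrative:

from locust.stats import RequestStats

stats = RequestStats(exclude_from_aggregation=r"health")

stats.log_request("GET", "/health", 10, 0)  # name matches -> excluded from the total
stats.log_request("GET", "/orders", 10, 0)  # no match -> included in the total

print(stats.total.num_requests)                        # 1
print(stats.entries[("/health", "GET")].num_requests)  # 1 (still tracked individually)
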
5 changes: 5 additions & 0 deletions locust/test/test_parser.py
@@ -109,6 +109,8 @@ def test_parse_options(self):
"-t",
"5m",
"--reset-stats",
"--exclude-from-aggregation",
"CUSTOM",
"--stop-timeout",
"5",
"MyUserClass",
@@ -120,6 +122,7 @@
self.assertEqual("5m", options.run_time)
self.assertTrue(options.reset_stats)
self.assertEqual("5", options.stop_timeout)
self.assertEqual("CUSTOM", options.exclude_from_aggregation)
self.assertEqual(["MyUserClass"], options.user_classes)
# check default arg
self.assertEqual(8089, options.web_port)
@@ -132,6 +135,7 @@ def test_parse_options_from_env(self):
os.environ["LOCUST_RESET_STATS"] = "true"
os.environ["LOCUST_STOP_TIMEOUT"] = "5"
os.environ["LOCUST_USER_CLASSES"] = "MyUserClass"
os.environ["LOCUST_EXCLUDE_FROM_AGGREGATION"] = "CUSTOM"
options = parse_options(args=[])

self.assertEqual("locustfile.py", options.locustfile)
@@ -141,6 +145,7 @@
self.assertTrue(options.reset_stats)
self.assertEqual("5", options.stop_timeout)
self.assertEqual(["MyUserClass"], options.user_classes)
self.assertEqual("CUSTOM", options.exclude_from_aggregation)
# check default arg
self.assertEqual(8089, options.web_port)

1 change: 1 addition & 0 deletions locust/test/test_runners.py
@@ -116,6 +116,7 @@ def __init__(self):
self.heartbeat_interval = 1
self.stop_timeout = 0.0
self.connection_broken = False
self.exclude_from_aggregation = None

def reset_stats(self):
pass
10 changes: 10 additions & 0 deletions locust/test/test_stats.py
@@ -98,6 +98,16 @@ def test_total_rps(self):
self.assertAlmostEqual(s2.total_rps, 1 / 5.0)
self.assertEqual(self.stats.total.total_rps, 10 / 5.0)

def test_total_exclude_from_aggregation(self):
before_count = self.stats.num_requests
# First without exclusion
self.stats.log_request("CUSTOM", "some_name", 1337, 1337)
self.assertEqual(self.stats.num_requests, before_count + 1)
# Second with exclusion
self.stats.exclude_from_aggregation = r"CUSTOM"
self.stats.log_request("CUSTOM", "some_name", 1337, 1337)
self.assertEqual(self.stats.num_requests, before_count + 1)

def test_rps_less_than_one_second(self):
s = StatsEntry(self.stats, "percentile_test", "GET")
for i in range(10):