Stop declaring "fake" class level variables in Environment, User and StatsEntry #1948

Merged
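This PR moves attributes that were previously declared at class level (defaulting to `None` and shared by every instance) into `__init__`, keeping each docstring next to the assignment so the API documentation stays the same. A minimal before/after sketch of the pattern (the `Example*` classes are illustrative, not taken from the Locust code):

```python
# Before: a "fake" class-level variable that mostly exists to carry a docstring
class ExampleBefore:
    host: str = None
    """Base URL of the target system"""

    def __init__(self, host=None):
        self.host = host


# After: a plain instance attribute; the docstring sits right after the assignment,
# which Sphinx autodoc also picks up, so the rendered API docs are unchanged.
class ExampleAfter:
    def __init__(self, host: str = None):
        self.host = host
        """Base URL of the target system"""
```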
6 changes: 6 additions & 0 deletions docs/api.rst
@@ -140,3 +140,9 @@ Web UI class

.. autoclass:: locust.web.WebUI
:members:

Other
=====

.. autoclass:: locust.stats.StatsEntry
:members:
6 changes: 3 additions & 3 deletions locust/argument_parser.py
@@ -485,7 +485,7 @@ def setup_parser_arguments(parser):
)


def get_parser(default_config_files=DEFAULT_CONFIG_FILES):
def get_parser(default_config_files=DEFAULT_CONFIG_FILES) -> LocustArgumentParser:
# get a parser that is only able to parse the -f argument
parser = get_empty_argument_parser(add_help=True, default_config_files=default_config_files)
# add all the other supported arguments
@@ -495,15 +495,15 @@ def get_parser(default_config_files=DEFAULT_CONFIG_FILES):
return parser


def parse_options(args=None):
def parse_options(args=None) -> configargparse.Namespace:
parser = get_parser()
parsed_opts = parser.parse_args(args=args)
if parsed_opts.stats_history_enabled and (parsed_opts.csv_prefix is None):
parser.error("'--csv-full-history' requires '--csv'.")
return parsed_opts


def default_args_dict():
def default_args_dict() -> dict:
# returns a dict containing the default arguments (before any custom arguments are added)
default_parser = get_empty_argument_parser()
setup_parser_arguments(default_parser)
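The only change in this file is the added return-type annotations; behaviour is untouched. A small hedged usage sketch (the `-f` flag and the `locustfile` attribute are assumed from the standard Locust CLI):

```python
from locust.argument_parser import parse_options

# parse_options() is now annotated as returning a configargparse.Namespace,
# which helps IDEs and type checkers follow attribute access on the result.
options = parse_options(["-f", "locustfile.py"])
print(options.locustfile)
```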
10 changes: 4 additions & 6 deletions locust/contrib/fasthttp.py
@@ -272,12 +272,6 @@ class by using the :py:func:`@task decorator <locust.task>` on the methods, or b
for keeping a user session between requests.
"""

client: FastHttpSession = None
"""
Instance of HttpSession that is created upon instantiation of User.
The client support cookies, and therefore keeps the session between HTTP requests.
"""

# Below are various UserAgent settings. Change these in your subclass to alter FastHttpUser's behaviour.
# It needs to be done before FastHttpUser is instantiated, changing them later will have no effect

@@ -322,6 +316,10 @@ def __init__(self, environment):
concurrency=self.concurrency,
user=self,
)
"""
Instance of HttpSession that is created upon instantiation of User.
The client support cookies, and therefore keeps the session between HTTP requests.
"""


class FastResponse(CompatResponse):
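Nothing changes functionally for FastHttpUser subclasses: `self.client` is still created in `__init__`, it just no longer has a misleading class-level `None` declaration. A hedged sketch of typical usage (the host and path are placeholders):

```python
from locust import task
from locust.contrib.fasthttp import FastHttpUser


class QuickstartUser(FastHttpUser):
    host = "http://example.com"  # placeholder target

    @task
    def index(self):
        # self.client is the FastHttpSession created in FastHttpUser.__init__;
        # it keeps cookies between requests, so the session persists.
        self.client.get("/")
```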
107 changes: 45 additions & 62 deletions locust/env.py
@@ -5,8 +5,11 @@
List,
Type,
TypeVar,
Union,
)

from configargparse import Namespace

from .event import Events
from .exception import RunnerAlreadyExistsError
from .stats import RequestStats
@@ -21,90 +24,70 @@


class Environment:
events: Events = None
"""
Event hooks used by Locust internally, as well as to extend Locust's functionality
See :ref:`events` for available events.
"""

user_classes: List[Type[User]] = []
"""User classes that the runner will run"""

shape_class: LoadTestShape = None
"""A shape class to control the shape of the load test"""

tags = None
"""If set, only tasks that are tagged by tags in this list will be executed"""

exclude_tags = None
"""If set, only tasks that aren't tagged by tags in this list will be executed"""

stats: RequestStats = None
"""Reference to RequestStats instance"""

runner: Runner = None
"""Reference to the :class:`Runner <locust.runners.Runner>` instance"""

web_ui: WebUI = None
"""Reference to the WebUI instance"""

host: str = None
"""Base URL of the target system"""

reset_stats = False
"""Determines if stats should be reset once all simulated users have been spawned"""

stop_timeout = None
"""
If set, the runner will try to stop the running users gracefully and wait this many seconds
before killing them hard.
"""

catch_exceptions = True
"""
If True exceptions that happen within running users will be caught (and reported in UI/console).
If False, exceptions will be raised.
"""

process_exit_code: int = None
"""
If set it'll be the exit code of the Locust process
"""

parsed_options = None
"""Reference to the parsed command line options (used to pre-populate fields in Web UI). May be None when using Locust as a library"""

def __init__(
self,
*,
user_classes=None,
shape_class=None,
tags=None,
locustfile=None,
user_classes: Union[List[Type[User]], None] = None,
shape_class: Union[LoadTestShape, None] = None,
tags: Union[List[str], None] = None,
locustfile: str = None,
exclude_tags=None,
events=None,
host=None,
events: Events = None,
host: str = None,
reset_stats=False,
stop_timeout=None,
stop_timeout: Union[float, None] = None,
catch_exceptions=True,
parsed_options=None,
parsed_options: Namespace = None,
):

self.runner: Runner = None
"""Reference to the :class:`Runner <locust.runners.Runner>` instance"""

self.web_ui: WebUI = None
"""Reference to the WebUI instance"""

self.process_exit_code: int = None
"""
If set it'll be the exit code of the Locust process
"""

if events:
self.events = events
"""
Event hooks used by Locust internally, as well as to extend Locust's functionality
See :ref:`events` for available events.
"""
else:
self.events = Events()

self.locustfile = locustfile
self.user_classes = user_classes or []
"""Filename (not path) of locustfile"""
self.user_classes: List[Type[User]] = user_classes or []
"""User classes that the runner will run"""
self.shape_class = shape_class
"""A shape class to control the shape of the load test"""
self.tags = tags
"""If set, only tasks that are tagged by tags in this list will be executed"""
self.exclude_tags = exclude_tags
"""If set, only tasks that aren't tagged by tags in this list will be executed"""
self.stats = RequestStats()
"""Reference to RequestStats instance"""
self.host = host
"""Base URL of the target system"""
self.reset_stats = reset_stats
"""Determines if stats should be reset once all simulated users have been spawned"""
self.stop_timeout = stop_timeout
"""
If set, the runner will try to stop the running users gracefully and wait this many seconds
before killing them hard.
"""
self.catch_exceptions = catch_exceptions
"""
If True exceptions that happen within running users will be caught (and reported in UI/console).
If False, exceptions will be raised.
"""
self.parsed_options = parsed_options
"""Reference to the parsed command line options (used to pre-populate fields in Web UI). May be None when using Locust as a library"""

self._filter_tasks_by_tags()

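With the attributes now assigned in `__init__`, the annotated keyword arguments also document how `Environment` is constructed when Locust is used as a library. A hedged sketch (the user class and target URL are placeholders; the local-runner call follows the documented library usage):

```python
from locust import HttpUser, task
from locust.env import Environment


class WebsiteUser(HttpUser):
    host = "http://example.com"  # placeholder target

    @task
    def index(self):
        self.client.get("/")


# The keyword arguments match the annotated __init__ parameters above.
# runner and web_ui start out as None and are set by create_local_runner()/create_web_ui().
env = Environment(user_classes=[WebsiteUser], host="http://example.com")
runner = env.create_local_runner()
```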
117 changes: 49 additions & 68 deletions locust/stats.py
@@ -115,7 +115,7 @@ def diff_response_time_dicts(latest, old):

class RequestStats:
"""
Class that holds the request statistics.
Class that holds the request statistics. Accessible in a User from self.environment.stats
"""

def __init__(self, use_response_times_cache=True):
@@ -126,8 +126,8 @@ def __init__(self, use_response_times_cache=True):
is not needed.
"""
self.use_response_times_cache = use_response_times_cache
self.entries = {}
self.errors = {}
self.entries: dict[str, StatsEntry] = {}
self.errors: dict[str, StatsError] = {}
self.total = StatsEntry(self, "Aggregated", None, use_response_times_cache=self.use_response_times_cache)
self.history = []

@@ -212,75 +212,56 @@ class StatsEntry:
Represents a single stats entry (name and method)
"""

name = None
""" Name (URL) of this stats entry """

method = None
""" Method (GET, POST, PUT, etc.) """

num_requests = None
""" The number of requests made """

num_none_requests = None
""" The number of requests made with a None response time (typically async requests) """

num_failures = None
""" Number of failed request """

total_response_time = None
""" Total sum of the response times """

min_response_time = None
""" Minimum response time """

max_response_time = None
""" Maximum response time """

num_reqs_per_sec = None
""" A {second => request_count} dict that holds the number of requests made per second """

num_fail_per_sec = None
""" A (second => failure_count) dict that hold the number of failures per second """

response_times = None
"""
A {response_time => count} dict that holds the response time distribution of all
the requests.

The keys (the response time in ms) are rounded to store 1, 2, ... 9, 10, 20. .. 90,
100, 200 .. 900, 1000, 2000 ... 9000, in order to save memory.

This dict is used to calculate the median and percentile response times.
"""

use_response_times_cache = False
"""
If set to True, the copy of the response_time dict will be stored in response_times_cache
every second, and kept for 20 seconds (by default, will be CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW + 10).
We can use this dict to calculate the *current* median response time, as well as other response
time percentiles.
"""

response_times_cache = None
"""
If use_response_times_cache is set to True, this will be a {timestamp => CachedResponseTimes()}
OrderedDict that holds a copy of the response_times dict for each of the last 20 seconds.
"""

total_content_length = None
""" The sum of the content length of all the requests for this entry """

start_time = None
""" Time of the first request for this entry """

last_request_timestamp = None
""" Time of the last request for this entry """

def __init__(self, stats, name, method, use_response_times_cache=False):
def __init__(self, stats: RequestStats, name: str, method: str, use_response_times_cache=False):
self.stats = stats
self.name = name
""" Name (URL) of this stats entry """
self.method = method
""" Method (GET, POST, PUT, etc.) """
self.use_response_times_cache = use_response_times_cache
"""
If set to True, the copy of the response_time dict will be stored in response_times_cache
every second, and kept for 20 seconds (by default, will be CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW + 10).
We can use this dict to calculate the *current* median response time, as well as other response
time percentiles.
"""
self.num_requests = 0
""" The number of requests made """
self.num_none_requests = 0
""" The number of requests made with a None response time (typically async requests) """
self.num_failures = 0
""" Number of failed request """
self.total_response_time = 0
""" Total sum of the response times """
self.min_response_time = None
""" Minimum response time """
self.max_response_time = 0
""" Maximum response time """
self.num_reqs_per_sec = {}
""" A {second => request_count} dict that holds the number of requests made per second """
self.num_fail_per_sec = {}
""" A (second => failure_count) dict that hold the number of failures per second """
self.response_times = {}
"""
A {response_time => count} dict that holds the response time distribution of all
the requests.

The keys (the response time in ms) are rounded to store 1, 2, ... 9, 10, 20. .. 90,
100, 200 .. 900, 1000, 2000 ... 9000, in order to save memory.

This dict is used to calculate the median and percentile response times.
"""
self.response_times_cache = None
"""
If use_response_times_cache is set to True, this will be a {timestamp => CachedResponseTimes()}
OrderedDict that holds a copy of the response_times dict for each of the last 20 seconds.
"""
self.total_content_length = 0
""" The sum of the content length of all the requests for this entry """
self.start_time = 0.0
""" Time of the first request for this entry """
self.last_request_timestamp = None
""" Time of the last request for this entry """
self.reset()

def reset(self):
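The updated RequestStats docstring points out that the stats are reachable from a running User via `self.environment.stats`. A hedged sketch of reading the aggregated entry (attribute names are taken from the StatsEntry fields above; the target URL is a placeholder):

```python
from locust import HttpUser, task


class StatsAwareUser(HttpUser):
    host = "http://example.com"  # placeholder target

    @task
    def index(self):
        self.client.get("/")
        total = self.environment.stats.total  # the "Aggregated" StatsEntry
        # num_requests, num_failures and median_response_time are StatsEntry members
        print(total.num_requests, total.num_failures, total.median_response_time)
```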
24 changes: 9 additions & 15 deletions locust/user/users.py
@@ -100,18 +100,14 @@ class ForumPage(TaskSet):
abstract = True
"""If abstract is True, the class is meant to be subclassed, and locust will not spawn users of this class during a test."""

environment = None
"""A reference to the :py:class:`Environment <locust.env.Environment>` in which this user is running"""

client = None
_state = None
_greenlet: greenlet.Greenlet = None
_group: Group
_taskset_instance = None

def __init__(self, environment):
super().__init__()
self.environment = environment
"""A reference to the :py:class:`Environment <locust.env.Environment>` in which this user is running"""
self._state = None
self._greenlet: greenlet.Greenlet = None
self._group: Group
self._taskset_instance: TaskSet = None

def on_start(self):
"""
@@ -222,12 +218,6 @@ class by using the :py:func:`@task decorator <locust.task>` on methods, or by se
abstract = True
"""If abstract is True, the class is meant to be subclassed, and users will not choose this locust during a test"""

client: HttpSession = None
"""
Instance of HttpSession that is created upon instantiation of Locust.
The client supports cookies, and therefore keeps the session between HTTP requests.
"""

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if self.host is None:
@@ -242,3 +232,7 @@ def __init__(self, *args, **kwargs):
)
session.trust_env = False
self.client = session
"""
Instance of HttpSession that is created upon instantiation of Locust.
The client supports cookies, and therefore keeps the session between HTTP requests.
"""