diff --git a/.github/workflows/black.yml b/.github/workflows/black.yml
index 9065b5e..4a3a46b 100644
--- a/.github/workflows/black.yml
+++ b/.github/workflows/black.yml
@@ -6,5 +6,6 @@ jobs:
lint:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v4
- uses: psf/black@stable
diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml
new file mode 100644
index 0000000..8a4a9bb
--- /dev/null
+++ b/.github/workflows/pre-commit.yml
@@ -0,0 +1,14 @@
+name: pre-commit
+
+on:
+ [push, pull_request]
+
+jobs:
+ pre-commit:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-python@v4
+ with:
+ python-version: "3.11"
+ - uses: pre-commit/action@v3.0.0
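+      # By default pre-commit/action runs `pre-commit run --all-files` with the
+      # hooks declared in .pre-commit-config.yaml.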
diff --git a/.gitignore b/.gitignore
index 1c513bc..77b4864 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,3 +10,4 @@ __pycache__
.mypy_cache
*.swp
.vscode/settings.json
+.coverage
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 800bb4a..70a7fca 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,30 +1,31 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.4.0
+ rev: v4.5.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
+- repo: https://github.com/asottile/pyupgrade
+ rev: v3.15.0
+ hooks:
+ - id: pyupgrade
+ args: [--py39-plus]
- repo: https://github.com/psf/black
- rev: 23.9.1
+ rev: 23.12.0
hooks:
- id: black
args: [--config=pyproject.toml]
- repo: https://github.com/hadialqattan/pycln
- rev: v2.2.2
+ rev: v2.4.0
hooks:
- id: pycln
args: [--config=pyproject.toml]
- repo: https://github.com/pycqa/isort
- rev: 5.12.0
+ rev: 5.13.2
hooks:
- id: isort
files: "\\.(py)$"
args: [--settings-path=pyproject.toml]
- repo: https://github.com/dosisod/refurb
- rev: v1.21.0
+ rev: v1.25.0
hooks:
- id: refurb
-- repo: https://github.com/asottile/pyupgrade
- rev: v3.13.0
- hooks:
- - id: pyupgrade
diff --git a/.vscode/launch.json b/.vscode/launch.json
index fbe98a3..68224d6 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -4,6 +4,15 @@
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
+ {
+ "name": "Python: Debug Tests",
+ "type": "python",
+ "request": "launch",
+ "program": "${file}",
+ "purpose": ["debug-test"],
+ "console": "integratedTerminal",
+ "justMyCode": false
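+      // "purpose": ["debug-test"] lets the Python extension reuse this
+      // configuration when debugging tests from the Testing view.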
+ },
{
"name": "Python: Current File",
"type": "python",
diff --git a/.vscode/settings.json b/.vscode/settings.json
index ac7a5e5..6107dc4 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,4 +1,9 @@
{
"python.analysis.typeCheckingMode": "basic",
- "python.terminal.activateEnvironment": true
+ "python.terminal.activateEnvironment": true,
+ "python.testing.pytestArgs": [
+ "tests"
+ ],
+ "python.testing.unittestEnabled": false,
+ "python.testing.pytestEnabled": true
}
diff --git a/.vscode/tasks.json b/.vscode/tasks.json
new file mode 100644
index 0000000..4c070f7
--- /dev/null
+++ b/.vscode/tasks.json
@@ -0,0 +1,35 @@
+{
+ // See https://go.microsoft.com/fwlink/?LinkId=733558
+ // for the documentation about the tasks.json format
+  // Two tasks run pytest over the tests directory, with or without coverage;
+  // the plain pytest task is the default test task.
+ "version": "2.0.0",
+ "tasks": [
+ {
+ "label": "Run pytest with coverage",
+ "type": "shell",
+ "command": "pytest",
+ "args": [
+ "--cov=pyadtpulse",
+ "--cov-report=html",
+ "${workspaceFolder}/tests"
+ ],
+ "group": {
+ "kind": "test",
+ "isDefault": false
+ }
+ },
+ {
+ "label": "Run pytest without coverage",
+ "type": "shell",
+ "command": "pytest",
+ "args": [
+ "${workspaceFolder}/tests"
+ ],
+ "group": {
+ "kind": "test",
+ "isDefault": true
+ }
+ }
+ ]
+ }
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d589094..79142ab 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,11 @@
+## 1.2.0 (2024-01-30)
+
+* add exceptions and exception handling
+* make error handling more robust
+* refactor code into smaller objects
+* add testing framework
+* add poetry
+
## 1.1.5 (2023-12-22)
* fix more zone html parsing due to changes in Pulse v27
diff --git a/conftest.py b/conftest.py
new file mode 100644
index 0000000..da932af
--- /dev/null
+++ b/conftest.py
@@ -0,0 +1,458 @@
+"""Pulse Test Configuration."""
+
+import os
+import re
+import sys
+from collections.abc import Generator
+from datetime import datetime
+from enum import Enum
+from pathlib import Path
+from typing import Any
+from unittest.mock import AsyncMock, patch
+from urllib import parse
+
+import freezegun
+import pytest
+from aiohttp import client_exceptions, web
+from aioresponses import aioresponses
+
+# Get the root directory of your project
+project_root = Path(__file__).resolve().parent
+
+# Modify sys.path to include the project root
+sys.path.insert(0, str(project_root))
+test_file_dir = project_root / "tests" / "data_files"
+# pylint: disable=wrong-import-position
+# ruff: noqa: E402
+# flake8: noqa: E402
+from pyadtpulse.const import (
+ ADT_DEVICE_URI,
+ ADT_GATEWAY_URI,
+ ADT_LOGIN_URI,
+ ADT_LOGOUT_URI,
+ ADT_MFA_FAIL_URI,
+ ADT_ORB_URI,
+ ADT_SUMMARY_URI,
+ ADT_SYNC_CHECK_URI,
+ ADT_SYSTEM_SETTINGS,
+ ADT_SYSTEM_URI,
+ DEFAULT_API_HOST,
+)
+from pyadtpulse.pulse_connection_properties import PulseConnectionProperties
+from pyadtpulse.util import remove_prefix
+
+MOCKED_API_VERSION = "27.0.0-140"
+
+
+class LoginType(Enum):
+ """Login Types."""
+
+ SUCCESS = "signin.html"
+ MFA = "mfa.html"
+ FAIL = "signin_fail.html"
+ LOCKED = "signin_locked.html"
+ NOT_SIGNED_IN = "not_signed_in.html"
+
+
+@pytest.fixture
+def read_file():
+ """Fixture to read a file.
+
+ Args:
+ file_name (str): Name of the file to read
+ """
+
+ def _read_file(file_name: str) -> str:
+ file_path = test_file_dir / file_name
+ return file_path.read_text(encoding="utf-8")
+
+ return _read_file
+
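+# Illustrative usage of the read_file fixture above inside a test:
+#     html = read_file("signin.html")  # reads tests/data_files/signin.html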
+
+@pytest.fixture
+def mock_sleep(mocker):
+ """Fixture to mock asyncio.sleep."""
+ return mocker.patch("asyncio.sleep", new_callable=AsyncMock)
+
+
+@pytest.fixture
+def freeze_time_to_now():
+ """Fixture to freeze time to now."""
+ current_time = datetime.now()
+ with freezegun.freeze_time(current_time) as frozen_time:
+ yield frozen_time
+
+
+@pytest.fixture
+def get_mocked_connection_properties() -> PulseConnectionProperties:
+ """Fixture to get the test connection properties."""
+ p = PulseConnectionProperties(DEFAULT_API_HOST)
+ p.api_version = MOCKED_API_VERSION
+ return p
+
+
+@pytest.fixture
+def mock_server_down():
+ """Fixture to mock server down."""
+ with aioresponses() as m:
+ m.get(
+ DEFAULT_API_HOST,
+ status=500,
+ exception=client_exceptions.ServerConnectionError(),
+ repeat=True,
+ )
+ yield m
+
+
+@pytest.fixture
+def mock_server_temporarily_down(get_mocked_url, read_file):
+ """Fixture to mock server temporarily down."""
+ with aioresponses() as responses:
+ responses.get(
+ DEFAULT_API_HOST,
+ status=500,
+ exception=client_exceptions.ServerConnectionError(),
+ )
+ responses.get(
+ DEFAULT_API_HOST,
+ status=500,
+ exception=client_exceptions.ServerConnectionError(),
+ )
+ responses.get(
+ DEFAULT_API_HOST,
+ status=302,
+ headers={"Location": get_mocked_url(ADT_LOGIN_URI)},
+ )
+ responses.get(
+ f"{DEFAULT_API_HOST}/{ADT_LOGIN_URI}",
+ status=307,
+ headers={"Location": get_mocked_url(ADT_LOGIN_URI)},
+ repeat=True,
+ )
+ responses.get(
+ get_mocked_url(ADT_LOGIN_URI),
+ body=read_file("signin.html"),
+ content_type="text/html",
+ )
+
+ yield responses
+
+
+@pytest.fixture
+def get_mocked_url(get_mocked_connection_properties):
+ """Fixture to get the test url."""
+
+ def _get_mocked_url(path: str) -> str:
+ return get_mocked_connection_properties.make_url(path)
+
+ return _get_mocked_url
+
+
+@pytest.fixture
+def get_relative_mocked_url(get_mocked_connection_properties):
+ def _get_relative_mocked_url(path: str) -> str:
+ return remove_prefix(
+ get_mocked_connection_properties.make_url(path), DEFAULT_API_HOST
+ )
+
+ return _get_relative_mocked_url
+
+
+@pytest.fixture
+def get_mocked_mapped_static_responses(get_mocked_url) -> dict[str, str]:
+ """Fixture to get the test mapped responses."""
+ return {
+ get_mocked_url(ADT_SUMMARY_URI): "summary.html",
+ get_mocked_url(ADT_SYSTEM_URI): "system.html",
+ get_mocked_url(ADT_GATEWAY_URI): "gateway.html",
+ get_mocked_url(ADT_MFA_FAIL_URI): "mfa.html",
+ }
+
+
+@pytest.fixture
+def extract_ids_from_data_directory() -> list[str]:
+ """Extract the device ids all the device files in the data directory."""
+ id_pattern = re.compile(r"device_(\d{1,})\.html")
+ ids = set()
+ for file_name in os.listdir(test_file_dir):
+ match = id_pattern.match(file_name)
+ if match:
+ ids.add(match.group(1))
+ return list(ids)
+
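+# For example, a data file named device_11.html yields the id "11", which the
+# mocked_server_responses fixture below maps to ADT_DEVICE_URI?id=11 (the id
+# used here is only an illustration).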
+
+@pytest.fixture
+def mocked_server_responses(
+ get_mocked_mapped_static_responses: dict[str, str],
+ read_file,
+ get_mocked_url,
+ extract_ids_from_data_directory: list[str],
+) -> Generator[aioresponses, Any, None]:
+ """Fixture to get the test mapped responses."""
+ static_responses = get_mocked_mapped_static_responses
+ with aioresponses() as responses:
+ for url, file_name in static_responses.items():
+ responses.get(
+ url, body=read_file(file_name), content_type="text/html", repeat=True
+ )
+
+ # device id rewriting
+ for device_id in extract_ids_from_data_directory:
+ responses.get(
+ f"{get_mocked_url(ADT_DEVICE_URI)}?id={device_id}",
+ body=read_file(f"device_{device_id}.html"),
+ content_type="text/html",
+ )
+ # redirects
+ responses.get(
+ get_mocked_url(ADT_LOGIN_URI),
+ body=read_file("signin.html"),
+ content_type="text/html",
+ )
+ responses.get(
+ DEFAULT_API_HOST,
+ status=302,
+ headers={"Location": get_mocked_url(ADT_LOGIN_URI)},
+ repeat=True,
+ )
+ responses.get(
+ f"{DEFAULT_API_HOST}/",
+ status=302,
+ headers={"Location": get_mocked_url(ADT_LOGIN_URI)},
+ repeat=True,
+ )
+ responses.get(
+ f"{DEFAULT_API_HOST}/{ADT_LOGIN_URI}",
+ status=307,
+ headers={"Location": get_mocked_url(ADT_LOGIN_URI)},
+ repeat=True,
+ )
+ # login/logout
+
+ logout_pattern = re.compile(
+ rf"{re.escape(get_mocked_url(ADT_LOGOUT_URI))}/?.*$"
+ )
+ responses.get(
+ logout_pattern,
+ status=302,
+ headers={"Location": get_mocked_url(ADT_LOGIN_URI)},
+ repeat=True,
+ )
+
+ # not doing default sync check response or keepalive
+ # because we need to set it on each test
+ yield responses
+
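+# Example of a per-test registration (see the note above), using the helper
+# below; the "sync_check.html" data file name is an assumption about
+# tests/data_files:
+#     add_custom_response(
+#         mocked_server_responses, read_file,
+#         get_mocked_url(ADT_SYNC_CHECK_URI), file_name="sync_check.html",
+#     )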
+
+def add_custom_response(
+ mocked_server_responses,
+ read_file,
+ url: str,
+ method: str = "GET",
+ status: int = 200,
+ file_name: str | None = None,
+ headers: dict[str, Any] | None = None,
+):
+ if method.upper() not in ("GET", "POST"):
+ raise ValueError("Unsupported HTTP method. Only GET and POST are supported.")
+
+ mocked_server_responses.add(
+ url,
+ method,
+ status=status,
+ body=read_file(file_name) if file_name else "",
+ content_type="text/html",
+ headers=headers,
+ )
+
+
+def add_signin(
+ signin_type: LoginType, mocked_server_responses, get_mocked_url, read_file
+):
+ if signin_type != LoginType.SUCCESS:
+ add_custom_response(
+ mocked_server_responses,
+ read_file,
+ get_mocked_url(ADT_LOGIN_URI),
+ file_name=signin_type.value,
+ )
+ redirect = get_mocked_url(ADT_LOGIN_URI)
+ if signin_type == LoginType.MFA:
+ redirect = get_mocked_url(ADT_MFA_FAIL_URI)
+ if signin_type == LoginType.SUCCESS:
+ redirect = get_mocked_url(ADT_SUMMARY_URI)
+ add_custom_response(
+ mocked_server_responses,
+ read_file,
+ get_mocked_url(ADT_LOGIN_URI),
+ status=307,
+ method="POST",
+ headers={"Location": redirect},
+ )
+
+
+def add_logout(mocked_server_responses, get_mocked_url, read_file):
+ add_custom_response(
+ mocked_server_responses,
+ read_file,
+ get_mocked_url(ADT_LOGOUT_URI),
+ file_name=LoginType.SUCCESS.value,
+ )
+
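+# Illustrative test usage of the two helpers above:
+#     add_signin(
+#         LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file
+#     )
+#     add_logout(mocked_server_responses, get_mocked_url, read_file)
+# registering a successful login redirect and a logout response before the
+# client is exercised.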
+
+@pytest.fixture
+def patched_sync_task_sleep() -> Generator[AsyncMock, Any, Any]:
+ """Fixture to patch asyncio.sleep in async_query()."""
+    # Patch the global asyncio.sleep; a dotted class/method path is not a
+    # valid mock.patch target.
+    with patch("asyncio.sleep", new_callable=AsyncMock) as mock:
+        yield mock
+
+
+# not using this currently
+class PulseMockedWebServer:
+ """Mocked Pulse Web Server."""
+
+ def __init__(self, pulse_properties: PulseConnectionProperties):
+ """Initialize the PulseMockedWebServer."""
+ self.app = web.Application()
+ self.logged_in = False
+ self.status_code = 200
+ self.retry_after_header: str | None = None
+ self.pcp = pulse_properties
+ self.uri_mapping: dict[str, list[str]] = {
+ "/": ["signin.html"],
+ self._make_local_prefix(ADT_LOGIN_URI): ["signin.html"],
+ self._make_local_prefix(ADT_LOGOUT_URI): ["signout.html"],
+ self._make_local_prefix(ADT_SUMMARY_URI): ["summary.html"],
+ self._make_local_prefix(ADT_SYSTEM_URI): ["system.html"],
+ self._make_local_prefix(ADT_SYNC_CHECK_URI): ["sync_check.html"],
+ self._make_local_prefix(ADT_ORB_URI): ["orb.html"],
+ self._make_local_prefix(ADT_SYSTEM_SETTINGS): ["system_settings.html"],
+ }
+ super().__init__()
+ self.app.router.add_route("*", "/{path_info:.*}", self.handler)
+
+ def _make_local_prefix(self, uri: str) -> str:
+ return remove_prefix(self.pcp.make_url(uri), "https://")
+
+ async def handler(self, request: web.Request) -> web.Response | web.FileResponse:
+ """Handler for the PulseMockedWebServer."""
+ path = request.path
+
+ # Check if there is a query parameter for retry_after
+ query_params = parse.parse_qs(request.query_string)
+ retry_after_param = query_params.get("retry_after")
+
+ def serve_file(filename: str) -> web.Response | web.FileResponse:
+ try:
+ return web.FileResponse(filename)
+ except FileNotFoundError:
+ return web.Response(text="Not found", status=404)
+
+ # Function to parse the retry_after parameter
+ def parse_retry_after(value) -> int | datetime:
+ try:
+ return int(value)
+ except ValueError:
+ return datetime.fromisoformat(value)
+
+ # Simulate service unavailable for a specific path
+ def handle_service_unavailable(path: str) -> web.Response | None:
+ if path == "/service_unavailable" or self.status_code == 503:
+                retry_after = retry_after_param[0] if retry_after_param else None
+                self.retry_after_header = (
+                    str(parse_retry_after(retry_after)) if retry_after else None
+                )
+ self.status_code = 503
+ return web.Response(
+ text="Service Unavailable",
+ status=self.status_code,
+ headers=(
+ {"Retry-After": self.retry_after_header}
+ if self.retry_after_header
+ else None
+ ),
+ )
+ return None
+
+ def handle_rate_limit_exceeded(path: str) -> web.Response | None:
+ # Simulate rate limit exceeded for a specific path
+ if path == "/rate_limit_exceeded" or self.status_code == 429:
+                retry_after = retry_after_param[0] if retry_after_param else None
+                self.retry_after_header = (
+                    str(parse_retry_after(retry_after)) if retry_after else None
+                )
+ self.status_code = 429
+ return web.Response(
+ text="Rate Limit Exceeded",
+ status=self.status_code,
+ headers=(
+ {"Retry-After": self.retry_after_header}
+ if self.retry_after_header
+ else None
+ ),
+ )
+ return None
+
+ def handle_clear_status(path: str) -> web.Response | None:
+ # Simulate clear status for a specific path
+ if path == "/clear_status":
+ self.status_code = 200
+ self.retry_after_header = None
+ return web.Response(text="Default Response", status=self.status_code)
+ return None
+
+ def handle_add_response(
+ path: str, query_string: dict[str, list[str]]
+ ) -> web.Response | None:
+ if path == "/add_response":
+ patched_uri = query_string["uri"][0]
+ if patched_uri in self.uri_mapping:
+ files = query_string.get("files")
+ if files is not None:
+ self.uri_mapping[patched_uri].extend(files)
+ return web.Response(
+ text="Default Response",
+ status=200,
+ headers={"Content-Type": "text/plain"},
+ )
+ return web.Response(text="URI not found", status=404)
+
+ if (retval := handle_rate_limit_exceeded(path)) is not None:
+ return retval
+ if (retval := handle_service_unavailable(path)) is not None:
+ return retval
+ if (retval := handle_clear_status(path)) is not None:
+ return retval
+ if (retval := handle_add_response(path, query_params)) is not None:
+ return retval
+ # do the actual request handling
+ if (
+ path == self._make_local_prefix(ADT_LOGIN_URI)
+ ) and request.method == "POST":
+ self.logged_in = True
+ raise web.HTTPFound(ADT_SUMMARY_URI)
+ if (
+ path == self._make_local_prefix(ADT_LOGOUT_URI)
+ ) and request.method == "POST":
+ self.logged_in = False
+ raise web.HTTPFound(ADT_LOGIN_URI)
+ if not self.logged_in:
+ return serve_file("signin_fail.html")
+ if path == self._make_local_prefix(ADT_DEVICE_URI):
+ device_id = query_params["id"][0]
+            return serve_file(f"device_{device_id}.html")
+ files_to_serve = self.uri_mapping.get(path)
+ if not files_to_serve:
+ return web.Response(text="URI not found", status=404)
+ file_to_serve = files_to_serve[0]
+ if len(files_to_serve) > 1:
+ file_to_serve = self.uri_mapping[path].pop(1)
+ return serve_file(file_to_serve)
+
+
+@pytest.fixture
+def mocked_pulse_server(
+    get_mocked_connection_properties: PulseConnectionProperties,
+) -> PulseMockedWebServer:
+    """Fixture to create a mocked Pulse server."""
+    # Request the connection-properties fixture; fixtures cannot be called directly.
+    return PulseMockedWebServer(get_mocked_connection_properties)
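+# Illustrative note: PulseMockedWebServer wraps an aiohttp web.Application, so a
+# test could serve it with aiohttp's test utilities, e.g.
+#     server = aiohttp.test_utils.TestServer(mocked_pulse_server.app)
+#     await server.start_server()
+# and point the client at server.make_url("/") instead of the real Pulse host.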
diff --git a/example-client.py b/example-client.py
index 56030d4..9bf917b 100755
--- a/example-client.py
+++ b/example-client.py
@@ -7,8 +7,7 @@
import json
import sys
from pprint import pprint
-from time import sleep
-from typing import Dict, Optional
+from time import sleep, time
from pyadtpulse import PyADTPulse
from pyadtpulse.const import (
@@ -18,8 +17,16 @@
API_HOST_CA,
DEFAULT_API_HOST,
)
+from pyadtpulse.exceptions import (
+ PulseAuthenticationError,
+ PulseClientConnectionError,
+ PulseConnectionError,
+ PulseGatewayOfflineError,
+ PulseLoginException,
+ PulseServerConnectionError,
+ PulseServiceTemporarilyUnavailableError,
+)
from pyadtpulse.site import ADTPulseSite
-from pyadtpulse.util import AuthenticationException
USER = "adtpulse_user"
PASSWD = "adtpulse_password"
@@ -33,14 +40,22 @@
RELOGIN_INTERVAL = "relogin_interval"
SERVICE_HOST = "service_host"
POLL_INTERVAL = "poll_interval"
-
-BOOLEAN_PARAMS = {USE_ASYNC, DEBUG_LOCKS, PULSE_DEBUG, TEST_ALARM}
+DETAILED_DEBUG_LOGGING = "detailed_debug_logging"
+
+BOOLEAN_PARAMS = {
+ USE_ASYNC,
+ DEBUG_LOCKS,
+ PULSE_DEBUG,
+ TEST_ALARM,
+ DETAILED_DEBUG_LOGGING,
+}
INT_PARAMS = {SLEEP_INTERVAL, KEEPALIVE_INTERVAL, RELOGIN_INTERVAL}
FLOAT_PARAMS = {POLL_INTERVAL}
# Default values
DEFAULT_USE_ASYNC = True
DEFAULT_DEBUG = False
+DEFAULT_DETAILED_DEBUG_LOGGING = False
DEFAULT_TEST_ALARM = False
DEFAULT_SLEEP_INTERVAL = 5
DEFAULT_DEBUG_LOCKS = False
@@ -96,6 +111,12 @@ def handle_args() -> argparse.Namespace:
default=None,
help="Set True to enable debugging",
)
+ parser.add_argument(
+ f"--{DETAILED_DEBUG_LOGGING}",
+ type=bool,
+ default=None,
+ help="Set True to enable detailed debug logging",
+ )
parser.add_argument(
f"--{TEST_ALARM}",
type=bool,
@@ -162,6 +183,11 @@ def handle_args() -> argparse.Namespace:
args.debug_locks if args.debug_locks is not None else DEFAULT_DEBUG_LOCKS
)
args.debug = args.debug if args.debug is not None else DEFAULT_DEBUG
+ args.detailed_debug_logging = (
+ args.detailed_debug_logging
+ if args.detailed_debug_logging is not None
+ else DEFAULT_DETAILED_DEBUG_LOGGING
+ )
args.test_alarm = (
args.test_alarm if args.test_alarm is not None else DEFAULT_TEST_ALARM
)
@@ -189,7 +215,7 @@ def handle_args() -> argparse.Namespace:
return args
-def load_parameters_from_json(json_file: str) -> Optional[Dict]:
+def load_parameters_from_json(json_file: str) -> dict | None:
"""Load parameters from a JSON file.
Args:
@@ -353,6 +379,7 @@ def sync_example(
poll_interval: float,
keepalive_interval: int,
relogin_interval: int,
+ detailed_debug_logging: bool,
) -> None:
"""Run example of sync pyadtpulse calls.
@@ -365,23 +392,37 @@ def sync_example(
debug_locks: bool: True to enable thread lock debugging
keepalive_interval (int): keepalive interval in minutes
relogin_interval (int): relogin interval in minutes
+ detailed_debug_logging (bool): True to enable detailed debug logging
"""
- try:
- adt = PyADTPulse(
- username,
- password,
- fingerprint,
- debug_locks=debug_locks,
- keepalive_interval=keepalive_interval,
- relogin_interval=relogin_interval,
- )
- except AuthenticationException:
- print("Invalid credentials for ADT Pulse site")
- sys.exit()
- except BaseException as e:
- print("Received exception logging into ADT Pulse site")
- print(f"{e}")
- sys.exit()
+ while True:
+ try:
+ adt = PyADTPulse(
+ username,
+ password,
+ fingerprint,
+ debug_locks=debug_locks,
+ keepalive_interval=keepalive_interval,
+ relogin_interval=relogin_interval,
+ detailed_debug_logging=detailed_debug_logging,
+ )
+ break
+ except PulseLoginException as e:
+ print(f"ADT Pulse login failed with authentication error: {e}")
+ return
+ except (PulseClientConnectionError, PulseServerConnectionError) as e:
+ backoff_interval = e.backoff.get_current_backoff_interval()
+ print(
+ f"ADT Pulse login failed with connection error: {e}, retrying in {backoff_interval} seconds"
+ )
+ sleep(backoff_interval)
+ continue
+ except PulseServiceTemporarilyUnavailableError as e:
+ backoff_interval = e.backoff.expiration_time - time()
+ print(
+ f"ADT Pulse login failed with service unavailable error: {e}, retrying in {backoff_interval} seconds"
+ )
+ sleep(backoff_interval)
+ continue
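+    # Note: client/server connection errors expose an incrementing backoff
+    # interval, while the service-unavailable error carries an absolute
+    # expiration time (hence the time() subtraction above).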
if not adt.is_connected:
print("Error: Could not log into ADT Pulse site")
@@ -406,15 +447,33 @@ def sync_example(
test_alarm(adt.site, adt)
done = False
+ have_exception = False
while not done:
try:
- print_site(adt.site)
- print("----")
- if not adt.site.zones:
- print("Error, no zones exist, exiting...")
+ if not have_exception:
+ print_site(adt.site)
+ print("----")
+ if not adt.site.zones:
+ print("Error, no zones exist, exiting...")
+ done = True
+ break
+ have_updates = False
+ try:
+ have_updates = adt.updates_exist
+ have_exception = False
+ except PulseGatewayOfflineError:
+ print("ADT Pulse gateway is offline, re-polling")
+ have_exception = True
+ continue
+ except PulseConnectionError as ex:
+                print(f"ADT Pulse connection error: {ex.args[0]}, re-polling")
+ have_exception = True
+ continue
+ except PulseAuthenticationError as ex:
+                print(f"ADT Pulse authentication error: {ex.args[0]}, exiting...")
done = True
break
- if adt.updates_exist:
+ if have_updates and not have_exception:
print("Updates exist, refreshing")
# Don't need to explicitly call update() anymore
# Background thread will already have updated
@@ -450,7 +509,8 @@ async def async_test_alarm(adt: PyADTPulse) -> None:
print("Arming stay pending check succeeded")
else:
print(
- f"FAIL: Arming home pending check failed {adt.site.alarm_control_panel} "
+ "FAIL: Arming home pending check failed "
+ f"{adt.site.alarm_control_panel} "
)
await adt.wait_for_update()
if adt.site.alarm_control_panel.is_home:
@@ -459,7 +519,6 @@ async def async_test_alarm(adt: PyADTPulse) -> None:
while not adt.site.alarm_control_panel.is_home:
pprint(f"FAIL: Arm stay value incorrect {adt.site.alarm_control_panel}")
await adt.wait_for_update()
-
print("Testing invalid alarm state change from armed home to armed away")
if await adt.site.async_arm_away():
print(
@@ -498,7 +557,7 @@ async def async_test_alarm(adt: PyADTPulse) -> None:
f"{adt.site.alarm_control_panel}"
)
await adt.wait_for_update()
- print("Test finally succeeded")
+ print("Test finally succeeded")
print("Testing disarming twice")
if await adt.site.async_disarm():
print("Double disarm call succeeded")
@@ -521,7 +580,7 @@ async def async_test_alarm(adt: PyADTPulse) -> None:
f"{adt.site.alarm_control_panel}"
)
await adt.wait_for_update()
- print("Test finally succeeded")
+ print("Test finally succeeded")
else:
print("Disarming failed")
print("Arming alarm away")
@@ -541,7 +600,7 @@ async def async_test_alarm(adt: PyADTPulse) -> None:
"f{adt.site.alarm_control_panel}"
)
await adt.wait_for_update()
- print("Test finally succeeded")
+ print("Test finally succeeded")
else:
print("Arm away failed")
await adt.site.async_disarm()
@@ -557,6 +616,7 @@ async def async_example(
poll_interval: float,
keepalive_interval: int,
relogin_interval: int,
+ detailed_debug_logging: bool,
) -> None:
"""Run example of pytadtpulse async usage.
@@ -569,6 +629,7 @@ async def async_example(
poll_interval (float): polling interval in seconds
keepalive_interval (int): keepalive interval in minutes
relogin_interval (int): relogin interval in minutes
+ detailed_debug_logging (bool): enable detailed debug logging
"""
adt = PyADTPulse(
username,
@@ -578,11 +639,30 @@ async def async_example(
debug_locks=debug_locks,
keepalive_interval=keepalive_interval,
relogin_interval=relogin_interval,
+ detailed_debug_logging=detailed_debug_logging,
)
- if not await adt.async_login():
- print("ADT Pulse login failed")
- return
+ while True:
+ try:
+ await adt.async_login()
+ break
+ except PulseLoginException as e:
+ print(f"ADT Pulse login failed with authentication error: {e}")
+ return
+ except (PulseClientConnectionError, PulseServerConnectionError) as e:
+ backoff_interval = e.backoff.get_current_backoff_interval()
+ print(
+ f"ADT Pulse login failed with connection error: {e}, retrying in {backoff_interval} seconds"
+ )
+ await asyncio.sleep(backoff_interval)
+ continue
+ except PulseServiceTemporarilyUnavailableError as e:
+ backoff_interval = e.backoff.expiration_time - time()
+ print(
+ f"ADT Pulse login failed with service unavailable error: {e}, retrying in {backoff_interval} seconds"
+ )
+ await asyncio.sleep(backoff_interval)
+ continue
if not adt.is_connected:
print("Error: could not log into ADT Pulse site")
@@ -604,20 +684,40 @@ async def async_example(
await async_test_alarm(adt)
done = False
+ have_exception = False
while not done:
try:
- print(f"Gateway online: {adt.site.gateway.is_online}")
- print_site(adt.site)
- print("----")
- if not adt.site.zones:
- print("No zones exist, exiting...")
+ if not have_exception:
+ print(f"Gateway online: {adt.site.gateway.is_online}")
+ print_site(adt.site)
+ print("----")
+ if not adt.site.zones:
+ print("No zones exist, exiting...")
+ done = True
+ break
+ print("\nZones:")
+ pprint(adt.site.zones, compact=True)
+ try:
+ await adt.wait_for_update()
+ have_exception = False
+ except PulseGatewayOfflineError as ex:
+ print(
+ f"ADT Pulse gateway is offline, re-polling in {ex.backoff.get_current_backoff_interval()}"
+ )
+ have_exception = True
+ continue
+ except (PulseClientConnectionError, PulseServerConnectionError) as ex:
+ print(
+ f"ADT Pulse connection error: {ex.args[0]}, re-polling in {ex.backoff.get_current_backoff_interval()}"
+ )
+ have_exception = True
+ continue
+ except PulseAuthenticationError as ex:
+                print(f"ADT Pulse authentication error: {ex.args[0]}, exiting...")
done = True
break
- print("\nZones:")
- pprint(adt.site.zones, compact=True)
- await adt.wait_for_update()
print("Updates exist, refreshing")
- # no need to call an update method
+ # no need to call an update method
except KeyboardInterrupt:
print("exiting...")
done = True
@@ -656,6 +756,7 @@ def main():
args.poll_interval,
args.keepalive_interval,
args.relogin_interval,
+ args.detailed_debug_logging,
)
else:
asyncio.run(
@@ -668,6 +769,7 @@ def main():
args.poll_interval,
args.keepalive_interval,
args.relogin_interval,
+ args.detailed_debug_logging,
)
)
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 0000000..3717558
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,1430 @@
+# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
+
+[[package]]
+name = "aiohttp"
+version = "3.9.1"
+description = "Async http client/server framework (asyncio)"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1f80197f8b0b846a8d5cf7b7ec6084493950d0882cc5537fb7b96a69e3c8590"},
+ {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72444d17777865734aa1a4d167794c34b63e5883abb90356a0364a28904e6c0"},
+ {file = "aiohttp-3.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b05d5cbe9dafcdc733262c3a99ccf63d2f7ce02543620d2bd8db4d4f7a22f83"},
+ {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c4fa235d534b3547184831c624c0b7c1e262cd1de847d95085ec94c16fddcd5"},
+ {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:289ba9ae8e88d0ba16062ecf02dd730b34186ea3b1e7489046fc338bdc3361c4"},
+ {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bff7e2811814fa2271be95ab6e84c9436d027a0e59665de60edf44e529a42c1f"},
+ {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81b77f868814346662c96ab36b875d7814ebf82340d3284a31681085c051320f"},
+ {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b9c7426923bb7bd66d409da46c41e3fb40f5caf679da624439b9eba92043fa6"},
+ {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8d44e7bf06b0c0a70a20f9100af9fcfd7f6d9d3913e37754c12d424179b4e48f"},
+ {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22698f01ff5653fe66d16ffb7658f582a0ac084d7da1323e39fd9eab326a1f26"},
+ {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ca7ca5abfbfe8d39e653870fbe8d7710be7a857f8a8386fc9de1aae2e02ce7e4"},
+ {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8d7f98fde213f74561be1d6d3fa353656197f75d4edfbb3d94c9eb9b0fc47f5d"},
+ {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5216b6082c624b55cfe79af5d538e499cd5f5b976820eac31951fb4325974501"},
+ {file = "aiohttp-3.9.1-cp310-cp310-win32.whl", hash = "sha256:0e7ba7ff228c0d9a2cd66194e90f2bca6e0abca810b786901a569c0de082f489"},
+ {file = "aiohttp-3.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:c7e939f1ae428a86e4abbb9a7c4732bf4706048818dfd979e5e2839ce0159f23"},
+ {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:df9cf74b9bc03d586fc53ba470828d7b77ce51b0582d1d0b5b2fb673c0baa32d"},
+ {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ecca113f19d5e74048c001934045a2b9368d77b0b17691d905af18bd1c21275e"},
+ {file = "aiohttp-3.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8cef8710fb849d97c533f259103f09bac167a008d7131d7b2b0e3a33269185c0"},
+ {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea94403a21eb94c93386d559bce297381609153e418a3ffc7d6bf772f59cc35"},
+ {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91c742ca59045dce7ba76cab6e223e41d2c70d79e82c284a96411f8645e2afff"},
+ {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c93b7c2e52061f0925c3382d5cb8980e40f91c989563d3d32ca280069fd6a87"},
+ {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee2527134f95e106cc1653e9ac78846f3a2ec1004cf20ef4e02038035a74544d"},
+ {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11ff168d752cb41e8492817e10fb4f85828f6a0142b9726a30c27c35a1835f01"},
+ {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b8c3a67eb87394386847d188996920f33b01b32155f0a94f36ca0e0c635bf3e3"},
+ {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c7b5d5d64e2a14e35a9240b33b89389e0035e6de8dbb7ffa50d10d8b65c57449"},
+ {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:69985d50a2b6f709412d944ffb2e97d0be154ea90600b7a921f95a87d6f108a2"},
+ {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:c9110c06eaaac7e1f5562caf481f18ccf8f6fdf4c3323feab28a93d34cc646bd"},
+ {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737e69d193dac7296365a6dcb73bbbf53bb760ab25a3727716bbd42022e8d7a"},
+ {file = "aiohttp-3.9.1-cp311-cp311-win32.whl", hash = "sha256:4ee8caa925aebc1e64e98432d78ea8de67b2272252b0a931d2ac3bd876ad5544"},
+ {file = "aiohttp-3.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:a34086c5cc285be878622e0a6ab897a986a6e8bf5b67ecb377015f06ed316587"},
+ {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f800164276eec54e0af5c99feb9494c295118fc10a11b997bbb1348ba1a52065"},
+ {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:500f1c59906cd142d452074f3811614be04819a38ae2b3239a48b82649c08821"},
+ {file = "aiohttp-3.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0b0a6a36ed7e164c6df1e18ee47afbd1990ce47cb428739d6c99aaabfaf1b3af"},
+ {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69da0f3ed3496808e8cbc5123a866c41c12c15baaaead96d256477edf168eb57"},
+ {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176df045597e674fa950bf5ae536be85699e04cea68fa3a616cf75e413737eb5"},
+ {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b796b44111f0cab6bbf66214186e44734b5baab949cb5fb56154142a92989aeb"},
+ {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f27fdaadce22f2ef950fc10dcdf8048407c3b42b73779e48a4e76b3c35bca26c"},
+ {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcb6532b9814ea7c5a6a3299747c49de30e84472fa72821b07f5a9818bce0f66"},
+ {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:54631fb69a6e44b2ba522f7c22a6fb2667a02fd97d636048478db2fd8c4e98fe"},
+ {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4b4c452d0190c5a820d3f5c0f3cd8a28ace48c54053e24da9d6041bf81113183"},
+ {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:cae4c0c2ca800c793cae07ef3d40794625471040a87e1ba392039639ad61ab5b"},
+ {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:565760d6812b8d78d416c3c7cfdf5362fbe0d0d25b82fed75d0d29e18d7fc30f"},
+ {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54311eb54f3a0c45efb9ed0d0a8f43d1bc6060d773f6973efd90037a51cd0a3f"},
+ {file = "aiohttp-3.9.1-cp312-cp312-win32.whl", hash = "sha256:85c3e3c9cb1d480e0b9a64c658cd66b3cfb8e721636ab8b0e746e2d79a7a9eed"},
+ {file = "aiohttp-3.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:11cb254e397a82efb1805d12561e80124928e04e9c4483587ce7390b3866d213"},
+ {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8a22a34bc594d9d24621091d1b91511001a7eea91d6652ea495ce06e27381f70"},
+ {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:598db66eaf2e04aa0c8900a63b0101fdc5e6b8a7ddd805c56d86efb54eb66672"},
+ {file = "aiohttp-3.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c9376e2b09895c8ca8b95362283365eb5c03bdc8428ade80a864160605715f1"},
+ {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41473de252e1797c2d2293804e389a6d6986ef37cbb4a25208de537ae32141dd"},
+ {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c5857612c9813796960c00767645cb5da815af16dafb32d70c72a8390bbf690"},
+ {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffcd828e37dc219a72c9012ec44ad2e7e3066bec6ff3aaa19e7d435dbf4032ca"},
+ {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:219a16763dc0294842188ac8a12262b5671817042b35d45e44fd0a697d8c8361"},
+ {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f694dc8a6a3112059258a725a4ebe9acac5fe62f11c77ac4dcf896edfa78ca28"},
+ {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bcc0ea8d5b74a41b621ad4a13d96c36079c81628ccc0b30cfb1603e3dfa3a014"},
+ {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90ec72d231169b4b8d6085be13023ece8fa9b1bb495e4398d847e25218e0f431"},
+ {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:cf2a0ac0615842b849f40c4d7f304986a242f1e68286dbf3bd7a835e4f83acfd"},
+ {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:0e49b08eafa4f5707ecfb321ab9592717a319e37938e301d462f79b4e860c32a"},
+ {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2c59e0076ea31c08553e868cec02d22191c086f00b44610f8ab7363a11a5d9d8"},
+ {file = "aiohttp-3.9.1-cp38-cp38-win32.whl", hash = "sha256:4831df72b053b1eed31eb00a2e1aff6896fb4485301d4ccb208cac264b648db4"},
+ {file = "aiohttp-3.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:3135713c5562731ee18f58d3ad1bf41e1d8883eb68b363f2ffde5b2ea4b84cc7"},
+ {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cfeadf42840c1e870dc2042a232a8748e75a36b52d78968cda6736de55582766"},
+ {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70907533db712f7aa791effb38efa96f044ce3d4e850e2d7691abd759f4f0ae0"},
+ {file = "aiohttp-3.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cdefe289681507187e375a5064c7599f52c40343a8701761c802c1853a504558"},
+ {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7481f581251bb5558ba9f635db70908819caa221fc79ee52a7f58392778c636"},
+ {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49f0c1b3c2842556e5de35f122fc0f0b721334ceb6e78c3719693364d4af8499"},
+ {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d406b01a9f5a7e232d1b0d161b40c05275ffbcbd772dc18c1d5a570961a1ca4"},
+ {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d8e4450e7fe24d86e86b23cc209e0023177b6d59502e33807b732d2deb6975f"},
+ {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c0266cd6f005e99f3f51e583012de2778e65af6b73860038b968a0a8888487a"},
+ {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab221850108a4a063c5b8a70f00dd7a1975e5a1713f87f4ab26a46e5feac5a0e"},
+ {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c88a15f272a0ad3d7773cf3a37cc7b7d077cbfc8e331675cf1346e849d97a4e5"},
+ {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:237533179d9747080bcaad4d02083ce295c0d2eab3e9e8ce103411a4312991a0"},
+ {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:02ab6006ec3c3463b528374c4cdce86434e7b89ad355e7bf29e2f16b46c7dd6f"},
+ {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04fa38875e53eb7e354ece1607b1d2fdee2d175ea4e4d745f6ec9f751fe20c7c"},
+ {file = "aiohttp-3.9.1-cp39-cp39-win32.whl", hash = "sha256:82eefaf1a996060602f3cc1112d93ba8b201dbf5d8fd9611227de2003dddb3b7"},
+ {file = "aiohttp-3.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:9b05d33ff8e6b269e30a7957bd3244ffbce2a7a35a81b81c382629b80af1a8bf"},
+ {file = "aiohttp-3.9.1.tar.gz", hash = "sha256:8fc49a87ac269d4529da45871e2ffb6874e87779c3d0e2ccd813c0899221239d"},
+]
+
+[package.dependencies]
+aiosignal = ">=1.1.2"
+attrs = ">=17.3.0"
+frozenlist = ">=1.1.1"
+multidict = ">=4.5,<7.0"
+yarl = ">=1.0,<2.0"
+
+[package.extras]
+speedups = ["Brotli", "aiodns", "brotlicffi"]
+
+[[package]]
+name = "aioresponses"
+version = "0.7.6"
+description = "Mock out requests made by ClientSession from aiohttp package"
+optional = false
+python-versions = "*"
+files = [
+ {file = "aioresponses-0.7.6-py2.py3-none-any.whl", hash = "sha256:d2c26defbb9b440ea2685ec132e90700907fd10bcca3e85ec2f157219f0d26f7"},
+ {file = "aioresponses-0.7.6.tar.gz", hash = "sha256:f795d9dbda2d61774840e7e32f5366f45752d1adc1b74c9362afd017296c7ee1"},
+]
+
+[package.dependencies]
+aiohttp = ">=3.3.0,<4.0.0"
+
+[[package]]
+name = "aiosignal"
+version = "1.3.1"
+description = "aiosignal: a list of registered asynchronous callbacks"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"},
+ {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"},
+]
+
+[package.dependencies]
+frozenlist = ">=1.1.0"
+
+[[package]]
+name = "astroid"
+version = "3.0.1"
+description = "An abstract syntax tree for Python with inference support."
+optional = false
+python-versions = ">=3.8.0"
+files = [
+ {file = "astroid-3.0.1-py3-none-any.whl", hash = "sha256:7d5895c9825e18079c5aeac0572bc2e4c83205c95d416e0b4fee8bc361d2d9ca"},
+ {file = "astroid-3.0.1.tar.gz", hash = "sha256:86b0bb7d7da0be1a7c4aedb7974e391b32d4ed89e33de6ed6902b4b15c97577e"},
+]
+
+[[package]]
+name = "attrs"
+version = "23.1.0"
+description = "Classes Without Boilerplate"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"},
+ {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"},
+]
+
+[package.extras]
+cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
+dev = ["attrs[docs,tests]", "pre-commit"]
+docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
+tests = ["attrs[tests-no-zope]", "zope-interface"]
+tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+
+[[package]]
+name = "beautifulsoup4"
+version = "4.12.2"
+description = "Screen-scraping library"
+optional = false
+python-versions = ">=3.6.0"
+files = [
+ {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"},
+ {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"},
+]
+
+[package.dependencies]
+soupsieve = ">1.2"
+
+[package.extras]
+html5lib = ["html5lib"]
+lxml = ["lxml"]
+
+[[package]]
+name = "black"
+version = "23.10.1"
+description = "The uncompromising code formatter."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "black-23.10.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:ec3f8e6234c4e46ff9e16d9ae96f4ef69fa328bb4ad08198c8cee45bb1f08c69"},
+ {file = "black-23.10.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:1b917a2aa020ca600483a7b340c165970b26e9029067f019e3755b56e8dd5916"},
+ {file = "black-23.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c74de4c77b849e6359c6f01987e94873c707098322b91490d24296f66d067dc"},
+ {file = "black-23.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:7b4d10b0f016616a0d93d24a448100adf1699712fb7a4efd0e2c32bbb219b173"},
+ {file = "black-23.10.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b15b75fc53a2fbcac8a87d3e20f69874d161beef13954747e053bca7a1ce53a0"},
+ {file = "black-23.10.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:e293e4c2f4a992b980032bbd62df07c1bcff82d6964d6c9496f2cd726e246ace"},
+ {file = "black-23.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d56124b7a61d092cb52cce34182a5280e160e6aff3137172a68c2c2c4b76bcb"},
+ {file = "black-23.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:3f157a8945a7b2d424da3335f7ace89c14a3b0625e6593d21139c2d8214d55ce"},
+ {file = "black-23.10.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:cfcce6f0a384d0da692119f2d72d79ed07c7159879d0bb1bb32d2e443382bf3a"},
+ {file = "black-23.10.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:33d40f5b06be80c1bbce17b173cda17994fbad096ce60eb22054da021bf933d1"},
+ {file = "black-23.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:840015166dbdfbc47992871325799fd2dc0dcf9395e401ada6d88fe11498abad"},
+ {file = "black-23.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:037e9b4664cafda5f025a1728c50a9e9aedb99a759c89f760bd83730e76ba884"},
+ {file = "black-23.10.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:7cb5936e686e782fddb1c73f8aa6f459e1ad38a6a7b0e54b403f1f05a1507ee9"},
+ {file = "black-23.10.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:7670242e90dc129c539e9ca17665e39a146a761e681805c54fbd86015c7c84f7"},
+ {file = "black-23.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed45ac9a613fb52dad3b61c8dea2ec9510bf3108d4db88422bacc7d1ba1243d"},
+ {file = "black-23.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6d23d7822140e3fef190734216cefb262521789367fbdc0b3f22af6744058982"},
+ {file = "black-23.10.1-py3-none-any.whl", hash = "sha256:d431e6739f727bb2e0495df64a6c7a5310758e87505f5f8cde9ff6c0f2d7e4fe"},
+ {file = "black-23.10.1.tar.gz", hash = "sha256:1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258"},
+]
+
+[package.dependencies]
+click = ">=8.0.0"
+mypy-extensions = ">=0.4.3"
+packaging = ">=22.0"
+pathspec = ">=0.9.0"
+platformdirs = ">=2"
+
+[package.extras]
+colorama = ["colorama (>=0.4.3)"]
+d = ["aiohttp (>=3.7.4)"]
+jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
+uvloop = ["uvloop (>=0.15.2)"]
+
+[[package]]
+name = "bs4"
+version = "0.0.1"
+description = "Dummy package for Beautiful Soup"
+optional = false
+python-versions = "*"
+files = [
+ {file = "bs4-0.0.1.tar.gz", hash = "sha256:36ecea1fd7cc5c0c6e4a1ff075df26d50da647b75376626cc186e2212886dd3a"},
+]
+
+[package.dependencies]
+beautifulsoup4 = "*"
+
+[[package]]
+name = "cfgv"
+version = "3.4.0"
+description = "Validate configuration and produce human readable error messages."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
+ {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
+]
+
+[[package]]
+name = "click"
+version = "8.1.7"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
+ {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "coverage"
+version = "7.3.4"
+description = "Code coverage measurement for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "coverage-7.3.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aff2bd3d585969cc4486bfc69655e862028b689404563e6b549e6a8244f226df"},
+ {file = "coverage-7.3.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4353923f38d752ecfbd3f1f20bf7a3546993ae5ecd7c07fd2f25d40b4e54571"},
+ {file = "coverage-7.3.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea473c37872f0159294f7073f3fa72f68b03a129799f3533b2bb44d5e9fa4f82"},
+ {file = "coverage-7.3.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5214362abf26e254d749fc0c18af4c57b532a4bfde1a057565616dd3b8d7cc94"},
+ {file = "coverage-7.3.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f99b7d3f7a7adfa3d11e3a48d1a91bb65739555dd6a0d3fa68aa5852d962e5b1"},
+ {file = "coverage-7.3.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:74397a1263275bea9d736572d4cf338efaade2de9ff759f9c26bcdceb383bb49"},
+ {file = "coverage-7.3.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f154bd866318185ef5865ace5be3ac047b6d1cc0aeecf53bf83fe846f4384d5d"},
+ {file = "coverage-7.3.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e0d84099ea7cba9ff467f9c6f747e3fc3906e2aadac1ce7b41add72e8d0a3712"},
+ {file = "coverage-7.3.4-cp310-cp310-win32.whl", hash = "sha256:3f477fb8a56e0c603587b8278d9dbd32e54bcc2922d62405f65574bd76eba78a"},
+ {file = "coverage-7.3.4-cp310-cp310-win_amd64.whl", hash = "sha256:c75738ce13d257efbb6633a049fb2ed8e87e2e6c2e906c52d1093a4d08d67c6b"},
+ {file = "coverage-7.3.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:997aa14b3e014339d8101b9886063c5d06238848905d9ad6c6eabe533440a9a7"},
+ {file = "coverage-7.3.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a9c5bc5db3eb4cd55ecb8397d8e9b70247904f8eca718cc53c12dcc98e59fc8"},
+ {file = "coverage-7.3.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27ee94f088397d1feea3cb524e4313ff0410ead7d968029ecc4bc5a7e1d34fbf"},
+ {file = "coverage-7.3.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ce03e25e18dd9bf44723e83bc202114817f3367789052dc9e5b5c79f40cf59d"},
+ {file = "coverage-7.3.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85072e99474d894e5df582faec04abe137b28972d5e466999bc64fc37f564a03"},
+ {file = "coverage-7.3.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a877810ef918d0d345b783fc569608804f3ed2507bf32f14f652e4eaf5d8f8d0"},
+ {file = "coverage-7.3.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9ac17b94ab4ca66cf803f2b22d47e392f0977f9da838bf71d1f0db6c32893cb9"},
+ {file = "coverage-7.3.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:36d75ef2acab74dc948d0b537ef021306796da551e8ac8b467810911000af66a"},
+ {file = "coverage-7.3.4-cp311-cp311-win32.whl", hash = "sha256:47ee56c2cd445ea35a8cc3ad5c8134cb9bece3a5cb50bb8265514208d0a65928"},
+ {file = "coverage-7.3.4-cp311-cp311-win_amd64.whl", hash = "sha256:11ab62d0ce5d9324915726f611f511a761efcca970bd49d876cf831b4de65be5"},
+ {file = "coverage-7.3.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:33e63c578f4acce1b6cd292a66bc30164495010f1091d4b7529d014845cd9bee"},
+ {file = "coverage-7.3.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:782693b817218169bfeb9b9ba7f4a9f242764e180ac9589b45112571f32a0ba6"},
+ {file = "coverage-7.3.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c4277ddaad9293454da19121c59f2d850f16bcb27f71f89a5c4836906eb35ef"},
+ {file = "coverage-7.3.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d892a19ae24b9801771a5a989fb3e850bd1ad2e2b6e83e949c65e8f37bc67a1"},
+ {file = "coverage-7.3.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3024ec1b3a221bd10b5d87337d0373c2bcaf7afd86d42081afe39b3e1820323b"},
+ {file = "coverage-7.3.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a1c3e9d2bbd6f3f79cfecd6f20854f4dc0c6e0ec317df2b265266d0dc06535f1"},
+ {file = "coverage-7.3.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e91029d7f151d8bf5ab7d8bfe2c3dbefd239759d642b211a677bc0709c9fdb96"},
+ {file = "coverage-7.3.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6879fe41c60080aa4bb59703a526c54e0412b77e649a0d06a61782ecf0853ee1"},
+ {file = "coverage-7.3.4-cp312-cp312-win32.whl", hash = "sha256:fd2f8a641f8f193968afdc8fd1697e602e199931012b574194052d132a79be13"},
+ {file = "coverage-7.3.4-cp312-cp312-win_amd64.whl", hash = "sha256:d1d0ce6c6947a3a4aa5479bebceff2c807b9f3b529b637e2b33dea4468d75fc7"},
+ {file = "coverage-7.3.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:36797b3625d1da885b369bdaaa3b0d9fb8865caed3c2b8230afaa6005434aa2f"},
+ {file = "coverage-7.3.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfed0ec4b419fbc807dec417c401499ea869436910e1ca524cfb4f81cf3f60e7"},
+ {file = "coverage-7.3.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f97ff5a9fc2ca47f3383482858dd2cb8ddbf7514427eecf5aa5f7992d0571429"},
+ {file = "coverage-7.3.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:607b6c6b35aa49defaebf4526729bd5238bc36fe3ef1a417d9839e1d96ee1e4c"},
+ {file = "coverage-7.3.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8e258dcc335055ab59fe79f1dec217d9fb0cdace103d6b5c6df6b75915e7959"},
+ {file = "coverage-7.3.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a02ac7c51819702b384fea5ee033a7c202f732a2a2f1fe6c41e3d4019828c8d3"},
+ {file = "coverage-7.3.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b710869a15b8caf02e31d16487a931dbe78335462a122c8603bb9bd401ff6fb2"},
+ {file = "coverage-7.3.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c6a23ae9348a7a92e7f750f9b7e828448e428e99c24616dec93a0720342f241d"},
+ {file = "coverage-7.3.4-cp38-cp38-win32.whl", hash = "sha256:758ebaf74578b73f727acc4e8ab4b16ab6f22a5ffd7dd254e5946aba42a4ce76"},
+ {file = "coverage-7.3.4-cp38-cp38-win_amd64.whl", hash = "sha256:309ed6a559bc942b7cc721f2976326efbfe81fc2b8f601c722bff927328507dc"},
+ {file = "coverage-7.3.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:aefbb29dc56317a4fcb2f3857d5bce9b881038ed7e5aa5d3bcab25bd23f57328"},
+ {file = "coverage-7.3.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:183c16173a70caf92e2dfcfe7c7a576de6fa9edc4119b8e13f91db7ca33a7923"},
+ {file = "coverage-7.3.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a4184dcbe4f98d86470273e758f1d24191ca095412e4335ff27b417291f5964"},
+ {file = "coverage-7.3.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93698ac0995516ccdca55342599a1463ed2e2d8942316da31686d4d614597ef9"},
+ {file = "coverage-7.3.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb220b3596358a86361139edce40d97da7458412d412e1e10c8e1970ee8c09ab"},
+ {file = "coverage-7.3.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5b14abde6f8d969e6b9dd8c7a013d9a2b52af1235fe7bebef25ad5c8f47fa18"},
+ {file = "coverage-7.3.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:610afaf929dc0e09a5eef6981edb6a57a46b7eceff151947b836d869d6d567c1"},
+ {file = "coverage-7.3.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d6ed790728fb71e6b8247bd28e77e99d0c276dff952389b5388169b8ca7b1c28"},
+ {file = "coverage-7.3.4-cp39-cp39-win32.whl", hash = "sha256:c15fdfb141fcf6a900e68bfa35689e1256a670db32b96e7a931cab4a0e1600e5"},
+ {file = "coverage-7.3.4-cp39-cp39-win_amd64.whl", hash = "sha256:38d0b307c4d99a7aca4e00cad4311b7c51b7ac38fb7dea2abe0d182dd4008e05"},
+ {file = "coverage-7.3.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b1e0f25ae99cf247abfb3f0fac7ae25739e4cd96bf1afa3537827c576b4847e5"},
+ {file = "coverage-7.3.4.tar.gz", hash = "sha256:020d56d2da5bc22a0e00a5b0d54597ee91ad72446fa4cf1b97c35022f6b6dbf0"},
+]
+
+[package.extras]
+toml = ["tomli"]
+
+[[package]]
+name = "dill"
+version = "0.3.7"
+description = "serialize all of Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"},
+ {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"},
+]
+
+[package.extras]
+graph = ["objgraph (>=1.7.2)"]
+
+[[package]]
+name = "distlib"
+version = "0.3.7"
+description = "Distribution utilities"
+optional = false
+python-versions = "*"
+files = [
+ {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"},
+ {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"},
+]
+
+[[package]]
+name = "execnet"
+version = "2.0.2"
+description = "execnet: rapid multi-Python deployment"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"},
+ {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"},
+]
+
+[package.extras]
+testing = ["hatch", "pre-commit", "pytest", "tox"]
+
+[[package]]
+name = "filelock"
+version = "3.13.1"
+description = "A platform independent file lock."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"},
+ {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"},
+]
+
+[package.extras]
+docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
+typing = ["typing-extensions (>=4.8)"]
+
+[[package]]
+name = "freezegun"
+version = "1.2.2"
+description = "Let your Python tests travel through time"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "freezegun-1.2.2-py3-none-any.whl", hash = "sha256:ea1b963b993cb9ea195adbd893a48d573fda951b0da64f60883d7e988b606c9f"},
+ {file = "freezegun-1.2.2.tar.gz", hash = "sha256:cd22d1ba06941384410cd967d8a99d5ae2442f57dfafeff2fda5de8dc5c05446"},
+]
+
+[package.dependencies]
+python-dateutil = ">=2.7"
+
+[[package]]
+name = "frozenlist"
+version = "1.4.0"
+description = "A list-like structure which implements collections.abc.MutableSequence"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"},
+ {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"},
+ {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"},
+ {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"},
+ {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"},
+ {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"},
+ {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"},
+ {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"},
+ {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"},
+ {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"},
+ {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"},
+ {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"},
+ {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"},
+ {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"},
+ {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"},
+ {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"},
+ {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"},
+ {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"},
+ {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"},
+ {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"},
+ {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"},
+ {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"},
+ {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"},
+ {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"},
+ {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"},
+ {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"},
+ {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"},
+ {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"},
+ {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"},
+ {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"},
+ {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"},
+ {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"},
+ {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"},
+ {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"},
+ {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"},
+ {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"},
+ {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"},
+ {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"},
+ {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"},
+ {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"},
+ {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"},
+ {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"},
+ {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"},
+ {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"},
+ {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"},
+ {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"},
+ {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"},
+ {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"},
+ {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"},
+ {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"},
+ {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"},
+ {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"},
+ {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"},
+ {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"},
+ {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"},
+ {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"},
+ {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"},
+ {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"},
+ {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"},
+ {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"},
+ {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"},
+]
+
+[[package]]
+name = "identify"
+version = "2.5.31"
+description = "File identification library for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "identify-2.5.31-py2.py3-none-any.whl", hash = "sha256:90199cb9e7bd3c5407a9b7e81b4abec4bb9d249991c79439ec8af740afc6293d"},
+ {file = "identify-2.5.31.tar.gz", hash = "sha256:7736b3c7a28233637e3c36550646fc6389bedd74ae84cb788200cc8e2dd60b75"},
+]
+
+[package.extras]
+license = ["ukkonen"]
+
+[[package]]
+name = "idna"
+version = "3.4"
+description = "Internationalized Domain Names in Applications (IDNA)"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
+ {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
+]
+
+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+description = "brain-dead simple config-ini parsing"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
+ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
+]
+
+[[package]]
+name = "isort"
+version = "5.12.0"
+description = "A Python utility / library to sort Python imports."
+optional = false
+python-versions = ">=3.8.0"
+files = [
+ {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"},
+ {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"},
+]
+
+[package.extras]
+colors = ["colorama (>=0.4.3)"]
+pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"]
+plugins = ["setuptools"]
+requirements-deprecated-finder = ["pip-api", "pipreqs"]
+
+[[package]]
+name = "libcst"
+version = "1.1.0"
+description = "A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 programs."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "libcst-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:63f75656fd733dc20354c46253fde3cf155613e37643c3eaf6f8818e95b7a3d1"},
+ {file = "libcst-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8ae11eb1ea55a16dc0cdc61b41b29ac347da70fec14cc4381248e141ee2fbe6c"},
+ {file = "libcst-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bc745d0c06420fe2644c28d6ddccea9474fb68a2135904043676deb4fa1e6bc"},
+ {file = "libcst-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c1f2da45f1c45634090fd8672c15e0159fdc46853336686959b2d093b6e10fa"},
+ {file = "libcst-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:003e5e83a12eed23542c4ea20fdc8de830887cc03662432bb36f84f8c4841b81"},
+ {file = "libcst-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:3ebbb9732ae3cc4ae7a0e97890bed0a57c11d6df28790c2b9c869f7da653c7c7"},
+ {file = "libcst-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d68c34e3038d3d1d6324eb47744cbf13f2c65e1214cf49db6ff2a6603c1cd838"},
+ {file = "libcst-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9dffa1795c2804d183efb01c0f1efd20a7831db6a21a0311edf90b4100d67436"},
+ {file = "libcst-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc9b6ac36d7ec9db2f053014ea488086ca2ed9c322be104fbe2c71ca759da4bb"},
+ {file = "libcst-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b7a38ec4c1c009ac39027d51558b52851fb9234669ba5ba62283185963a31c"},
+ {file = "libcst-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5297a16e575be8173185e936b7765c89a3ca69d4ae217a4af161814a0f9745a7"},
+ {file = "libcst-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:7ccaf53925f81118aeaadb068a911fac8abaff608817d7343da280616a5ca9c1"},
+ {file = "libcst-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:75816647736f7e09c6120bdbf408456f99b248d6272277eed9a58cf50fb8bc7d"},
+ {file = "libcst-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c8f26250f87ca849a7303ed7a4fd6b2c7ac4dec16b7d7e68ca6a476d7c9bfcdb"},
+ {file = "libcst-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d37326bd6f379c64190a28947a586b949de3a76be00176b0732c8ee87d67ebe"},
+ {file = "libcst-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3d8cf974cfa2487b28f23f56c4bff90d550ef16505e58b0dca0493d5293784b"},
+ {file = "libcst-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d1271403509b0a4ee6ff7917c2d33b5a015f44d1e208abb1da06ba93b2a378"},
+ {file = "libcst-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:bca1841693941fdd18371824bb19a9702d5784cd347cb8231317dbdc7062c5bc"},
+ {file = "libcst-1.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f36f592e035ef84f312a12b75989dde6a5f6767fe99146cdae6a9ee9aff40dd0"},
+ {file = "libcst-1.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f561c9a84eca18be92f4ad90aa9bd873111efbea995449301719a1a7805dbc5c"},
+ {file = "libcst-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97fbc73c87e9040e148881041fd5ffa2a6ebf11f64b4ccb5b52e574b95df1a15"},
+ {file = "libcst-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99fdc1929703fd9e7408aed2e03f58701c5280b05c8911753a8d8619f7dfdda5"},
+ {file = "libcst-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bf69cbbab5016d938aac4d3ae70ba9ccb3f90363c588b3b97be434e6ba95403"},
+ {file = "libcst-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:fe41b33aa73635b1651f64633f429f7aa21f86d2db5748659a99d9b7b1ed2a90"},
+ {file = "libcst-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:73c086705ed34dbad16c62c9adca4249a556c1b022993d511da70ea85feaf669"},
+ {file = "libcst-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3a07ecfabbbb8b93209f952a365549e65e658831e9231649f4f4e4263cad24b1"},
+ {file = "libcst-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c653d9121d6572d8b7f8abf20f88b0a41aab77ff5a6a36e5a0ec0f19af0072e8"},
+ {file = "libcst-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f1cd308a4c2f71d5e4eec6ee693819933a03b78edb2e4cc5e3ad1afd5fb3f07"},
+ {file = "libcst-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8afb6101b8b3c86c5f9cec6b90ab4da16c3c236fe7396f88e8b93542bb341f7c"},
+ {file = "libcst-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:d22d1abfe49aa60fc61fa867e10875a9b3024ba5a801112f4d7ba42d8d53242e"},
+ {file = "libcst-1.1.0.tar.gz", hash = "sha256:0acbacb9a170455701845b7e940e2d7b9519db35a86768d86330a0b0deae1086"},
+]
+
+[package.dependencies]
+pyyaml = ">=5.2"
+typing-extensions = ">=3.7.4.2"
+typing-inspect = ">=0.4.0"
+
+[package.extras]
+dev = ["Sphinx (>=5.1.1)", "black (==23.9.1)", "build (>=0.10.0)", "coverage (>=4.5.4)", "fixit (==2.0.0.post1)", "flake8 (>=3.7.8,<5)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.1.2)", "jupyter (>=1.0.0)", "maturin (>=0.8.3,<0.16)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.18)", "setuptools-rust (>=1.5.2)", "setuptools-scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==2.2.0)", "usort (==1.0.7)"]
+
+[[package]]
+name = "mccabe"
+version = "0.7.0"
+description = "McCabe checker, plugin for flake8"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
+ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
+]
+
+[[package]]
+name = "multidict"
+version = "6.0.4"
+description = "multidict implementation"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"},
+ {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"},
+ {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"},
+ {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"},
+ {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"},
+ {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"},
+ {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"},
+ {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"},
+ {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"},
+ {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"},
+ {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"},
+ {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"},
+ {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"},
+ {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"},
+ {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"},
+]
+
+[[package]]
+name = "mypy"
+version = "1.6.1"
+description = "Optional static typing for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "mypy-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e5012e5cc2ac628177eaac0e83d622b2dd499e28253d4107a08ecc59ede3fc2c"},
+ {file = "mypy-1.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d8fbb68711905f8912e5af474ca8b78d077447d8f3918997fecbf26943ff3cbb"},
+ {file = "mypy-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a1ad938fee7d2d96ca666c77b7c494c3c5bd88dff792220e1afbebb2925b5e"},
+ {file = "mypy-1.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b96ae2c1279d1065413965c607712006205a9ac541895004a1e0d4f281f2ff9f"},
+ {file = "mypy-1.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:40b1844d2e8b232ed92e50a4bd11c48d2daa351f9deee6c194b83bf03e418b0c"},
+ {file = "mypy-1.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81af8adaa5e3099469e7623436881eff6b3b06db5ef75e6f5b6d4871263547e5"},
+ {file = "mypy-1.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8c223fa57cb154c7eab5156856c231c3f5eace1e0bed9b32a24696b7ba3c3245"},
+ {file = "mypy-1.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8032e00ce71c3ceb93eeba63963b864bf635a18f6c0c12da6c13c450eedb183"},
+ {file = "mypy-1.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c46b51de523817a0045b150ed11b56f9fff55f12b9edd0f3ed35b15a2809de0"},
+ {file = "mypy-1.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:19f905bcfd9e167159b3d63ecd8cb5e696151c3e59a1742e79bc3bcb540c42c7"},
+ {file = "mypy-1.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:82e469518d3e9a321912955cc702d418773a2fd1e91c651280a1bda10622f02f"},
+ {file = "mypy-1.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4473c22cc296425bbbce7e9429588e76e05bc7342da359d6520b6427bf76660"},
+ {file = "mypy-1.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59a0d7d24dfb26729e0a068639a6ce3500e31d6655df8557156c51c1cb874ce7"},
+ {file = "mypy-1.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cfd13d47b29ed3bbaafaff7d8b21e90d827631afda134836962011acb5904b71"},
+ {file = "mypy-1.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:eb4f18589d196a4cbe5290b435d135dee96567e07c2b2d43b5c4621b6501531a"},
+ {file = "mypy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:41697773aa0bf53ff917aa077e2cde7aa50254f28750f9b88884acea38a16169"},
+ {file = "mypy-1.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7274b0c57737bd3476d2229c6389b2ec9eefeb090bbaf77777e9d6b1b5a9d143"},
+ {file = "mypy-1.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbaf4662e498c8c2e352da5f5bca5ab29d378895fa2d980630656178bd607c46"},
+ {file = "mypy-1.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bb8ccb4724f7d8601938571bf3f24da0da791fe2db7be3d9e79849cb64e0ae85"},
+ {file = "mypy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:68351911e85145f582b5aa6cd9ad666c8958bcae897a1bfda8f4940472463c45"},
+ {file = "mypy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:49ae115da099dcc0922a7a895c1eec82c1518109ea5c162ed50e3b3594c71208"},
+ {file = "mypy-1.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b27958f8c76bed8edaa63da0739d76e4e9ad4ed325c814f9b3851425582a3cd"},
+ {file = "mypy-1.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:925cd6a3b7b55dfba252b7c4561892311c5358c6b5a601847015a1ad4eb7d332"},
+ {file = "mypy-1.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8f57e6b6927a49550da3d122f0cb983d400f843a8a82e65b3b380d3d7259468f"},
+ {file = "mypy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a43ef1c8ddfdb9575691720b6352761f3f53d85f1b57d7745701041053deff30"},
+ {file = "mypy-1.6.1-py3-none-any.whl", hash = "sha256:4cbe68ef919c28ea561165206a2dcb68591c50f3bcf777932323bc208d949cf1"},
+ {file = "mypy-1.6.1.tar.gz", hash = "sha256:4d01c00d09a0be62a4ca3f933e315455bde83f37f892ba4b08ce92f3cf44bcc1"},
+]
+
+[package.dependencies]
+mypy-extensions = ">=1.0.0"
+typing-extensions = ">=4.1.0"
+
+[package.extras]
+dmypy = ["psutil (>=4.0)"]
+install-types = ["pip"]
+reports = ["lxml"]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.0.0"
+description = "Type system extensions for programs checked with the mypy type checker."
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
+]
+
+[[package]]
+name = "nodeenv"
+version = "1.8.0"
+description = "Node.js virtual environment builder"
+optional = false
+python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
+files = [
+ {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"},
+ {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"},
+]
+
+[package.dependencies]
+setuptools = "*"
+
+[[package]]
+name = "packaging"
+version = "23.2"
+description = "Core utilities for Python packages"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
+ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
+]
+
+[[package]]
+name = "pathspec"
+version = "0.11.2"
+description = "Utility library for gitignore style pattern matching of file paths."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"},
+ {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"},
+]
+
+[[package]]
+name = "platformdirs"
+version = "3.11.0"
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"},
+ {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"},
+]
+
+[package.extras]
+docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"]
+
+[[package]]
+name = "pluggy"
+version = "1.3.0"
+description = "plugin and hook calling mechanisms for python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"},
+ {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"},
+]
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["pytest", "pytest-benchmark"]
+
+[[package]]
+name = "pre-commit"
+version = "3.5.0"
+description = "A framework for managing and maintaining multi-language pre-commit hooks."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660"},
+ {file = "pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32"},
+]
+
+[package.dependencies]
+cfgv = ">=2.0.0"
+identify = ">=1.0.0"
+nodeenv = ">=0.11.1"
+pyyaml = ">=5.1"
+virtualenv = ">=20.10.0"
+
+[[package]]
+name = "pycln"
+version = "2.3.0"
+description = "A formatter for finding and removing unused import statements."
+optional = false
+python-versions = ">=3.6.2,<4"
+files = [
+ {file = "pycln-2.3.0-py3-none-any.whl", hash = "sha256:d6731e17a60728b827211de2ca4bfc9b40ea1df99a12f3e0fd06a98a0c9e6caa"},
+ {file = "pycln-2.3.0.tar.gz", hash = "sha256:8759b36753234c8f95895a31dde329479ffed2218f49d1a1c77c7edccc02e09b"},
+]
+
+[package.dependencies]
+libcst = {version = ">=0.3.10", markers = "python_version >= \"3.7\""}
+pathspec = ">=0.9.0"
+pyyaml = ">=5.3.1"
+tomlkit = ">=0.11.1"
+typer = ">=0.4.1"
+
+[[package]]
+name = "pylint"
+version = "3.0.2"
+description = "python code static checker"
+optional = false
+python-versions = ">=3.8.0"
+files = [
+ {file = "pylint-3.0.2-py3-none-any.whl", hash = "sha256:60ed5f3a9ff8b61839ff0348b3624ceeb9e6c2a92c514d81c9cc273da3b6bcda"},
+ {file = "pylint-3.0.2.tar.gz", hash = "sha256:0d4c286ef6d2f66c8bfb527a7f8a629009e42c99707dec821a03e1b51a4c1496"},
+]
+
+[package.dependencies]
+astroid = ">=3.0.1,<=3.1.0-dev0"
+colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
+dill = [
+ {version = ">=0.3.7", markers = "python_version >= \"3.12\""},
+ {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""},
+]
+isort = ">=4.2.5,<6"
+mccabe = ">=0.6,<0.8"
+platformdirs = ">=2.2.0"
+tomlkit = ">=0.10.1"
+
+[package.extras]
+spelling = ["pyenchant (>=3.2,<4.0)"]
+testutils = ["gitpython (>3)"]
+
+[[package]]
+name = "pytest"
+version = "7.4.3"
+description = "pytest: simple powerful testing with Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"},
+ {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=0.12,<2.0"
+
+[package.extras]
+testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+
+[[package]]
+name = "pytest-aiohttp"
+version = "1.0.5"
+description = "Pytest plugin for aiohttp support"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-aiohttp-1.0.5.tar.gz", hash = "sha256:880262bc5951e934463b15e3af8bb298f11f7d4d3ebac970aab425aff10a780a"},
+ {file = "pytest_aiohttp-1.0.5-py3-none-any.whl", hash = "sha256:63a5360fd2f34dda4ab8e6baee4c5f5be4cd186a403cabd498fced82ac9c561e"},
+]
+
+[package.dependencies]
+aiohttp = ">=3.8.1"
+pytest = ">=6.1.0"
+pytest-asyncio = ">=0.17.2"
+
+[package.extras]
+testing = ["coverage (==6.2)", "mypy (==0.931)"]
+
+[[package]]
+name = "pytest-asyncio"
+version = "0.21.1"
+description = "Pytest support for asyncio"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"},
+ {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"},
+]
+
+[package.dependencies]
+pytest = ">=7.0.0"
+
+[package.extras]
+docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
+testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"]
+
+[[package]]
+name = "pytest-cov"
+version = "4.1.0"
+description = "Pytest plugin for measuring coverage."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"},
+ {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"},
+]
+
+[package.dependencies]
+coverage = {version = ">=5.2.1", extras = ["toml"]}
+pytest = ">=4.6"
+
+[package.extras]
+testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
+
+[[package]]
+name = "pytest-cover"
+version = "3.0.0"
+description = "Pytest plugin for measuring coverage. Forked from `pytest-cov`."
+optional = false
+python-versions = "*"
+files = [
+ {file = "pytest-cover-3.0.0.tar.gz", hash = "sha256:5bdb6c1cc3dd75583bb7bc2c57f5e1034a1bfcb79d27c71aceb0b16af981dbf4"},
+ {file = "pytest_cover-3.0.0-py2.py3-none-any.whl", hash = "sha256:578249955eb3b5f3991209df6e532bb770b647743b7392d3d97698dc02f39ebb"},
+]
+
+[package.dependencies]
+pytest-cov = ">=2.0"
+
+[[package]]
+name = "pytest-coverage"
+version = "0.0"
+description = "Pytest plugin for measuring coverage. Forked from `pytest-cov`."
+optional = false
+python-versions = "*"
+files = [
+ {file = "pytest-coverage-0.0.tar.gz", hash = "sha256:db6af2cbd7e458c7c9fd2b4207cee75258243c8a81cad31a7ee8cfad5be93c05"},
+ {file = "pytest_coverage-0.0-py2.py3-none-any.whl", hash = "sha256:dedd084c5e74d8e669355325916dc011539b190355021b037242514dee546368"},
+]
+
+[package.dependencies]
+pytest-cover = "*"
+
+[[package]]
+name = "pytest-mock"
+version = "3.12.0"
+description = "Thin-wrapper around the mock package for easier use with pytest"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"},
+ {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"},
+]
+
+[package.dependencies]
+pytest = ">=5.0"
+
+[package.extras]
+dev = ["pre-commit", "pytest-asyncio", "tox"]
+
+[[package]]
+name = "pytest-timeout"
+version = "2.2.0"
+description = "pytest plugin to abort hanging tests"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-timeout-2.2.0.tar.gz", hash = "sha256:3b0b95dabf3cb50bac9ef5ca912fa0cfc286526af17afc806824df20c2f72c90"},
+ {file = "pytest_timeout-2.2.0-py3-none-any.whl", hash = "sha256:bde531e096466f49398a59f2dde76fa78429a09a12411466f88a07213e220de2"},
+]
+
+[package.dependencies]
+pytest = ">=5.0.0"
+
+[[package]]
+name = "pytest-xdist"
+version = "3.5.0"
+description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-xdist-3.5.0.tar.gz", hash = "sha256:cbb36f3d67e0c478baa57fa4edc8843887e0f6cfc42d677530a36d7472b32d8a"},
+ {file = "pytest_xdist-3.5.0-py3-none-any.whl", hash = "sha256:d075629c7e00b611df89f490a5063944bee7a4362a5ff11c7cc7824a03dfce24"},
+]
+
+[package.dependencies]
+execnet = ">=1.1"
+pytest = ">=6.2.0"
+
+[package.extras]
+psutil = ["psutil (>=3.0)"]
+setproctitle = ["setproctitle"]
+testing = ["filelock"]
+
+[[package]]
+name = "python-dateutil"
+version = "2.8.2"
+description = "Extensions to the standard Python datetime module"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+files = [
+ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
+ {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
+]
+
+[package.dependencies]
+six = ">=1.5"
+
+[[package]]
+name = "pyupgrade"
+version = "3.15.0"
+description = "A tool to automatically upgrade syntax for newer versions."
+optional = false
+python-versions = ">=3.8.1"
+files = [
+ {file = "pyupgrade-3.15.0-py2.py3-none-any.whl", hash = "sha256:8dc8ebfaed43566e2c65994162795017c7db11f531558a74bc8aa077907bc305"},
+ {file = "pyupgrade-3.15.0.tar.gz", hash = "sha256:a7fde381060d7c224f55aef7a30fae5ac93bbc428367d27e70a603bc2acd4f00"},
+]
+
+[package.dependencies]
+tokenize-rt = ">=5.2.0"
+
+[[package]]
+name = "pyyaml"
+version = "6.0.1"
+description = "YAML parser and emitter for Python"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
+ {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
+ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
+ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
+ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
+ {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
+ {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
+ {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
+ {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
+ {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
+ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
+ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
+ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
+ {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
+ {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
+ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
+ {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
+ {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+ {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
+ {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
+ {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
+ {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
+ {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
+ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
+ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
+ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
+ {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
+ {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
+ {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
+ {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
+ {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
+ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
+ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
+ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
+ {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
+ {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
+ {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
+ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
+]
+
+[[package]]
+name = "refurb"
+version = "1.22.1"
+description = "A tool for refurbish and modernize Python codebases"
+optional = false
+python-versions = ">=3.10"
+files = [
+ {file = "refurb-1.22.1-py3-none-any.whl", hash = "sha256:7409fdcb01d73274ef249e729687656fc9cab7b454a5c72d19b8cefefc5aab74"},
+ {file = "refurb-1.22.1.tar.gz", hash = "sha256:3ff6b6f503b0fab9d082a23a0d81ae7bbce59f7b906d5046e863d8ddc46ad529"},
+]
+
+[package.dependencies]
+mypy = ">=0.981"
+
+[[package]]
+name = "ruff"
+version = "0.1.4"
+description = "An extremely fast Python linter and code formatter, written in Rust."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "ruff-0.1.4-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:864958706b669cce31d629902175138ad8a069d99ca53514611521f532d91495"},
+ {file = "ruff-0.1.4-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:9fdd61883bb34317c788af87f4cd75dfee3a73f5ded714b77ba928e418d6e39e"},
+ {file = "ruff-0.1.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4eaca8c9cc39aa7f0f0d7b8fe24ecb51232d1bb620fc4441a61161be4a17539"},
+ {file = "ruff-0.1.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a9a1301dc43cbf633fb603242bccd0aaa34834750a14a4c1817e2e5c8d60de17"},
+ {file = "ruff-0.1.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78e8db8ab6f100f02e28b3d713270c857d370b8d61871d5c7d1702ae411df683"},
+ {file = "ruff-0.1.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:80fea754eaae06335784b8ea053d6eb8e9aac75359ebddd6fee0858e87c8d510"},
+ {file = "ruff-0.1.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bc02a480d4bfffd163a723698da15d1a9aec2fced4c06f2a753f87f4ce6969c"},
+ {file = "ruff-0.1.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862811b403063765b03e716dac0fda8fdbe78b675cd947ed5873506448acea4"},
+ {file = "ruff-0.1.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58826efb8b3efbb59bb306f4b19640b7e366967a31c049d49311d9eb3a4c60cb"},
+ {file = "ruff-0.1.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:fdfd453fc91d9d86d6aaa33b1bafa69d114cf7421057868f0b79104079d3e66e"},
+ {file = "ruff-0.1.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e8791482d508bd0b36c76481ad3117987301b86072158bdb69d796503e1c84a8"},
+ {file = "ruff-0.1.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:01206e361021426e3c1b7fba06ddcb20dbc5037d64f6841e5f2b21084dc51800"},
+ {file = "ruff-0.1.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:645591a613a42cb7e5c2b667cbefd3877b21e0252b59272ba7212c3d35a5819f"},
+ {file = "ruff-0.1.4-py3-none-win32.whl", hash = "sha256:99908ca2b3b85bffe7e1414275d004917d1e0dfc99d497ccd2ecd19ad115fd0d"},
+ {file = "ruff-0.1.4-py3-none-win_amd64.whl", hash = "sha256:1dfd6bf8f6ad0a4ac99333f437e0ec168989adc5d837ecd38ddb2cc4a2e3db8a"},
+ {file = "ruff-0.1.4-py3-none-win_arm64.whl", hash = "sha256:d98ae9ebf56444e18a3e3652b3383204748f73e247dea6caaf8b52d37e6b32da"},
+ {file = "ruff-0.1.4.tar.gz", hash = "sha256:21520ecca4cc555162068d87c747b8f95e1e95f8ecfcbbe59e8dd00710586315"},
+]
+
+[[package]]
+name = "setuptools"
+version = "68.2.2"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"},
+ {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+
+[[package]]
+name = "soupsieve"
+version = "2.5"
+description = "A modern CSS selector implementation for Beautiful Soup."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"},
+ {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"},
+]
+
+[[package]]
+name = "tokenize-rt"
+version = "5.2.0"
+description = "A wrapper around the stdlib `tokenize` which roundtrips."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "tokenize_rt-5.2.0-py2.py3-none-any.whl", hash = "sha256:b79d41a65cfec71285433511b50271b05da3584a1da144a0752e9c621a285289"},
+ {file = "tokenize_rt-5.2.0.tar.gz", hash = "sha256:9fe80f8a5c1edad2d3ede0f37481cc0cc1538a2f442c9c2f9e4feacd2792d054"},
+]
+
+[[package]]
+name = "tomlkit"
+version = "0.12.2"
+description = "Style preserving TOML library"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "tomlkit-0.12.2-py3-none-any.whl", hash = "sha256:eeea7ac7563faeab0a1ed8fe12c2e5a51c61f933f2502f7e9db0241a65163ad0"},
+ {file = "tomlkit-0.12.2.tar.gz", hash = "sha256:df32fab589a81f0d7dc525a4267b6d7a64ee99619cbd1eeb0fae32c1dd426977"},
+]
+
+[[package]]
+name = "typeguard"
+version = "4.1.5"
+description = "Run-time type checker for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "typeguard-4.1.5-py3-none-any.whl", hash = "sha256:8923e55f8873caec136c892c3bed1f676eae7be57cdb94819281b3d3bc9c0953"},
+ {file = "typeguard-4.1.5.tar.gz", hash = "sha256:ea0a113bbc111bcffc90789ebb215625c963411f7096a7e9062d4e4630c155fd"},
+]
+
+[package.dependencies]
+typing-extensions = {version = ">=4.7.0", markers = "python_version < \"3.12\""}
+
+[package.extras]
+doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)"]
+test = ["coverage[toml] (>=7)", "mypy (>=1.2.0)", "pytest (>=7)"]
+
+[[package]]
+name = "typer"
+version = "0.9.0"
+description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"},
+ {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"},
+]
+
+[package.dependencies]
+click = ">=7.1.1,<9.0.0"
+typing-extensions = ">=3.7.4.3"
+
+[package.extras]
+all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
+dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"]
+doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"]
+test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
+
+[[package]]
+name = "types-beautifulsoup4"
+version = "4.12.0.7"
+description = "Typing stubs for beautifulsoup4"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "types-beautifulsoup4-4.12.0.7.tar.gz", hash = "sha256:59980028d29bf55d0db359efa305b75bacf0cb92e3f3f6b3fd408f2531df274c"},
+ {file = "types_beautifulsoup4-4.12.0.7-py3-none-any.whl", hash = "sha256:8b03b054cb2e62abf82bbbeda57a07257026f4ed9010ef17d8f8eff43bb1f9b7"},
+]
+
+[package.dependencies]
+types-html5lib = "*"
+
+[[package]]
+name = "types-html5lib"
+version = "1.1.11.15"
+description = "Typing stubs for html5lib"
+optional = false
+python-versions = "*"
+files = [
+ {file = "types-html5lib-1.1.11.15.tar.gz", hash = "sha256:80e1a2062d22a3affe5c28d97da30bffbf3a076d393c80fc6f1671216c1bd492"},
+ {file = "types_html5lib-1.1.11.15-py3-none-any.whl", hash = "sha256:16fe936d99b9f7fc210e2e21a2aed1b6bbbc554ad8242a6ef75f6f2bddb27e58"},
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.8.0"
+description = "Backported and Experimental Type Hints for Python 3.8+"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"},
+ {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"},
+]
+
+[[package]]
+name = "typing-inspect"
+version = "0.9.0"
+description = "Runtime inspection utilities for typing module."
+optional = false
+python-versions = "*"
+files = [
+ {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"},
+ {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"},
+]
+
+[package.dependencies]
+mypy-extensions = ">=0.3.0"
+typing-extensions = ">=3.7.4"
+
+[[package]]
+name = "uvloop"
+version = "0.19.0"
+description = "Fast implementation of asyncio event loop on top of libuv"
+optional = false
+python-versions = ">=3.8.0"
+files = [
+ {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"},
+ {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"},
+ {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"},
+ {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"},
+ {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"},
+ {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"},
+ {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"},
+ {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"},
+ {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"},
+ {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"},
+ {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"},
+ {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"},
+ {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"},
+ {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"},
+ {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"},
+ {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"},
+ {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"},
+ {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"},
+ {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd"},
+ {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd"},
+ {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be"},
+ {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797"},
+ {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d"},
+ {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7"},
+ {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b"},
+ {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67"},
+ {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7"},
+ {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256"},
+ {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17"},
+ {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5"},
+ {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"},
+]
+
+[package.extras]
+docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"]
+test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"]
+
+[[package]]
+name = "virtualenv"
+version = "20.24.6"
+description = "Virtual Python Environment builder"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "virtualenv-20.24.6-py3-none-any.whl", hash = "sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381"},
+ {file = "virtualenv-20.24.6.tar.gz", hash = "sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af"},
+]
+
+[package.dependencies]
+distlib = ">=0.3.7,<1"
+filelock = ">=3.12.2,<4"
+platformdirs = ">=3.9.1,<4"
+
+[package.extras]
+docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
+test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
+
+[[package]]
+name = "yarl"
+version = "1.9.2"
+description = "Yet another URL library"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"},
+ {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"},
+ {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"},
+ {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"},
+ {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"},
+ {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"},
+ {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"},
+ {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"},
+ {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"},
+ {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"},
+ {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"},
+ {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"},
+ {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"},
+ {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"},
+ {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"},
+ {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"},
+ {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"},
+ {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"},
+ {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"},
+ {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"},
+ {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"},
+ {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"},
+ {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"},
+ {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"},
+]
+
+[package.dependencies]
+idna = ">=2.0"
+multidict = ">=4.0"
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.11"
+content-hash = "17c8d7d27a96e7620452e911597d0f74ee4b6ef0ddaf1e6c7c1d2adb1d8ee44b"
diff --git a/pyadtpulse/__init__.py b/pyadtpulse/__init__.py
index b4d3f06..a5e4bb5 100644
--- a/pyadtpulse/__init__.py
+++ b/pyadtpulse/__init__.py
@@ -2,96 +2,28 @@
import logging
import asyncio
-import datetime
-import re
import time
-from contextlib import suppress
-from random import randint
from threading import RLock, Thread
-from typing import List, Optional, Union
from warnings import warn
import uvloop
-from aiohttp import ClientResponse, ClientSession
-from bs4 import BeautifulSoup
-from .alarm_panel import ADT_ALARM_UNKNOWN
from .const import (
- ADT_DEFAULT_HTTP_HEADERS,
+ ADT_DEFAULT_HTTP_USER_AGENT,
ADT_DEFAULT_KEEPALIVE_INTERVAL,
ADT_DEFAULT_RELOGIN_INTERVAL,
- ADT_GATEWAY_STRING,
- ADT_LOGIN_URI,
- ADT_LOGOUT_URI,
- ADT_MAX_KEEPALIVE_INTERVAL,
- ADT_MIN_RELOGIN_INTERVAL,
- ADT_SUMMARY_URI,
- ADT_SYNC_CHECK_URI,
- ADT_TIMEOUT_URI,
- API_HOST_CA,
DEFAULT_API_HOST,
)
-from .pulse_connection import ADTPulseConnection
-from .site import ADTPulseSite
-from .util import (
- AuthenticationException,
- DebugRLock,
- close_response,
- handle_response,
- make_soup,
-)
+from .pyadtpulse_async import SYNC_CHECK_TASK_NAME, PyADTPulseAsync
+from .util import DebugRLock, set_debug_lock
LOG = logging.getLogger(__name__)
-SYNC_CHECK_TASK_NAME = "ADT Pulse Sync Check Task"
-KEEPALIVE_TASK_NAME = "ADT Pulse Keepalive Task"
-
-class PyADTPulse:
+class PyADTPulse(PyADTPulseAsync):
"""Base object for ADT Pulse service."""
- __slots__ = (
- "_pulse_connection",
- "_sync_task",
- "_timeout_task",
- "_authenticated",
- "_updates_exist",
- "_session_thread",
- "_attribute_lock",
- "_last_login_time",
- "_site",
- "_username",
- "_password",
- "_fingerprint",
- "_login_exception",
- "_relogin_interval",
- "_keepalive_interval",
- )
-
- @staticmethod
- def _check_service_host(service_host: str) -> None:
- if service_host is None or service_host == "":
- raise ValueError("Service host is mandatory")
- if service_host not in (DEFAULT_API_HOST, API_HOST_CA):
- raise ValueError(
- "Service host must be one of {DEFAULT_API_HOST}" f" or {API_HOST_CA}"
- )
-
- @staticmethod
- def _check_keepalive_interval(keepalive_interval: int) -> None:
- if keepalive_interval > ADT_MAX_KEEPALIVE_INTERVAL or keepalive_interval <= 0:
- raise ValueError(
- f"keepalive interval ({keepalive_interval}) must be "
- f"greater than 0 and less than {ADT_MAX_KEEPALIVE_INTERVAL}"
- )
-
- @staticmethod
- def _check_relogin_interval(relogin_interval: int) -> None:
- if relogin_interval < ADT_MIN_RELOGIN_INTERVAL:
- raise ValueError(
- f"relogin interval ({relogin_interval}) must be "
- f"greater than {ADT_MIN_RELOGIN_INTERVAL}"
- )
+ __slots__ = ("_session_thread", "_p_attribute_lock", "_login_exception")
def __init__(
self,
@@ -99,381 +31,125 @@ def __init__(
password: str,
fingerprint: str,
service_host: str = DEFAULT_API_HOST,
- user_agent=ADT_DEFAULT_HTTP_HEADERS["User-Agent"],
- websession: Optional[ClientSession] = None,
+ user_agent=ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"],
do_login: bool = True,
debug_locks: bool = False,
- keepalive_interval: Optional[int] = ADT_DEFAULT_KEEPALIVE_INTERVAL,
- relogin_interval: Optional[int] = ADT_DEFAULT_RELOGIN_INTERVAL,
+ keepalive_interval: int = ADT_DEFAULT_KEEPALIVE_INTERVAL,
+ relogin_interval: int = ADT_DEFAULT_RELOGIN_INTERVAL,
+ detailed_debug_logging: bool = False,
):
- """Create a PyADTPulse object.
-
- Args:
- username (str): Username.
- password (str): Password.
- fingerprint (str): 2FA fingerprint.
- service_host (str, optional): host prefix to use
- i.e. https://portal.adtpulse.com or
- https://portal-ca.adtpulse.com
- user_agent (str, optional): User Agent.
- Defaults to ADT_DEFAULT_HTTP_HEADERS["User-Agent"].
- websession (ClientSession, optional): an initialized
- aiohttp.ClientSession to use, defaults to None
- do_login (bool, optional): login synchronously when creating object
- Should be set to False for asynchronous usage
- and async_login() should be called instead
- Setting websession will override this
- and not login
- Defaults to True
- debug_locks: (bool, optional): use debugging locks
- Defaults to False
- keepalive_interval (int, optional): number of minutes between
- keepalive checks, defaults to ADT_DEFAULT_KEEPALIVE_INTERVAL,
- maxiumum is ADT_MAX_KEEPALIVE_INTERVAL
- relogin_interval (int, optional): number of minutes between relogin checks
- defaults to ADT_DEFAULT_RELOGIN_INTERVAL,
- minimum is ADT_MIN_RELOGIN_INTERVAL
- """
- self._check_service_host(service_host)
- self._init_login_info(username, password, fingerprint)
- self._pulse_connection = ADTPulseConnection(
+ self._p_attribute_lock = set_debug_lock(
+            debug_locks, "pyadtpulse._p_attribute_lock"
+ )
+ warn(
+ "PyADTPulse is deprecated, please use PyADTPulseAsync instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ super().__init__(
+ username,
+ password,
+ fingerprint,
service_host,
- session=websession,
- user_agent=user_agent,
- debug_locks=debug_locks,
+ user_agent,
+ debug_locks,
+ keepalive_interval,
+ relogin_interval,
+ detailed_debug_logging,
)
-
- self._sync_task: Optional[asyncio.Task] = None
- self._timeout_task: Optional[asyncio.Task] = None
-
- # FIXME use thread event/condition, regular condition?
- # defer initialization to make sure we have an event loop
- self._authenticated: Optional[asyncio.locks.Event] = None
- self._login_exception: Optional[BaseException] = None
-
- self._updates_exist: Optional[asyncio.locks.Event] = None
-
- self._session_thread: Optional[Thread] = None
- self._attribute_lock: Union[RLock, DebugRLock]
- if not debug_locks:
- self._attribute_lock = RLock()
- else:
- self._attribute_lock = DebugRLock("PyADTPulse._attribute_lock")
- self._last_login_time: int = 0
-
- self._site: Optional[ADTPulseSite] = None
- self.keepalive_interval = keepalive_interval
- self.relogin_interval = relogin_interval
-
- # authenticate the user
- if do_login and websession is None:
+ self._session_thread: Thread | None = None
+ self._login_exception: Exception | None = None
+ if do_login:
self.login()
- def _init_login_info(self, username: str, password: str, fingerprint: str) -> None:
- if username is None or username == "":
- raise ValueError("Username is mandatory")
-
- pattern = r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b"
- if not re.match(pattern, username):
- raise ValueError("Username must be an email address")
- self._username = username
-
- if password is None or password == "":
- raise ValueError("Password is mandatory")
- self._password = password
-
- if fingerprint is None or fingerprint == "":
- raise ValueError("Fingerprint is required")
- self._fingerprint = fingerprint
-
def __repr__(self) -> str:
"""Object representation."""
- return f"<{self.__class__.__name__}: {self._username}>"
+ return (
+ f"<{self.__class__.__name__}: {self._authentication_properties.username}>"
+ )
# ADTPulse API endpoint is configurable (besides default US ADT Pulse endpoint) to
# support testing as well as alternative ADT Pulse endpoints such as
# portal-ca.adtpulse.com
- @property
- def service_host(self) -> str:
- """Get the Pulse host.
-
- Returns: (str): the ADT Pulse endpoint host
- """
- return self._pulse_connection.service_host
-
- @service_host.setter
- def service_host(self, host: str) -> None:
- """Override the Pulse host (i.e. to use portal-ca.adpulse.com).
-
- Args:
- host (str): name of Pulse endpoint host
- """
- self._check_service_host(host)
- with self._attribute_lock:
- self._pulse_connection.service_host = host
-
- def set_service_host(self, host: str) -> None:
- """Backward compatibility for service host property setter."""
- self.service_host = host
-
- @property
- def username(self) -> str:
- """Get username.
-
- Returns:
- str: the username
- """
- with self._attribute_lock:
- return self._username
-
- @property
- def version(self) -> str:
- """Get the ADT Pulse site version.
-
- Returns:
- str: a string containing the version
- """
- return self._pulse_connection.api_version
-
- @property
- def relogin_interval(self) -> int:
- """Get re-login interval.
-
- Returns:
- int: number of minutes to re-login to Pulse
- 0 means disabled
- """
- with self._attribute_lock:
- return self._relogin_interval
-
- @relogin_interval.setter
- def relogin_interval(self, interval: Optional[int]) -> None:
- """Set re-login interval.
-
- Args:
- interval (int): The number of minutes between logins.
- If set to None, resets to ADT_DEFAULT_RELOGIN_INTERVAL
-
- Raises:
- ValueError: if a relogin interval of less than 10 minutes
- is specified
- """
- if interval is None:
- interval = ADT_DEFAULT_RELOGIN_INTERVAL
- else:
- self._check_relogin_interval(interval)
- with self._attribute_lock:
- self._relogin_interval = interval
- LOG.debug("relogin interval set to %d", self._relogin_interval)
-
- @property
- def keepalive_interval(self) -> int:
- """Get the keepalive interval in minutes.
-
- Returns:
- int: the keepalive interval
+ def _pulse_session_thread(self) -> None:
"""
- with self._attribute_lock:
- return self._keepalive_interval
+        Run the ADT Pulse session in a background thread.
- @keepalive_interval.setter
- def keepalive_interval(self, interval: Optional[int]) -> None:
- """Set the keepalive interval in minutes.
-
- If set to None, resets to ADT_DEFAULT_KEEPALIVE_INTERVAL
+        Acquires the attribute lock and creates a new event loop (under the uvloop
+        policy) for the ADT Pulse API. The loop runs the coroutine `_sync_loop()`
+        until completion. Once the loop finishes, it is closed, the pulse
+        connection's event loop is set to `None`, and the session thread is set
+        to `None`.
"""
- if interval is None:
- interval = ADT_DEFAULT_KEEPALIVE_INTERVAL
- else:
- self._check_keepalive_interval(interval)
- with self._attribute_lock:
- self._keepalive_interval = interval
- LOG.debug("keepalive interval set to %d", self._keepalive_interval)
-
- async def _update_sites(self, soup: BeautifulSoup) -> None:
- with self._attribute_lock:
- if self._site is None:
- await self._initialize_sites(soup)
- if self._site is None:
- raise RuntimeError("pyadtpulse could not retrieve site")
- self._site.alarm_control_panel._update_alarm_from_soup(soup)
- self._site._update_zone_from_soup(soup)
-
- async def _initialize_sites(self, soup: BeautifulSoup) -> None:
- # typically, ADT Pulse accounts have only a single site (premise/location)
- singlePremise = soup.find("span", {"id": "p_singlePremise"})
- if singlePremise:
- site_name = singlePremise.text
-
- # FIXME: this code works, but it doesn't pass the linter
- signout_link = str(
- soup.find("a", {"class": "p_signoutlink"}).get("href") # type: ignore
- )
- if signout_link:
- m = re.search("networkid=(.+)&", signout_link)
- if m and m.group(1) and m.group(1):
- site_id = m.group(1)
- LOG.debug("Discovered site id %s: %s", site_id, site_name)
- new_site = ADTPulseSite(self._pulse_connection, site_id, site_name)
-
- # fetch zones first, so that we can have the status
- # updated with _update_alarm_status
- if not await new_site._fetch_devices(None):
- LOG.error("Could not fetch zones from ADT site")
- new_site.alarm_control_panel._update_alarm_from_soup(soup)
- if new_site.alarm_control_panel.status == ADT_ALARM_UNKNOWN:
- new_site.gateway.is_online = False
- new_site._update_zone_from_soup(soup)
- with self._attribute_lock:
- self._site = new_site
- return
- else:
- LOG.warning(
- "Couldn't find site id for %s in %s", site_name, signout_link
- )
- else:
- LOG.error("ADT Pulse accounts with MULTIPLE sites not supported!!!")
-
- # ...and current network id from:
- #
- #
- # ... or perhaps better, just extract all from /system/settings.jsp
-
- def _check_retry_after(
- self, response: Optional[ClientResponse], task_name: str
- ) -> int:
- if response is None:
- return 0
- header_value = response.headers.get("Retry-After")
- if header_value is None:
- return 0
- if header_value.isnumeric():
- retval = int(header_value)
- else:
- try:
- retval = (
- datetime.datetime.strptime(header_value, "%a, %d %b %G %T %Z")
- - datetime.datetime.now()
- ).seconds
- except ValueError:
- return 0
- reason = "Unknown"
- if response.status == 429:
- reason = "Too many requests"
- elif response.status == 503:
- reason = "Service unavailable"
- LOG.warning(
- "Task %s received Retry-After %s due to %s", task_name, retval, reason
- )
- return retval
-
- async def _keepalive_task(self) -> None:
- retry_after = 0
- response: ClientResponse | None = None
- if self._timeout_task is not None:
- task_name = self._timeout_task.get_name()
- else:
- task_name = f"{KEEPALIVE_TASK_NAME} - possible internal error"
- LOG.debug("creating %s", task_name)
- with self._attribute_lock:
- if self._authenticated is None:
- raise RuntimeError(
- "Keepalive task is running without an authenticated event"
- )
- while self._authenticated.is_set():
- relogin_interval = self.relogin_interval * 60
- if relogin_interval != 0 and time.time() - self._last_login_time > randint(
- int(0.75 * relogin_interval), relogin_interval
- ):
- LOG.info("Login timeout reached, re-logging in")
- # FIXME?: should we just pause the task?
- with self._attribute_lock:
- if self._sync_task is not None:
- self._sync_task.cancel()
- with suppress(Exception):
- await self._sync_task
- await self._do_logout_query()
- if not await self.async_quick_relogin():
- LOG.error("%s could not re-login, exiting", task_name)
- return
- if self._sync_task is not None:
- coro = self._sync_check_task()
- self._sync_task = asyncio.create_task(
- coro, name=f"{SYNC_CHECK_TASK_NAME}: Async session"
- )
- try:
- await asyncio.sleep(self.keepalive_interval * 60.0 + retry_after)
- LOG.debug("Resetting timeout")
- response = await self._pulse_connection.async_query(
- ADT_TIMEOUT_URI, "POST"
- )
- if not handle_response(
- response, logging.INFO, "Failed resetting ADT Pulse cloud timeout"
- ):
- retry_after = self._check_retry_after(response, "Keepalive task")
- close_response(response)
- continue
- close_response(response)
- if self.site.gateway.next_update < time.time():
- await self.site._set_device(ADT_GATEWAY_STRING)
- except asyncio.CancelledError:
- LOG.debug("%s cancelled", task_name)
- close_response(response)
- return
-
- def _pulse_session_thread(self) -> None:
# lock is released in sync_loop()
- self._attribute_lock.acquire()
+ self._p_attribute_lock.acquire()
LOG.debug("Creating ADT Pulse background thread")
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
loop = asyncio.new_event_loop()
- self._pulse_connection.loop = loop
+ self._pulse_connection_properties.loop = loop
loop.run_until_complete(self._sync_loop())
loop.close()
- self._pulse_connection.loop = None
+ self._pulse_connection_properties.loop = None
self._session_thread = None
async def _sync_loop(self) -> None:
- result = await self.async_login()
- self._attribute_lock.release()
- if result:
- if self._timeout_task is not None:
- task_list = (self._timeout_task,)
- try:
- await asyncio.wait(task_list)
- except asyncio.CancelledError:
- pass
- except Exception as e: # pylint: disable=broad-except
- LOG.exception(
- "Received exception while waiting for ADT Pulse service %s", e
- )
- else:
- # we should never get here
- raise RuntimeError("Background pyadtpulse tasks not created")
- if self._authenticated is not None:
- while self._authenticated.is_set():
- # busy wait until logout is done
- await asyncio.sleep(0.5)
+ """
+        Main loop of the synchronization process.
+
+        Calls `async_login`, storing any exception it raises in `_login_exception`,
+        then releases `_p_attribute_lock` so other threads can proceed. If login
+        failed, the loop returns immediately.
+
+        If login succeeded, it waits for `_timeout_task` to complete via
+        `asyncio.wait`; if `_timeout_task` was never created, a `RuntimeError` is
+        raised because the background tasks should exist at this point.
+
+        Finally, it busy-waits (sleeping 0.5 seconds per iteration) while the
+        authenticated flag is set, keeping the event loop alive until logout
+        completes.
+ """
+ try:
+ await self.async_login()
+ except Exception as e:
+ self._login_exception = e
+ self._p_attribute_lock.release()
+ if self._login_exception is not None:
+ return
+ if self._timeout_task is not None:
+ task_list = (self._timeout_task,)
+ try:
+ await asyncio.wait(task_list)
+ except asyncio.CancelledError:
+ pass
+ except Exception as e: # pylint: disable=broad-except
+ LOG.exception(
+ "Received exception while waiting for ADT Pulse service %s", e
+ )
+ else:
+ # we should never get here
+ raise RuntimeError("Background pyadtpulse tasks not created")
+ while self._pulse_connection_status.authenticated_flag.is_set():
+ # busy wait until logout is done
+ await asyncio.sleep(0.5)
def login(self) -> None:
"""Login to ADT Pulse and generate access token.
Raises:
- AuthenticationException if could not login
+            Exception: the exception raised by async_login if the login failed
"""
- self._attribute_lock.acquire()
+ self._p_attribute_lock.acquire()
# probably shouldn't be a daemon thread
self._session_thread = thread = Thread(
target=self._pulse_session_thread,
name="PyADTPulse Session",
daemon=True,
)
- self._attribute_lock.release()
+ self._p_attribute_lock.release()
self._session_thread.start()
time.sleep(1)
@@ -481,174 +157,11 @@ def login(self) -> None:
# thread will unlock after async_login, so attempt to obtain
# lock to block current thread until then
# if it's still alive, no exception
- self._attribute_lock.acquire()
- self._attribute_lock.release()
+ self._p_attribute_lock.acquire()
+ self._p_attribute_lock.release()
if not thread.is_alive():
- raise AuthenticationException(self._username)
-
- @property
- def attribute_lock(self) -> Union[RLock, DebugRLock]:
- """Get attribute lock for PyADTPulse object.
-
- Returns:
- RLock: thread Rlock
- """
- return self._attribute_lock
-
- @property
- def loop(self) -> Optional[asyncio.AbstractEventLoop]:
- """Get event loop.
-
- Returns:
- Optional[asyncio.AbstractEventLoop]: the event loop object or
- None if no thread is running
- """
- return self._pulse_connection.loop
-
- async def async_quick_relogin(self) -> bool:
- """Quickly re-login to Pulse.
-
- Doesn't do device queries or set connected event unless a failure occurs.
- FIXME: Should probably just re-work login logic."""
- response = await self._do_login_query()
- if not handle_response(response, logging.ERROR, "Could not re-login to Pulse"):
- await self.async_logout()
- return False
- return True
-
- def quick_relogin(self) -> bool:
- """Perform quick_relogin synchronously."""
- coro = self.async_quick_relogin()
- return asyncio.run_coroutine_threadsafe(
- coro,
- self._pulse_connection.check_sync(
- "Attempting to do call sync quick re-login from async"
- ),
- ).result()
-
- async def _do_login_query(self, timeout: int = 30) -> ClientResponse | None:
- try:
- retval = await self._pulse_connection.async_query(
- ADT_LOGIN_URI,
- method="POST",
- extra_params={
- "partner": "adt",
- "e": "ns",
- "usernameForm": self.username,
- "passwordForm": self._password,
- "fingerprint": self._fingerprint,
- "sun": "yes",
- },
- timeout=timeout,
- )
- except Exception as e: # pylint: disable=broad-except
- LOG.error("Could not log into Pulse site: %s", e)
- return None
- if retval is None:
- LOG.error("Could not log into Pulse site.")
- return None
- if not handle_response(
- retval,
- logging.ERROR,
- "Error encountered communicating with Pulse site on login",
- ):
- close_response(retval)
- return None
- self._last_login_time = int(time.time())
- return retval
-
- async def _do_logout_query(self) -> None:
- params = {}
- network: ADTPulseSite = self.site
- if network is not None:
- params.update({"network": str(network.id)})
- params.update({"partner": "adt"})
- await self._pulse_connection.async_query(
- ADT_LOGOUT_URI, extra_params=params, timeout=10
- )
-
- async def async_login(self) -> bool:
- """Login asynchronously to ADT.
-
- Returns: True if login successful
- """
- if self._authenticated is None:
- self._authenticated = asyncio.locks.Event()
- else:
- self._authenticated.clear()
-
- LOG.debug("Authenticating to ADT Pulse cloud service as %s", self._username)
- await self._pulse_connection.async_fetch_version()
-
- response = await self._do_login_query()
- if response is None:
- return False
- if self._pulse_connection.make_url(ADT_SUMMARY_URI) != str(response.url):
- # more specifically:
- # redirect to signin.jsp = username/password error
- # redirect to mfaSignin.jsp = fingerprint error
- LOG.error("Authentication error encountered logging into ADT Pulse")
- close_response(response)
- return False
-
- soup = await make_soup(
- response, logging.ERROR, "Could not log into ADT Pulse site"
- )
- if soup is None:
- return False
-
- # FIXME: should probably raise exceptions
- error = soup.find("div", {"id": "warnMsgContents"})
- if error:
- LOG.error("Invalid ADT Pulse username/password: %s", error)
- return False
- error = soup.find("div", "responsiveContainer")
- if error:
- LOG.error(
- "2FA authentiation required for ADT pulse username %s: %s",
- self.username,
- error,
- )
- return False
- # need to set authenticated here to prevent login loop
- self._authenticated.set()
- await self._update_sites(soup)
- if self._site is None:
- LOG.error("Could not retrieve any sites, login failed")
- self._authenticated.clear()
- return False
-
- # since we received fresh data on the status of the alarm, go ahead
- # and update the sites with the alarm status.
-
- if self._timeout_task is None:
- self._timeout_task = asyncio.create_task(
- self._keepalive_task(), name=f"{KEEPALIVE_TASK_NAME}"
- )
- if self._updates_exist is None:
- self._updates_exist = asyncio.locks.Event()
- await asyncio.sleep(0)
- return True
-
- async def async_logout(self) -> None:
- """Logout of ADT Pulse async."""
- LOG.info("Logging %s out of ADT Pulse", self._username)
- if self._timeout_task is not None:
- try:
- self._timeout_task.cancel()
- except asyncio.CancelledError:
- LOG.debug("%s successfully cancelled", KEEPALIVE_TASK_NAME)
- await self._timeout_task
- if self._sync_task is not None:
- try:
- self._sync_task.cancel()
- except asyncio.CancelledError:
- LOG.debug("%s successfully cancelled", SYNC_CHECK_TASK_NAME)
- await self._sync_task
- self._timeout_task = self._sync_task = None
- await self._do_logout_query()
- if self._authenticated is not None:
- self._authenticated.clear()
+ if self._login_exception is not None:
+ raise self._login_exception
def logout(self) -> None:
"""Log out of ADT Pulse."""
@@ -662,79 +175,24 @@ def logout(self) -> None:
if sync_thread is not None:
sync_thread.join()
- async def _sync_check_task(self) -> None:
- # this should never be true
- if self._sync_task is not None:
- task_name = self._sync_task.get_name()
- else:
- task_name = f"{SYNC_CHECK_TASK_NAME} - possible internal error"
-
- LOG.debug("creating %s", task_name)
- response = None
- retry_after = 0
- last_sync_text = "0-0-0"
- if self._updates_exist is None:
- raise RuntimeError(f"{task_name} started without update event initialized")
- have_updates = False
- while True:
- try:
- self.site.gateway.adjust_backoff_poll_interval()
- if not have_updates:
- pi = self.site.gateway.poll_interval
- else:
- pi = 0.0
- if retry_after == 0:
- await asyncio.sleep(pi)
- else:
- await asyncio.sleep(retry_after)
- response = await self._pulse_connection.async_query(
- ADT_SYNC_CHECK_URI,
- extra_params={"ts": str(int(time.time() * 1000))},
- )
+ @property
+    def attribute_lock(self) -> "RLock | DebugRLock":
+ """Get attribute lock for PyADTPulse object.
- if response is None:
- continue
- retry_after = self._check_retry_after(response, f"{task_name}")
- if retry_after != 0:
- close_response(response)
- continue
- text = await response.text()
- if not handle_response(
- response, logging.ERROR, "Error querying ADT sync"
- ):
- close_response(response)
- continue
- close_response(response)
- pattern = r"\d+[-]\d+[-]\d+"
- if not re.match(pattern, text):
- LOG.warning(
- "Unexpected sync check format (%s), forcing re-auth", pattern
- )
- LOG.debug("Received %s from ADT Pulse site", text)
- await self._do_logout_query()
- if not await self.async_quick_relogin():
- LOG.error("%s couldn't re-login, exiting.", task_name)
- continue
- if text != last_sync_text:
- LOG.debug("Updates exist: %s, requerying", text)
- last_sync_text = text
- have_updates = True
- continue
- if have_updates:
- have_updates = False
- if await self.async_update() is False:
- LOG.debug("Pulse data update from %s failed", task_name)
- continue
- self._updates_exist.set()
- else:
- LOG.debug(
- "Sync token %s indicates no remote updates to process", text
- )
+ Returns:
+            RLock | DebugRLock: the attribute lock
+ """
+ return self._p_attribute_lock
- except asyncio.CancelledError:
- LOG.debug("%s cancelled", task_name)
- close_response(response)
- return
+ @property
+ def loop(self) -> asyncio.AbstractEventLoop | None:
+ """Get event loop.
+
+ Returns:
+            asyncio.AbstractEventLoop | None: the event loop object, or
+            None if no thread is running
+ """
+ return self._pulse_connection_properties.loop
@property
def updates_exist(self) -> bool:
@@ -743,9 +201,9 @@ def updates_exist(self) -> bool:
Returns:
bool: True if updated data exists
"""
- with self._attribute_lock:
+ with self._p_attribute_lock:
if self._sync_task is None:
- loop = self._pulse_connection.loop
+ loop = self._pulse_connection_properties.loop
if loop is None:
raise RuntimeError(
"ADT pulse sync function updates_exist() "
@@ -755,64 +213,11 @@ def updates_exist(self) -> bool:
self._sync_task = loop.create_task(
coro, name=f"{SYNC_CHECK_TASK_NAME}: Sync session"
)
- if self._updates_exist is None:
- return False
-
- if self._updates_exist.is_set():
- self._updates_exist.clear()
+ if self._pulse_properties.updates_exist.is_set():
+ self._pulse_properties.updates_exist.clear()
return True
return False
- async def wait_for_update(self) -> None:
- """Wait for update.
-
- Blocks current async task until Pulse system
- signals an update
- """
- with self._attribute_lock:
- if self._sync_task is None:
- coro = self._sync_check_task()
- self._sync_task = asyncio.create_task(
- coro, name=f"{SYNC_CHECK_TASK_NAME}: Async session"
- )
- if self._updates_exist is None:
- raise RuntimeError("Update event does not exist")
-
- await self._updates_exist.wait()
- self._updates_exist.clear()
-
- @property
- def is_connected(self) -> bool:
- """Check if connected to ADT Pulse.
-
- Returns:
- bool: True if connected
- """
- with self._attribute_lock:
- if self._authenticated is None:
- return False
- return self._authenticated.is_set()
-
- # FIXME? might have to move this to site for multiple sites
-
- async def async_update(self) -> bool:
- """Update ADT Pulse data.
-
- Returns:
- bool: True if update succeeded.
- """
- LOG.debug("Checking ADT Pulse cloud service for updates")
-
- # FIXME will have to query other URIs for camera/zwave/etc
- soup = await self._pulse_connection.query_orb(
- logging.INFO, "Error returned from ADT Pulse service check"
- )
- if soup is not None:
- await self._update_sites(soup)
- return True
-
- return False
-
def update(self) -> bool:
"""Update ADT Pulse data.
@@ -827,27 +232,20 @@ def update(self) -> bool:
),
).result()
- @property
- def sites(self) -> List[ADTPulseSite]:
- """Return all sites for this ADT Pulse account."""
- warn(
- "multiple sites being removed, use pyADTPulse.site instead",
- PendingDeprecationWarning,
- stacklevel=2,
+ async def async_login(self) -> None:
+ self._pulse_connection_properties.check_async(
+ "Cannot login asynchronously with a synchronous session"
)
- with self._attribute_lock:
- if self._site is None:
- raise RuntimeError(
- "No sites have been retrieved, have you logged in yet?"
- )
- return [self._site]
+ await super().async_login()
- @property
- def site(self) -> ADTPulseSite:
- """Return the site associated with the Pulse login."""
- with self._attribute_lock:
- if self._site is None:
- raise RuntimeError(
- "No sites have been retrieved, have you logged in yet?"
- )
- return self._site
+ async def async_logout(self) -> None:
+ self._pulse_connection_properties.check_async(
+ "Cannot logout asynchronously with a synchronous session"
+ )
+ await super().async_logout()
+
+ async def async_update(self) -> bool:
+ self._pulse_connection_properties.check_async(
+ "Cannot update asynchronously with a synchronous session"
+ )
+ return await super().async_update()
diff --git a/pyadtpulse/alarm_panel.py b/pyadtpulse/alarm_panel.py
index 2c6a6cc..401c6c8 100644
--- a/pyadtpulse/alarm_panel.py
+++ b/pyadtpulse/alarm_panel.py
@@ -8,9 +8,10 @@
from time import time
from bs4 import BeautifulSoup
+from typeguard import typechecked
from .const import ADT_ARM_DISARM_URI
-from .pulse_connection import ADTPulseConnection
+from .pulse_connection import PulseConnection
from .util import make_soup
LOG = logging.getLogger(__name__)
@@ -21,6 +22,15 @@
ADT_ALARM_ARMING = "arming"
ADT_ALARM_DISARMING = "disarming"
+ALARM_STATUSES = (
+ ADT_ALARM_AWAY,
+ ADT_ALARM_HOME,
+ ADT_ALARM_OFF,
+ ADT_ALARM_UNKNOWN,
+ ADT_ALARM_ARMING,
+ ADT_ALARM_DISARMING,
+)
+
ADT_ARM_DISARM_TIMEOUT: float = 20
@@ -47,6 +57,18 @@ def status(self) -> str:
with self._state_lock:
return self._status
+ @status.setter
+ def status(self, new_status: str) -> None:
+ """Set alarm status.
+
+ Args:
+ new_status (str): the new alarm status
+ """
+ with self._state_lock:
+ if new_status not in ALARM_STATUSES:
+ raise ValueError(f"Alarm status must be one of {ALARM_STATUSES}")
+ self._status = new_status
+
@property
def is_away(self) -> bool:
"""Return wheter the system is armed away.
@@ -117,8 +139,9 @@ def last_update(self) -> float:
with self._state_lock:
return self._last_arm_disarm
+ @typechecked
async def _arm(
- self, connection: ADTPulseConnection, mode: str, force_arm: bool
+ self, connection: PulseConnection, mode: str, force_arm: bool
) -> bool:
"""Set arm status.
@@ -161,8 +184,10 @@ async def _arm(
timeout=10,
)
- soup = await make_soup(
- response,
+ soup = make_soup(
+ response[0],
+ response[1],
+ response[2],
logging.WARNING,
f"Failed updating ADT Pulse alarm {self._sat} to {mode}",
)
@@ -188,9 +213,10 @@ async def _arm(
self._last_arm_disarm = int(time())
return True
+ @typechecked
def _sync_set_alarm_mode(
self,
- connection: ADTPulseConnection,
+ connection: PulseConnection,
mode: str,
force_arm: bool = False,
) -> bool:
@@ -202,7 +228,8 @@ def _sync_set_alarm_mode(
),
).result()
- def arm_away(self, connection: ADTPulseConnection, force_arm: bool = False) -> bool:
+ @typechecked
+ def arm_away(self, connection: PulseConnection, force_arm: bool = False) -> bool:
"""Arm the alarm in Away mode.
Args:
@@ -213,7 +240,8 @@ def arm_away(self, connection: ADTPulseConnection, force_arm: bool = False) -> b
"""
return self._sync_set_alarm_mode(connection, ADT_ALARM_AWAY, force_arm)
- def arm_home(self, connection: ADTPulseConnection, force_arm: bool = False) -> bool:
+ @typechecked
+ def arm_home(self, connection: PulseConnection, force_arm: bool = False) -> bool:
"""Arm the alarm in Home mode.
Args:
@@ -224,7 +252,8 @@ def arm_home(self, connection: ADTPulseConnection, force_arm: bool = False) -> b
"""
return self._sync_set_alarm_mode(connection, ADT_ALARM_HOME, force_arm)
- def disarm(self, connection: ADTPulseConnection) -> bool:
+ @typechecked
+ def disarm(self, connection: PulseConnection) -> bool:
"""Disarm the alarm.
Returns:
@@ -232,8 +261,9 @@ def disarm(self, connection: ADTPulseConnection) -> bool:
"""
return self._sync_set_alarm_mode(connection, ADT_ALARM_OFF, False)
+ @typechecked
async def async_arm_away(
- self, connection: ADTPulseConnection, force_arm: bool = False
+ self, connection: PulseConnection, force_arm: bool = False
) -> bool:
"""Arm alarm away async.
@@ -245,8 +275,9 @@ async def async_arm_away(
"""
return await self._arm(connection, ADT_ALARM_AWAY, force_arm)
+ @typechecked
async def async_arm_home(
- self, connection: ADTPulseConnection, force_arm: bool = False
+ self, connection: PulseConnection, force_arm: bool = False
) -> bool:
"""Arm alarm home async.
@@ -257,7 +288,8 @@ async def async_arm_home(
"""
return await self._arm(connection, ADT_ALARM_HOME, force_arm)
- async def async_disarm(self, connection: ADTPulseConnection) -> bool:
+ @typechecked
+ async def async_disarm(self, connection: PulseConnection) -> bool:
"""Disarm alarm async.
Returns:
@@ -265,30 +297,42 @@ async def async_disarm(self, connection: ADTPulseConnection) -> bool:
"""
return await self._arm(connection, ADT_ALARM_OFF, False)
- def _update_alarm_from_soup(self, summary_html_soup: BeautifulSoup) -> None:
+ @typechecked
+ def update_alarm_from_soup(self, summary_html_soup: BeautifulSoup) -> None:
+ """
+ Updates the alarm status based on the information extracted from the provided
+ HTML soup.
+
+ Args:
+ summary_html_soup (BeautifulSoup): The BeautifulSoup object representing
+ the HTML soup.
+
+ Returns:
+ None: This function does not return anything.
+ """
LOG.debug("Updating alarm status")
value = summary_html_soup.find("span", {"class": "p_boldNormalTextLarge"})
sat_location = "security_button_0"
with self._state_lock:
if value:
- text = value.text
+ text = value.text.lstrip().splitlines()[0]
last_updated = int(time())
- if re.match("Disarmed", text):
+ if text.startswith("Disarmed"):
if (
self._status != ADT_ALARM_ARMING
or last_updated - self._last_arm_disarm > ADT_ARM_DISARM_TIMEOUT
):
self._status = ADT_ALARM_OFF
self._last_arm_disarm = last_updated
- elif re.match("Armed Away", text):
+ elif text.startswith("Armed Away"):
if (
self._status != ADT_ALARM_DISARMING
or last_updated - self._last_arm_disarm > ADT_ARM_DISARM_TIMEOUT
):
self._status = ADT_ALARM_AWAY
self._last_arm_disarm = last_updated
- elif re.match("Armed Stay", text):
+ elif text.startswith("Armed Stay"):
if (
self._status != ADT_ALARM_DISARMING
or last_updated - self._last_arm_disarm > ADT_ARM_DISARM_TIMEOUT
@@ -302,23 +346,23 @@ def _update_alarm_from_soup(self, summary_html_soup: BeautifulSoup) -> None:
return
LOG.debug("Alarm status = %s", self._status)
- if self._sat == "":
- sat_button = summary_html_soup.find(
- "input", {"type": "button", "id": sat_location}
- )
- if sat_button and sat_button.has_attr("onclick"):
- on_click = sat_button["onclick"]
- match = re.search(r"sat=([a-z0-9\-]+)", on_click)
- if match:
- self._sat = match.group(1)
- elif len(self._sat) == 0:
- LOG.warning("No sat recorded and was unable extract sat.")
-
- if len(self._sat) > 0:
- LOG.debug("Extracted sat = %s", self._sat)
- else:
- LOG.warning("Unable to extract sat")
-
+ sat_button = summary_html_soup.find(
+ "input", {"type": "button", "id": sat_location}
+ )
+ if sat_button and sat_button.has_attr("onclick"):
+ on_click = sat_button["onclick"]
+ match = re.search(r"sat=([a-z0-9\-]+)", on_click)
+ if match:
+ self._sat = match.group(1)
+ elif len(self._sat) == 0:
+ LOG.warning("No sat recorded and was unable extract sat.")
+
+ if len(self._sat) > 0:
+ LOG.debug("Extracted sat = %s", self._sat)
+ else:
+ LOG.warning("Unable to extract sat")
+
+ @typechecked
def set_alarm_attributes(self, alarm_attributes: dict[str, str]) -> None:
"""
Set alarm attributes including model, manufacturer, and online status.
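
For illustration, a minimal sketch of how the new `status` setter behaves. It assumes the panel class is `ADTPulseAlarmPanel` in `pyadtpulse.alarm_panel` and that it can be constructed from its dataclass defaults; both are assumptions, not shown in this diff.

```python
# Sketch only: ADTPulseAlarmPanel and default construction are assumed.
from pyadtpulse.alarm_panel import ADT_ALARM_HOME, ADTPulseAlarmPanel

panel = ADTPulseAlarmPanel()
panel.status = ADT_ALARM_HOME   # accepted: value is in ALARM_STATUSES
try:
    panel.status = "vacation"   # rejected: not a known status
except ValueError as exc:
    print(exc)                  # "Alarm status must be one of (...)"
```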
diff --git a/pyadtpulse/const.py b/pyadtpulse/const.py
index 22a2a0c..299b3e7 100644
--- a/pyadtpulse/const.py
+++ b/pyadtpulse/const.py
@@ -1,5 +1,7 @@
"""Constants for pyadtpulse."""
-__version__ = "1.1.5"
+
+__version__ = "1.2.0"
+
DEFAULT_API_HOST = "https://portal.adtpulse.com"
API_HOST_CA = "https://portal-ca.adtpulse.com" # Canada
@@ -7,6 +9,7 @@
ADT_LOGIN_URI = "/access/signin.jsp"
ADT_LOGOUT_URI = "/access/signout.jsp"
+ADT_MFA_FAIL_URI = "/mfa/mfaSignIn.jsp?workflow=challenge"
ADT_SUMMARY_URI = "/summary/summary.jsp"
ADT_ZONES_URI = "/ajax/homeViewDevAjax.jsp"
@@ -14,6 +17,7 @@
ADT_SYSTEM_URI = "/system/system.jsp"
ADT_DEVICE_URI = "/system/device.jsp"
ADT_STATES_URI = "/ajax/currentStates.jsp"
+ADT_GATEWAY_URI = "/system/gateway.jsp"
ADT_SYNC_CHECK_URI = "/Ajax/SyncCheckServ"
ADT_TIMEOUT_URI = "/KeepAlive"
# Intervals are all in minutes
@@ -26,24 +30,36 @@
# ADT sets their keepalive to 1 second, so poll a little more often
# than that
ADT_DEFAULT_POLL_INTERVAL = 2.0
-ADT_GATEWAY_OFFLINE_POLL_INTERVAL = 90.0
-ADT_DEFAULT_HTTP_HEADERS = {
+ADT_GATEWAY_MAX_OFFLINE_POLL_INTERVAL = 600.0
+ADT_MAX_BACKOFF: float = 15.0 * 60.0
+ADT_DEFAULT_HTTP_USER_AGENT = {
"User-Agent": (
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
"AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/100.0.4896.127 Safari/537.36 Edg/100.0.1185.44"
- ),
- "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
+ )
}
+ADT_DEFAULT_HTTP_ACCEPT_HEADERS = {
+ "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,"
+ "image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7"
+}
+ADT_DEFAULT_SEC_FETCH_HEADERS = {
+ "Sec-Fetch-User": "?1",
+ "Sec-Ch-Ua-Mobile": "?0",
+ "Sec-Fetch-Site": "same-origin",
+ "Sec-Fetch-Mode": "navigate",
+ "Upgrade-Insecure-Requests": "1",
+}
+ADT_OTHER_HTTP_ACCEPT_HEADERS = {
+ "Accept": "*/*",
+}
ADT_ARM_URI = "/quickcontrol/serv/RunRRACommand"
ADT_ARM_DISARM_URI = "/quickcontrol/armDisarm.jsp"
ADT_SYSTEM_SETTINGS = "/system/settings.jsp"
-ADT_DEFAULT_VERSION = "24.0.0-117"
-
-ADT_HTTP_REFERER_URIS = (ADT_LOGIN_URI, ADT_DEVICE_URI, ADT_SUMMARY_URI, ADT_SYSTEM_URI)
+ADT_HTTP_BACKGROUND_URIS = (ADT_ORB_URI, ADT_SYNC_CHECK_URI)
STATE_OK = "OK"
STATE_OPEN = "Open"
STATE_MOTION = "Motion"
@@ -58,3 +74,5 @@
ADT_SENSOR_SMOKE = "smoke"
ADT_SENSOR_CO = "co"
ADT_SENSOR_ALARM = "alarm"
+
+ADT_DEFAULT_LOGIN_TIMEOUT = 30
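
For illustration, a minimal sketch of how the split header constants can be recombined into a single browser-like header set, which is roughly what `PulseConnectionProperties._set_headers()` does further down in this diff.

```python
# Recombine the split header constants (sketch; assumes pyadtpulse >= 1.2.0).
from pyadtpulse.const import (
    ADT_DEFAULT_HTTP_ACCEPT_HEADERS,
    ADT_DEFAULT_HTTP_USER_AGENT,
    ADT_DEFAULT_SEC_FETCH_HEADERS,
)

browser_like_headers = {
    **ADT_DEFAULT_HTTP_USER_AGENT,       # User-Agent
    **ADT_DEFAULT_HTTP_ACCEPT_HEADERS,   # Accept
    **ADT_DEFAULT_SEC_FETCH_HEADERS,     # Sec-Fetch-* / Upgrade-Insecure-Requests
}
print(sorted(browser_like_headers))
```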
diff --git a/pyadtpulse/exceptions.py b/pyadtpulse/exceptions.py
new file mode 100644
index 0000000..aef984d
--- /dev/null
+++ b/pyadtpulse/exceptions.py
@@ -0,0 +1,144 @@
+"""Pulse exceptions."""
+
+import datetime
+from time import time
+
+from .pulse_backoff import PulseBackoff
+
+
+def compute_retry_time(retry_time: float | None) -> str:
+ """Compute the retry time."""
+ if not retry_time:
+ return "indefinitely"
+ return str(datetime.datetime.fromtimestamp(retry_time))
+
+
+class PulseExceptionWithBackoff(Exception):
+ """Exception with backoff."""
+
+ def __init__(self, message: str, backoff: PulseBackoff):
+ """Initialize exception."""
+ super().__init__(message)
+ self.backoff = backoff
+ self.backoff.increment_backoff()
+
+ def __str__(self):
+ """Return a string representation of the exception."""
+ return f"{self.__class__.__name__}: {self.args[0]}"
+
+ def __repr__(self):
+ """Return a string representation of the exception."""
+ return f"{self.__class__.__name__}(message='{self.args[0]}', backoff={self.backoff})"
+
+
+class PulseExceptionWithRetry(PulseExceptionWithBackoff):
+ """Exception with backoff
+
+ If retry_time is None, or is in the past, then just the backoff count will be incremented.
+ """
+
+ def __init__(self, message: str, backoff: PulseBackoff, retry_time: float | None):
+ """Initialize exception."""
+ # super.__init__ will increment the backoff count
+ super().__init__(message, backoff)
+ self.retry_time = retry_time
+ if retry_time and retry_time > time():
+ # setting the absolute backoff time will reset the backoff count
+ self.backoff.set_absolute_backoff_time(retry_time)
+ return
+
+ def __str__(self):
+ """Return a string representation of the exception."""
+ return f"{self.__class__.__name__}: {self.args[0]}"
+
+ def __repr__(self):
+ """Return a string representation of the exception."""
+ return f"{self.__class__.__name__}(message='{self.args[0]}', backoff={self.backoff}, retry_time={self.retry_time})"
+
+
+class PulseConnectionError(Exception):
+ """Base class for connection errors"""
+
+
+class PulseServerConnectionError(PulseExceptionWithBackoff, PulseConnectionError):
+ """Server error."""
+
+ def __init__(self, message: str, backoff: PulseBackoff):
+ """Initialize Pulse server error exception."""
+ super().__init__(f"Pulse server error: {message}", backoff)
+
+
+class PulseClientConnectionError(PulseExceptionWithBackoff, PulseConnectionError):
+ """Client error."""
+
+ def __init__(self, message: str, backoff: PulseBackoff):
+ """Initialize Pulse client error exception."""
+ super().__init__(f"Client error connecting to Pulse: {message}", backoff)
+
+
+class PulseServiceTemporarilyUnavailableError(
+ PulseExceptionWithRetry, PulseConnectionError
+):
+ """Service temporarily unavailable error.
+
+ For HTTP 503 and 429 errors.
+ """
+
+ def __init__(self, backoff: PulseBackoff, retry_time: float | None = None):
+ """Initialize Pusle service temporarily unavailable error exception."""
+ super().__init__(
+ f"Pulse service temporarily unavailable until {compute_retry_time(retry_time)}",
+ backoff,
+ retry_time,
+ )
+
+
+class PulseLoginException(Exception):
+ """Login exceptions.
+
+ Base class for catching all login exceptions."""
+
+
+class PulseAuthenticationError(PulseLoginException):
+ """Authentication error."""
+
+ def __init__(self):
+ """Initialize Pulse Authentication error exception."""
+ super().__init__("Error authenticating to Pulse")
+
+
+class PulseAccountLockedError(PulseExceptionWithRetry, PulseLoginException):
+ """Account locked error."""
+
+ def __init__(self, backoff: PulseBackoff, retry: float):
+ """Initialize Pulse Account locked error exception."""
+ super().__init__(
+ f"Pulse Account is locked until {compute_retry_time(retry)}", backoff, retry
+ )
+
+
+class PulseGatewayOfflineError(PulseExceptionWithBackoff):
+ """Gateway offline error."""
+
+ def __init__(self, backoff: PulseBackoff):
+ """Initialize Pulse Gateway offline error exception."""
+ super().__init__("Gateway is offline", backoff)
+
+
+class PulseMFARequiredError(PulseLoginException):
+ """MFA required error."""
+
+ def __init__(self):
+ """Initialize Pulse MFA required error exception."""
+ super().__init__("Authentication failed because MFA is required")
+
+
+class PulseNotLoggedInError(PulseLoginException):
+ """Exception to indicate that the application code is not logged in.
+
+ Used for signalling waiters.
+ """
+
+ def __init__(self):
+ """Initialize Pulse Not logged in error exception."""
+ super().__init__("Not logged into Pulse")
diff --git a/pyadtpulse/gateway.py b/pyadtpulse/gateway.py
index dee0393..cb8c67b 100644
--- a/pyadtpulse/gateway.py
+++ b/pyadtpulse/gateway.py
@@ -1,12 +1,16 @@
"""ADT Pulse Gateway Dataclass."""
import logging
+import re
from dataclasses import dataclass
from ipaddress import IPv4Address, IPv6Address, ip_address
from threading import RLock
-from typing import Any, Optional
+from typing import Any
-from .const import ADT_DEFAULT_POLL_INTERVAL, ADT_GATEWAY_OFFLINE_POLL_INTERVAL
+from typeguard import typechecked
+
+from .const import ADT_DEFAULT_POLL_INTERVAL, ADT_GATEWAY_MAX_OFFLINE_POLL_INTERVAL
+from .pulse_backoff import PulseBackoff
from .util import parse_pulse_datetime
LOG = logging.getLogger(__name__)
@@ -41,25 +45,26 @@ class ADTPulseGateway:
manufacturer: str = "Unknown"
_status_text: str = "OFFLINE"
- _current_poll_interval: float = ADT_DEFAULT_POLL_INTERVAL
- _initial_poll_interval: float = ADT_DEFAULT_POLL_INTERVAL
+ backoff = PulseBackoff(
+ "Gateway", ADT_DEFAULT_POLL_INTERVAL, ADT_GATEWAY_MAX_OFFLINE_POLL_INTERVAL
+ )
_attribute_lock = RLock()
- model: Optional[str] = None
- serial_number: Optional[str] = None
+ model: str | None = None
+ serial_number: str | None = None
next_update: int = 0
last_update: int = 0
- firmware_version: Optional[str] = None
- hardware_version: Optional[str] = None
- primary_connection_type: Optional[str] = None
- broadband_connection_status: Optional[str] = None
- cellular_connection_status: Optional[str] = None
- cellular_connection_signal_strength: float = 0.0
- broadband_lan_ip_address: Optional[IPv4Address | IPv6Address] = None
- broadband_lan_mac: Optional[str] = None
- device_lan_ip_address: Optional[IPv4Address | IPv6Address] = None
- device_lan_mac: Optional[str] = None
- router_lan_ip_address: Optional[IPv4Address | IPv6Address] = None
- router_wan_ip_address: Optional[IPv4Address | IPv6Address] = None
+ firmware_version: str | None = None
+ hardware_version: str | None = None
+ primary_connection_type: str | None = None
+ broadband_connection_status: str | None = None
+ cellular_connection_status: str | None = None
+ _cellular_connection_signal_strength: float = 0.0
+ broadband_lan_ip_address: IPv4Address | IPv6Address | None = None
+ _broadband_lan_mac: str | None = None
+ device_lan_ip_address: IPv4Address | IPv6Address | None = None
+ _device_lan_mac: str | None = None
+ router_lan_ip_address: IPv4Address | IPv6Address | None = None
+ router_wan_ip_address: IPv4Address | IPv6Address | None = None
@property
def is_online(self) -> bool:
@@ -72,87 +77,100 @@ def is_online(self) -> bool:
return self._status_text == "ONLINE"
@is_online.setter
+ @typechecked
def is_online(self, status: bool) -> None:
"""Set gateway status.
Args:
status (bool): True if gateway is online
-
- Also changes the polling intervals
"""
with self._attribute_lock:
if status == self.is_online:
return
-
+ old_status = self._status_text
self._status_text = "ONLINE"
if not status:
self._status_text = "OFFLINE"
- self._current_poll_interval = ADT_GATEWAY_OFFLINE_POLL_INTERVAL
- else:
- self._current_poll_interval = self._initial_poll_interval
LOG.info(
- "ADT Pulse gateway %s, poll interval=%f",
+ "ADT Pulse gateway %s",
self._status_text,
- self._current_poll_interval,
+ )
+ if old_status == "OFFLINE":
+ self.backoff.reset_backoff()
+ LOG.debug(
+ "Gateway poll interval: %d",
+ (
+ self.backoff.initial_backoff_interval
+ if self._status_text == "ONLINE"
+ else self.backoff.get_current_backoff_interval()
+ ),
)
@property
def poll_interval(self) -> float:
- """Set polling interval.
-
- Returns:
- float: number of seconds between polls
- """
+ """Get initial poll interval."""
with self._attribute_lock:
- return self._current_poll_interval
+ return self.backoff.initial_backoff_interval
@poll_interval.setter
- def poll_interval(self, new_interval: Optional[float]) -> None:
- """Set polling interval.
-
- Args:
- new_interval (float): polling interval if gateway is online,
- if set to None, resets to ADT_DEFAULT_POLL_INTERVAL
-
- Raises:
- ValueError: if new_interval is less than 0
- """
- if new_interval is None:
- new_interval = ADT_DEFAULT_POLL_INTERVAL
- elif new_interval < 0.0:
- raise ValueError("ADT Pulse polling interval must be greater than 0")
+ @typechecked
+ def poll_interval(self, new_interval: float) -> None:
with self._attribute_lock:
- self._initial_poll_interval = new_interval
- if self._current_poll_interval != ADT_GATEWAY_OFFLINE_POLL_INTERVAL:
- self._current_poll_interval = new_interval
- LOG.debug("Set poll interval to %f", self._initial_poll_interval)
+ self.backoff.initial_backoff_interval = new_interval
- def adjust_backoff_poll_interval(self) -> None:
- """Calculates the backoff poll interval.
+ @staticmethod
+ def _check_mac_address(mac_address: str) -> bool:
+ pattern = r"^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$"
+ return re.match(pattern, mac_address) is not None
- Each call will adjust current_poll interval with exponential backoff,
- unless gateway is online, in which case, poll interval will be reset to
- initial_poll interval."""
+ @property
+ def broadband_lan_mac(self) -> str | None:
+ """Get current gateway MAC address."""
+ return self._broadband_lan_mac
+
+ @broadband_lan_mac.setter
+ @typechecked
+ def broadband_lan_mac(self, new_mac: str | None) -> None:
+ """Set gateway MAC address."""
+ if new_mac is not None and not self._check_mac_address(new_mac):
+ raise ValueError("Invalid MAC address")
+ self._broadband_lan_mac = new_mac
- with self._attribute_lock:
- if self.is_online:
- self._current_poll_interval = self._initial_poll_interval
- return
- # use an exponential backoff
- self._current_poll_interval = self._current_poll_interval * 2
- if self._current_poll_interval > ADT_GATEWAY_OFFLINE_POLL_INTERVAL:
- self._current_poll_interval = ADT_DEFAULT_POLL_INTERVAL
- LOG.debug(
- "Setting current poll interval to %f", self._current_poll_interval
- )
+ @property
+ def device_lan_mac(self) -> str | None:
+ """Get current gateway MAC address."""
+ return self._device_lan_mac
+
+ @device_lan_mac.setter
+ @typechecked
+ def device_lan_mac(self, new_mac: str | None) -> None:
+ """Set gateway MAC address."""
+ if new_mac is not None and not self._check_mac_address(new_mac):
+ raise ValueError("Invalid MAC address")
+ self._device_lan_mac = new_mac
+
+ @property
+ def cellular_connection_signal_strength(self) -> float:
+ """Get current gateway MAC address."""
+ return self._cellular_connection_signal_strength
+
+ @cellular_connection_signal_strength.setter
+ @typechecked
+ def cellular_connection_signal_strength(
+ self, new_signal_strength: float | None
+ ) -> None:
+ """Set gateway MAC address."""
+ if not new_signal_strength:
+ new_signal_strength = 0.0
+ self._cellular_connection_signal_strength = new_signal_strength
def set_gateway_attributes(self, gateway_attributes: dict[str, str]) -> None:
"""Set gateway attributes from dictionary.
Args:
gateway_attributes (dict[str,str]): dictionary of gateway attributes
- """ """"""
+ """
for i in (
STRING_UPDATEABLE_FIELDS
+ IPADDR_UPDATEABLE_FIELDS
@@ -174,4 +192,5 @@ def set_gateway_attributes(self, gateway_attributes: dict[str, str]) -> None:
temp = int(parse_pulse_datetime(temp).timestamp())
except ValueError:
temp = None
- setattr(self, i, temp)
+ if hasattr(self, i):
+ setattr(self, i, temp)
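
For illustration, a minimal sketch of the stricter gateway setters added above. It assumes `ADTPulseGateway` can be constructed with its dataclass defaults, which is not shown in this diff.

```python
# Sketch of the validated MAC setters; default construction is assumed.
from pyadtpulse.gateway import ADTPulseGateway

gw = ADTPulseGateway()
gw.broadband_lan_mac = "aa:bb:cc:dd:ee:ff"        # passes the MAC regex
try:
    gw.device_lan_mac = "not-a-mac"               # fails validation
except ValueError as exc:
    print(exc)                                    # "Invalid MAC address"
print(gw.backoff.get_current_backoff_interval())  # 0.0 until the backoff increments
```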
diff --git a/pyadtpulse/pulse_authentication_properties.py b/pyadtpulse/pulse_authentication_properties.py
new file mode 100644
index 0000000..5d0f98f
--- /dev/null
+++ b/pyadtpulse/pulse_authentication_properties.py
@@ -0,0 +1,135 @@
+"""Pulse Authentication Properties."""
+
+from re import match
+
+from typeguard import typechecked
+
+from .util import set_debug_lock
+
+
+class PulseAuthenticationProperties:
+ """Pulse Authentication Properties."""
+
+ __slots__ = (
+ "_username",
+ "_password",
+ "_fingerprint",
+ "_paa_attribute_lock",
+ "_last_login_time",
+ "_site_id",
+ )
+
+ @staticmethod
+ def check_username(username: str) -> None:
+ """Check if username is valid.
+
+ Raises ValueError if a login parameter is not valid."""
+ if not username:
+ raise ValueError("Username is mandatory")
+ pattern = r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b"
+ if not match(pattern, username):
+ raise ValueError("Username must be an email address")
+
+ @staticmethod
+ @typechecked
+ def check_password(password: str) -> None:
+ """Check if password is valid.
+
+ Raises ValueError if password is not valid.
+ """
+ if not password:
+ raise ValueError("Password is mandatory")
+
+ @staticmethod
+ @typechecked
+ def check_fingerprint(fingerprint: str) -> None:
+ """Check if fingerprint is valid.
+
+ Raises ValueError if fingerprint is not valid.
+ """
+ if not fingerprint:
+ raise ValueError("Fingerprint is required")
+
+ @typechecked
+ def __init__(
+ self,
+ username: str,
+ password: str,
+ fingerprint: str,
+ debug_locks: bool = False,
+ ) -> None:
+ """Initialize Pulse Authentication Properties."""
+ self.check_username(username)
+ self.check_password(password)
+ self.check_fingerprint(fingerprint)
+ self._username = username
+ self._password = password
+ self._fingerprint = fingerprint
+ self._paa_attribute_lock = set_debug_lock(
+ debug_locks, "pyadtpulse.paa_attribute_lock"
+ )
+ self._last_login_time = 0
+ self._site_id = ""
+
+ @property
+ def last_login_time(self) -> int:
+ """Get the last login time."""
+ with self._paa_attribute_lock:
+ return self._last_login_time
+
+ @last_login_time.setter
+ @typechecked
+ def last_login_time(self, login_time: int) -> None:
+ with self._paa_attribute_lock:
+ self._last_login_time = login_time
+
+ @property
+ def username(self) -> str:
+ """Get the username."""
+ with self._paa_attribute_lock:
+ return self._username
+
+ @username.setter
+ @typechecked
+ def username(self, username: str) -> None:
+ self.check_username(username)
+ with self._paa_attribute_lock:
+ self._username = username
+
+ @property
+ def password(self) -> str:
+ """Get the password."""
+ with self._paa_attribute_lock:
+ return self._password
+
+ @password.setter
+ @typechecked
+ def password(self, password: str) -> None:
+ self.check_password(password)
+ with self._paa_attribute_lock:
+ self._password = password
+
+ @property
+ def fingerprint(self) -> str:
+ """Get the fingerprint."""
+ with self._paa_attribute_lock:
+ return self._fingerprint
+
+ @fingerprint.setter
+ @typechecked
+ def fingerprint(self, fingerprint: str) -> None:
+ self.check_fingerprint(fingerprint)
+ with self._paa_attribute_lock:
+ self._fingerprint = fingerprint
+
+ @property
+ def site_id(self) -> str:
+ """Get the site ID."""
+ with self._paa_attribute_lock:
+ return self._site_id
+
+ @site_id.setter
+ @typechecked
+ def site_id(self, site_id: str) -> None:
+ with self._paa_attribute_lock:
+ self._site_id = site_id
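
For illustration, a minimal sketch of the credential validation performed by the new class; the credentials below are placeholders.

```python
# Sketch: validation happens both at construction and in the setters.
from pyadtpulse.pulse_authentication_properties import PulseAuthenticationProperties

props = PulseAuthenticationProperties(
    username="user@example.com",        # must look like an email address
    password="placeholder-password",
    fingerprint="placeholder-fingerprint",
)
try:
    props.username = "not-an-email"
except ValueError as exc:
    print(exc)                          # "Username must be an email address"
```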
diff --git a/pyadtpulse/pulse_backoff.py b/pyadtpulse/pulse_backoff.py
new file mode 100644
index 0000000..9c3278c
--- /dev/null
+++ b/pyadtpulse/pulse_backoff.py
@@ -0,0 +1,192 @@
+"""Pulse backoff object."""
+
+import asyncio
+import datetime
+from logging import getLogger
+from time import time
+
+from typeguard import typechecked
+
+from .const import ADT_MAX_BACKOFF
+from .util import set_debug_lock
+
+LOG = getLogger(__name__)
+
+
+class PulseBackoff:
+ """Pulse backoff object."""
+
+ __slots__ = (
+ "_b_lock",
+ "_initial_backoff_interval",
+ "_max_backoff_interval",
+ "_backoff_count",
+ "_expiration_time",
+ "_name",
+ "_detailed_debug_logging",
+ "_threshold",
+ )
+
+ @typechecked
+ def __init__(
+ self,
+ name: str,
+ initial_backoff_interval: float,
+ max_backoff_interval: float = ADT_MAX_BACKOFF,
+ threshold: int = 0,
+ debug_locks: bool = False,
+ detailed_debug_logging=False,
+ ) -> None:
+ """Initialize backoff.
+
+ Args:
+ name (str): Name of the backoff.
+ initial_backoff_interval (float): Initial backoff interval in seconds.
+ max_backoff_interval (float, optional): Maximum backoff interval in seconds.
+ Defaults to ADT_MAX_BACKOFF.
+ threshold (int, optional): Threshold for backoff. Defaults to 0.
+ debug_locks (bool, optional): Enable debug locks. Defaults to False.
+ detailed_debug_logging (bool, optional): Enable detailed debug logging.
+ Defaults to False.
+ """
+ self._check_intervals(initial_backoff_interval, max_backoff_interval)
+ self._b_lock = set_debug_lock(debug_locks, "pyadtpulse._b_lock")
+ self._initial_backoff_interval = initial_backoff_interval
+ self._max_backoff_interval = max_backoff_interval
+ self._backoff_count = 0
+ self._expiration_time = 0.0
+ self._name = name
+ self._detailed_debug_logging = detailed_debug_logging
+ self._threshold = threshold
+
+ def _calculate_backoff_interval(self) -> float:
+ """Calculate backoff time."""
+ if self._backoff_count == 0:
+ return 0.0
+ if self._backoff_count <= (self._threshold + 1):
+ return self._initial_backoff_interval
+ return min(
+ self._initial_backoff_interval
+ * 2 ** (self._backoff_count - self._threshold - 1),
+ self._max_backoff_interval,
+ )
+
+ @staticmethod
+ def _check_intervals(
+ initial_backoff_interval: float, max_backoff_interval: float
+ ) -> None:
+ """Check max_backoff_interval is >= initial_backoff_interval
+ and that both intervals are positive."""
+ if initial_backoff_interval <= 0:
+ raise ValueError("initial_backoff_interval must be greater than 0")
+ if max_backoff_interval < initial_backoff_interval:
+ raise ValueError("max_backoff_interval must be >= initial_backoff_interval")
+
+ def get_current_backoff_interval(self) -> float:
+ """Return current backoff time."""
+ with self._b_lock:
+ return self._calculate_backoff_interval()
+
+ def increment_backoff(self) -> None:
+ """Increment backoff."""
+ with self._b_lock:
+ self._backoff_count += 1
+ if self._detailed_debug_logging:
+ LOG.debug(
+ "Pulse backoff %s: incremented to %s",
+ self._name,
+ self._backoff_count,
+ )
+
+ def reset_backoff(self) -> None:
+ """Reset backoff."""
+ with self._b_lock:
+ if self._expiration_time < time():
+ if self._detailed_debug_logging and self._backoff_count != 0:
+ LOG.debug("Pulse backoff %s reset", self._name)
+ self._backoff_count = 0
+ self._expiration_time = 0.0
+
+ @typechecked
+ def set_absolute_backoff_time(self, backoff_time: float) -> None:
+ """Set absolute backoff time."""
+ curr_time = time()
+ if backoff_time < curr_time:
+ raise ValueError("Absolute backoff time must be greater than current time")
+ with self._b_lock:
+ if self._detailed_debug_logging:
+ LOG.debug(
+ "Pulse backoff %s: set to %s",
+ self._name,
+ datetime.datetime.fromtimestamp(backoff_time).strftime(
+ "%m/%d/%Y %H:%M:%S"
+ ),
+ )
+ self._expiration_time = backoff_time
+ self._backoff_count = 0
+
+ async def wait_for_backoff(self) -> None:
+ """Wait for backoff."""
+ with self._b_lock:
+ curr_time = time()
+ if self._expiration_time < curr_time:
+ if self.backoff_count == 0:
+ return
+ diff = self._calculate_backoff_interval()
+ else:
+ diff = self._expiration_time - curr_time
+ if diff > 0:
+ if self._detailed_debug_logging:
+ LOG.debug("Backoff %s: waiting for %s", self._name, diff)
+ await asyncio.sleep(diff)
+
+ def will_backoff(self) -> bool:
+ """Return if backoff is needed."""
+ with self._b_lock:
+ return (
+ self._backoff_count > self._threshold or self._expiration_time >= time()
+ )
+
+ @property
+ def backoff_count(self) -> int:
+ """Return backoff count."""
+ with self._b_lock:
+ return self._backoff_count
+
+ @property
+ def expiration_time(self) -> float:
+ """Return backoff expiration time."""
+ with self._b_lock:
+ return self._expiration_time
+
+ @property
+ def initial_backoff_interval(self) -> float:
+ """Return initial backoff interval."""
+ with self._b_lock:
+ return self._initial_backoff_interval
+
+ @initial_backoff_interval.setter
+ @typechecked
+ def initial_backoff_interval(self, new_interval: float) -> None:
+ """Set initial backoff interval."""
+ with self._b_lock:
+ self._check_intervals(new_interval, self._max_backoff_interval)
+ self._initial_backoff_interval = new_interval
+
+ @property
+ def name(self) -> str:
+ """Return name."""
+ return self._name
+
+ @property
+ def detailed_debug_logging(self) -> bool:
+ """Return detailed debug logging."""
+ with self._b_lock:
+ return self._detailed_debug_logging
+
+ @detailed_debug_logging.setter
+ @typechecked
+ def detailed_debug_logging(self, new_value: bool) -> None:
+ """Set detailed debug logging."""
+ with self._b_lock:
+ self._detailed_debug_logging = new_value
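
For illustration, a worked sketch of the backoff progression: with `threshold=0`, the first increment yields the initial interval and each further increment doubles it, capped at `max_backoff_interval`.

```python
# Worked example of _calculate_backoff_interval() with threshold=0.
from pyadtpulse.pulse_backoff import PulseBackoff

backoff = PulseBackoff("demo", initial_backoff_interval=2.0, max_backoff_interval=60.0)
intervals = []
for _ in range(7):
    backoff.increment_backoff()
    intervals.append(backoff.get_current_backoff_interval())
print(intervals)   # [2.0, 4.0, 8.0, 16.0, 32.0, 60.0, 60.0]
backoff.reset_backoff()
print(backoff.get_current_backoff_interval())   # 0.0
```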
diff --git a/pyadtpulse/pulse_connection.py b/pyadtpulse/pulse_connection.py
index 7eb950f..7863e50 100644
--- a/pyadtpulse/pulse_connection.py
+++ b/pyadtpulse/pulse_connection.py
@@ -1,330 +1,327 @@
-"""ADT Pulse connection. End users should probably not call this directly."""
+"""ADT Pulse connection. End users should probably not call this directly.
+
+This is the main interface to the HTTP functions used to access ADT Pulse.
+"""
import logging
-import asyncio
import re
-from random import uniform
-from threading import Lock, RLock
-from typing import Dict, Optional, Union
-
-from aiohttp import (
- ClientConnectionError,
- ClientConnectorError,
- ClientResponse,
- ClientResponseError,
- ClientSession,
-)
+from asyncio import AbstractEventLoop
+from time import time
+
from bs4 import BeautifulSoup
+from typeguard import typechecked
+from yarl import URL
from .const import (
- ADT_DEFAULT_HTTP_HEADERS,
- ADT_DEFAULT_VERSION,
- ADT_DEVICE_URI,
- ADT_HTTP_REFERER_URIS,
+ ADT_DEFAULT_LOGIN_TIMEOUT,
ADT_LOGIN_URI,
- ADT_ORB_URI,
- ADT_SYSTEM_URI,
- API_PREFIX,
+ ADT_LOGOUT_URI,
+ ADT_MFA_FAIL_URI,
+ ADT_SUMMARY_URI,
)
-from .util import DebugRLock, close_response, make_soup
+from .exceptions import (
+ PulseAccountLockedError,
+ PulseAuthenticationError,
+ PulseClientConnectionError,
+ PulseMFARequiredError,
+ PulseNotLoggedInError,
+ PulseServerConnectionError,
+ PulseServiceTemporarilyUnavailableError,
+)
+from .pulse_authentication_properties import PulseAuthenticationProperties
+from .pulse_backoff import PulseBackoff
+from .pulse_connection_properties import PulseConnectionProperties
+from .pulse_connection_status import PulseConnectionStatus
+from .pulse_query_manager import PulseQueryManager
+from .util import make_soup, set_debug_lock
-RECOVERABLE_ERRORS = [429, 500, 502, 503, 504]
LOG = logging.getLogger(__name__)
-class ADTPulseConnection:
- """ADT Pulse connection related attributes."""
+SESSION_COOKIES = {"X-mobile-browser": "false", "ICLocal": "en_US"}
- _api_version = ADT_DEFAULT_VERSION
- _class_threadlock = Lock()
+
+class PulseConnection(PulseQueryManager):
+ """ADT Pulse connection related attributes."""
__slots__ = (
- "_api_host",
- "_allocated_session",
- "_session",
- "_attribute_lock",
- "_loop",
+ "_pc_attribute_lock",
+ "_authentication_properties",
+ "_login_backoff",
+ "_login_in_progress",
)
+ @typechecked
def __init__(
self,
- host: str,
- session: Optional[ClientSession] = None,
- user_agent: str = ADT_DEFAULT_HTTP_HEADERS["User-Agent"],
+ pulse_connection_status: PulseConnectionStatus,
+ pulse_connection_properties: PulseConnectionProperties,
+ pulse_authentication: PulseAuthenticationProperties,
debug_locks: bool = False,
):
"""Initialize ADT Pulse connection."""
- self._api_host = host
- self._allocated_session = False
- if session is None:
- self._allocated_session = True
- self._session = ClientSession()
- else:
- self._session = session
- self._session.headers.update({"User-Agent": user_agent})
- self._attribute_lock: Union[RLock, DebugRLock]
- if not debug_locks:
- self._attribute_lock = RLock()
- else:
- self._attribute_lock = DebugRLock("ADTPulseConnection._attribute_lock")
- self._loop: Optional[asyncio.AbstractEventLoop] = None
-
- def __del__(self):
- """Destructor for ADTPulseConnection."""
- if self._allocated_session and self._session is not None:
- self._session.detach()
- @property
- def api_version(self) -> str:
- """Get the API version."""
- with self._class_threadlock:
- return self._api_version
+ # need to initialize this after the session since we set cookies
+ # based on it
+ super().__init__(
+ pulse_connection_status,
+ pulse_connection_properties,
+ debug_locks,
+ )
+ self._pc_attribute_lock = set_debug_lock(
+ debug_locks, "pyadtpulse.pc_attribute_lock"
+ )
+ self._connection_properties = pulse_connection_properties
+ self._connection_status = pulse_connection_status
+ self._authentication_properties = pulse_authentication
+ self._login_backoff = PulseBackoff(
+ "Login",
+ pulse_connection_status._backoff.initial_backoff_interval,
+ detailed_debug_logging=self._connection_properties.detailed_debug_logging,
+ )
+ self._login_in_progress = False
+ self._debug_locks = debug_locks
- @property
- def service_host(self) -> str:
- """Get the host prefix for connections."""
- with self._attribute_lock:
- return self._api_host
-
- @service_host.setter
- def service_host(self, host: str) -> None:
- """Set the host prefix for connections."""
- with self._attribute_lock:
- self._session.headers.update({"Host": host})
- self._api_host = host
+ @typechecked
+ def check_login_errors(
+ self, response: tuple[int, str | None, URL | None]
+ ) -> BeautifulSoup:
+ """Check response for login errors.
- @property
- def loop(self) -> Optional[asyncio.AbstractEventLoop]:
- """Get the event loop."""
- with self._attribute_lock:
- return self._loop
-
- @loop.setter
- def loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None:
- """Set the event loop."""
- with self._attribute_lock:
- self._loop = loop
-
- def check_sync(self, message: str) -> asyncio.AbstractEventLoop:
- """Checks if sync login was performed.
-
- Returns the loop to use for run_coroutine_threadsafe if so.
- Raises RuntimeError with given message if not."""
- with self._attribute_lock:
- if self._loop is None:
- raise RuntimeError(message)
- return self._loop
-
- async def async_query(
- self,
- uri: str,
- method: str = "GET",
- extra_params: Optional[Dict[str, str]] = None,
- extra_headers: Optional[Dict[str, str]] = None,
- timeout=1,
- ) -> Optional[ClientResponse]:
- """Query ADT Pulse async.
+ Will handle setting backoffs and raising exceptions.
Args:
- uri (str): URI to query
- method (str, optional): method to use. Defaults to "GET".
- extra_params (Optional[Dict], optional): query parameters. Defaults to None.
- extra_headers (Optional[Dict], optional): extra HTTP headers.
- Defaults to None.
- timeout (int, optional): timeout in seconds. Defaults to 1.
+ response (tuple[int, str | None, URL | None]): The response
Returns:
- Optional[ClientResponse]: aiohttp.ClientResponse object
- None on failure
- ClientResponse will already be closed.
+ BeautifulSoup: The parsed response
+
+ Raises:
+ PulseAuthenticationError: if login fails due to incorrect username/password
+ PulseServerConnectionError: if login fails due to server error
+ PulseAccountLockedError: if login fails due to account locked
+ PulseMFARequiredError: if login fails due to MFA required
+ PulseNotLoggedInError: if login fails due to not logged in
"""
- response = None
- with ADTPulseConnection._class_threadlock:
- if ADTPulseConnection._api_version == ADT_DEFAULT_VERSION:
- await self.async_fetch_version()
- url = self.make_url(uri)
- if uri in ADT_HTTP_REFERER_URIS:
- new_headers = {"Accept": ADT_DEFAULT_HTTP_HEADERS["Accept"]}
- else:
- new_headers = {"Accept": "*/*"}
-
- LOG.debug("Updating HTTP headers: %s", new_headers)
- self._session.headers.update(new_headers)
-
- LOG.debug(
- "Attempting %s %s params=%s timeout=%d", method, uri, extra_params, timeout
- )
- # FIXME: reauthenticate if received:
- # "You have not yet signed in or you
- # have been signed out due to inactivity."
-
- # define connection method
- retry = 0
- max_retries = 3
- while retry < max_retries:
- try:
- if method == "GET":
- async with self._session.get(
- url, headers=extra_headers, params=extra_params, timeout=timeout
- ) as response:
- await response.text()
- elif method == "POST":
- async with self._session.post(
- url, headers=extra_headers, data=extra_params, timeout=timeout
- ) as response:
- await response.text()
+ def extract_seconds_from_string(s: str) -> int:
+ seconds = 0
+ match = re.search(r"\d+", s)
+ if match:
+ seconds = int(match.group())
+ if "minutes" in s:
+ seconds *= 60
+ return seconds
+
+ def determine_error_type():
+ """Determine what type of error we have from the url and the parsed page.
+
+ Will raise the appropriate exception.
+ """
+ self._login_in_progress = False
+ url = self._connection_properties.make_url(ADT_LOGIN_URI)
+ if url == response_url_string:
+ error = soup.find("div", {"id": "warnMsgContents"})
+ if error:
+ error_text = error.get_text()
+ LOG.error("Error logging into pulse: %s", error_text)
+ if "Try again in" in error_text:
+ if (retry_after := extract_seconds_from_string(error_text)) > 0:
+ raise PulseAccountLockedError(
+ self._login_backoff,
+ retry_after + time(),
+ )
+ elif "You have not yet signed in" in error_text:
+ raise PulseNotLoggedInError()
+ elif "Sign In Unsuccessful" in error_text:
+ raise PulseAuthenticationError()
else:
- LOG.error("Invalid request method %s", method)
- return None
-
- if response.status in RECOVERABLE_ERRORS:
- retry = retry + 1
- LOG.info(
- "query returned recoverable error code %s, "
- "retrying (count = %d)",
- response.status,
- retry,
- )
- if retry == max_retries:
- LOG.warning(
- "Exceeded max retries of %d, giving up", max_retries
- )
- response.raise_for_status()
- await asyncio.sleep(2**retry + uniform(0.0, 1.0))
- continue
-
- response.raise_for_status()
- # success, break loop
- retry = 4
- except (
- asyncio.TimeoutError,
- ClientConnectionError,
- ClientConnectorError,
- ) as ex:
- LOG.debug(
- "Error %s occurred making %s request to %s, retrying",
- ex.args,
- method,
- url,
- exc_info=True,
- )
- await asyncio.sleep(2**retry + uniform(0.0, 1.0))
- continue
- except ClientResponseError as err:
- code = err.code
- LOG.exception(
- "Received HTTP error code %i in request to ADT Pulse", code
- )
- return None
-
- # success!
- # FIXME? login uses redirects so final url is wrong
- if uri in ADT_HTTP_REFERER_URIS:
- if uri == ADT_DEVICE_URI:
- referer = self.make_url(ADT_SYSTEM_URI)
+ raise PulseNotLoggedInError()
else:
- if response is not None and response.url is not None:
- referer = str(response.url)
- LOG.debug("Setting Referer to: %s", referer)
- self._session.headers.update({"Referer": referer})
+ url = self._connection_properties.make_url(ADT_MFA_FAIL_URI)
+ if url == response_url_string:
+ raise PulseMFARequiredError()
+
+ soup = make_soup(
+ response[0],
+ response[1],
+ response[2],
+ logging.ERROR,
+ "Could not log into ADT Pulse site",
+ )
+ # this probably should have been handled by async_query()
+ if soup is None:
+ raise PulseServerConnectionError(
+ f"Could not log into ADT Pulse site: code {response[0]}: URL: {response[2]}, response: {response[1]}",
+ self._login_backoff,
+ )
+ url = self._connection_properties.make_url(ADT_SUMMARY_URI)
+ response_url_string = str(response[2])
+ if url != response_url_string:
+ determine_error_type()
+ raise PulseAuthenticationError()
+ return soup
+
+ @typechecked
+ async def async_do_login_query(
+ self, timeout: int = ADT_DEFAULT_LOGIN_TIMEOUT
+ ) -> BeautifulSoup | None:
+ """
+ Performs a login query to the Pulse site.
- return response
+ Will backoff on login failures.
- def query(
- self,
- uri: str,
- method: str = "GET",
- extra_params: Optional[Dict[str, str]] = None,
- extra_headers: Optional[Dict[str, str]] = None,
- timeout=1,
- ) -> Optional[ClientResponse]:
- """Query ADT Pulse async.
+ Will set login in progress flag.
Args:
- uri (str): URI to query
- method (str, optional): method to use. Defaults to "GET".
- extra_params (Optional[Dict], optional): query parameters. Defaults to None.
- extra_headers (Optional[Dict], optional): extra HTTP headers.
- Defaults to None.
- timeout (int, optional): timeout in seconds. Defaults to 1.
+ timeout (int, optional): The timeout value for the query in seconds.
+ Defaults to ADT_DEFAULT_LOGIN_TIMEOUT.
+
Returns:
- Optional[ClientResponse]: aiohttp.ClientResponse object
- None on failure
- ClientResponse will already be closed.
+ BeautifulSoup | None: a BeautifulSoup object containing
+ summary.jsp, or None if a login is already in progress
+ Raises:
+ ValueError: if login parameters are not correct
+ PulseAuthenticationError: if login fails due to incorrect username/password
+ PulseServerConnectionError: if login fails due to server error
+ PulseServiceTemporarilyUnavailableError: if login fails due to too many requests or
+ server is temporarily unavailable
+ PulseAccountLockedError: if login fails due to account locked
+ PulseMFARequiredError: if login fails due to MFA required
+ PulseNotLoggedInError: if login fails due to not logged in (which is probably an internal error)
"""
- coro = self.async_query(uri, method, extra_params, extra_headers, timeout)
- return asyncio.run_coroutine_threadsafe(
- coro, self.check_sync("Attempting to run sync query from async login")
- ).result()
-
- async def query_orb(
- self, level: int, error_message: str
- ) -> Optional[BeautifulSoup]:
- """Query ADT Pulse ORB.
- Args:
- level (int): error level to log on failure
- error_message (str): error message to use on failure
+ if self.login_in_progress:
+ return None
+ await self.quick_logout()
+ # just raise exceptions if we're not going to be able to log in
+ lockout_time = self._login_backoff.expiration_time
+ if lockout_time > time():
+ raise PulseAccountLockedError(self._login_backoff, lockout_time)
+ cs_backoff = self._connection_status.get_backoff()
+ lockout_time = cs_backoff.expiration_time
+ if lockout_time > time():
+ raise PulseServiceTemporarilyUnavailableError(cs_backoff, lockout_time)
+ self.login_in_progress = True
+ data = {
+ "usernameForm": self._authentication_properties.username,
+ "passwordForm": self._authentication_properties.password,
+ "networkid": self._authentication_properties.site_id,
+ "fingerprint": self._authentication_properties.fingerprint,
+ }
+ await self._login_backoff.wait_for_backoff()
+ try:
+ response = await self.async_query(
+ ADT_LOGIN_URI,
+ "POST",
+ extra_params=data,
+ timeout=timeout,
+ requires_authentication=False,
+ )
+ except (
+ PulseClientConnectionError,
+ PulseServerConnectionError,
+ PulseServiceTemporarilyUnavailableError,
+ ) as e:
+ LOG.error("Could not log into Pulse site: %s", e)
+ self.login_in_progress = False
+ raise
+ soup = self.check_login_errors(response)
+ self._connection_status.authenticated_flag.set()
+ self._authentication_properties.last_login_time = int(time())
+ self._login_backoff.reset_backoff()
+ self.login_in_progress = False
+ return soup
+
+ @typechecked
+ async def async_do_logout_query(self, site_id: str | None = None) -> None:
+ """Performs a logout query to the ADT Pulse site."""
+ params = {}
+ si = ""
+ self._connection_status.authenticated_flag.clear()
+ if site_id is not None and site_id != "":
+ self._authentication_properties.site_id = site_id
+ si = site_id
+ params.update({"networkid": si})
+
+ params.update({"partner": "adt"})
+ try:
+ await self.async_query(
+ ADT_LOGOUT_URI,
+ extra_params=params,
+ timeout=10,
+ requires_authentication=False,
+ )
+ # FIXME: do we care if this raises exceptions?
+ except (
+ PulseClientConnectionError,
+ PulseServiceTemporarilyUnavailableError,
+ PulseServerConnectionError,
+ ) as e:
+ LOG.debug("Could not logout from Pulse site: %s", e)
- Returns:
- Optional[BeautifulSoup]: A Beautiful Soup object, or None if failure
- """
- response = await self.async_query(ADT_ORB_URI)
+ @property
+ def is_connected(self) -> bool:
+ """Check if ADT Pulse is connected."""
+ return (
+ self._connection_status.authenticated_flag.is_set()
+ and not self._login_in_progress
+ )
- return await make_soup(response, level, error_message)
+ @property
+ def login_backoff(self) -> PulseBackoff:
+ """Return backoff object."""
+ with self._pc_attribute_lock:
+ return self._login_backoff
- def make_url(self, uri: str) -> str:
- """Create a URL to service host from a URI.
+ def check_sync(self, message: str) -> AbstractEventLoop:
+ """Convenience method to check if running from sync context."""
+ return self._connection_properties.check_sync(message)
- Args:
- uri (str): the URI to convert
+ @property
+ def debug_locks(self):
+ """Return debug locks."""
+ return self._debug_locks
- Returns:
- str: the converted string
+ @property
+ def login_in_progress(self) -> bool:
+ """Return login in progress."""
+ with self._pc_attribute_lock:
+ return self._login_in_progress
+
+ @login_in_progress.setter
+ @typechecked
+ def login_in_progress(self, value: bool) -> None:
+ """Set login in progress."""
+ with self._pc_attribute_lock:
+ self._login_in_progress = value
+
+ async def quick_logout(self) -> None:
+ """Quickly logout.
+
+ This just resets the authenticated flag and clears the ClientSession.
"""
- with self._attribute_lock:
- return f"{self._api_host}{API_PREFIX}{ADTPulseConnection._api_version}{uri}"
-
- async def async_fetch_version(self) -> None:
- """Fetch ADT Pulse version."""
- with ADTPulseConnection._class_threadlock:
- if ADTPulseConnection._api_version != ADT_DEFAULT_VERSION:
- return
- response = None
- signin_url = f"{self.service_host}/myhome{ADT_LOGIN_URI}"
- if self._session:
- try:
- async with self._session.get(signin_url) as response:
- # we only need the headers here, don't parse response
- response.raise_for_status()
- except (ClientResponseError, ClientConnectionError):
- LOG.warning(
- "Error occurred during API version fetch, defaulting to %s",
- ADT_DEFAULT_VERSION,
- )
- close_response(response)
- return
-
- if response is None:
- LOG.warning(
- "Error occurred during API version fetch, defaulting to %s",
- ADT_DEFAULT_VERSION,
- )
- return
-
- m = re.search("/myhome/(.+)/[a-z]*/", response.real_url.path)
- close_response(response)
- if m is not None:
- ADTPulseConnection._api_version = m.group(1)
- LOG.debug(
- "Discovered ADT Pulse version %s at %s",
- ADTPulseConnection._api_version,
- self.service_host,
- )
- return
-
- LOG.warning(
- "Couldn't auto-detect ADT Pulse version, defaulting to %s",
- ADT_DEFAULT_VERSION,
- )
+ LOG.debug("Resetting session")
+ self._connection_status.authenticated_flag.clear()
+ await self._connection_properties.clear_session()
+
+ @property
+ def detailed_debug_logging(self) -> bool:
+ """Return detailed debug logging."""
+ return (
+ self._login_backoff.detailed_debug_logging
+ and self._connection_properties.detailed_debug_logging
+ and self._connection_status.detailed_debug_logging
+ )
+
+ @detailed_debug_logging.setter
+ @typechecked
+ def detailed_debug_logging(self, value: bool):
+ with self._pc_attribute_lock:
+ self._login_backoff.detailed_debug_logging = value
+ self._connection_properties.detailed_debug_logging = value
+ self._connection_status.detailed_debug_logging = value
+
+ def get_login_backoff(self) -> PulseBackoff:
+ """Return login backoff."""
+ return self._login_backoff
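
For illustration, a hedged sketch of wiring the new objects together for a login/logout cycle; the credentials are placeholders and running this would contact the real portal.

```python
# Sketch only: placeholder credentials, real network access required.
import asyncio

from pyadtpulse.const import DEFAULT_API_HOST
from pyadtpulse.exceptions import PulseConnectionError, PulseLoginException
from pyadtpulse.pulse_authentication_properties import PulseAuthenticationProperties
from pyadtpulse.pulse_connection import PulseConnection
from pyadtpulse.pulse_connection_properties import PulseConnectionProperties
from pyadtpulse.pulse_connection_status import PulseConnectionStatus


async def main() -> None:
    status = PulseConnectionStatus()
    properties = PulseConnectionProperties(DEFAULT_API_HOST)
    auth = PulseAuthenticationProperties(
        "user@example.com", "placeholder-password", "placeholder-fingerprint"
    )
    connection = PulseConnection(status, properties, auth)
    try:
        soup = await connection.async_do_login_query()
        print("connected:", connection.is_connected, soup is not None)
    except (PulseLoginException, PulseConnectionError) as exc:
        print("login failed:", exc)
    finally:
        await connection.async_do_logout_query()


asyncio.run(main())
```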
diff --git a/pyadtpulse/pulse_connection_properties.py b/pyadtpulse/pulse_connection_properties.py
new file mode 100644
index 0000000..6342ff0
--- /dev/null
+++ b/pyadtpulse/pulse_connection_properties.py
@@ -0,0 +1,238 @@
+"""Pulse connection info."""
+
+from asyncio import AbstractEventLoop
+from re import search
+
+from aiohttp import ClientSession
+from typeguard import typechecked
+
+from .const import (
+ ADT_DEFAULT_HTTP_ACCEPT_HEADERS,
+ ADT_DEFAULT_HTTP_USER_AGENT,
+ ADT_DEFAULT_SEC_FETCH_HEADERS,
+ API_HOST_CA,
+ API_PREFIX,
+ DEFAULT_API_HOST,
+)
+from .util import set_debug_lock
+
+
+class PulseConnectionProperties:
+ """Pulse connection info."""
+
+ __slots__ = (
+ "_api_host",
+ "_session",
+ "_user_agent",
+ "_loop",
+ "_api_version",
+ "_pci_attribute_lock",
+ "_detailed_debug_logging",
+ "_debug_locks",
+ )
+
+ @staticmethod
+ @typechecked
+ def check_service_host(service_host: str) -> None:
+ """Check if service host is valid."""
+ if service_host is None or service_host == "":
+ raise ValueError("Service host is mandatory")
+ if service_host not in (DEFAULT_API_HOST, API_HOST_CA):
+ raise ValueError(
+ f"Service host must be one of {DEFAULT_API_HOST}" f" or {API_HOST_CA}"
+ )
+
+ @staticmethod
+ def get_api_version(response_path: str) -> str | None:
+ """Regex used to exctract the API version.
+
+ Use for testing.
+ """
+ version: str | None = None
+ if not response_path:
+ return None
+ m = search(f"{API_PREFIX}(.+)/[a-z]*/", response_path)
+ if m is not None:
+ version = m.group(1)
+ return version
+
+ def __init__(
+ self,
+ host: str,
+ user_agent=ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"],
+ detailed_debug_logging=False,
+ debug_locks=False,
+ ) -> None:
+ """Initialize Pulse connection information."""
+ self._pci_attribute_lock = set_debug_lock(
+ debug_locks, "pyadtpulse.pci_attribute_lock"
+ )
+ self.debug_locks = debug_locks
+ self.detailed_debug_logging = detailed_debug_logging
+ self._loop: AbstractEventLoop | None = None
+ self._session: ClientSession | None = None
+ self.service_host = host
+ self._api_version = ""
+ self._user_agent = user_agent
+
+ def __del__(self):
+ """Destructor for ADTPulseConnection."""
+ if self._session is not None and not self._session.closed:
+ self._session.detach()
+
+ def _set_headers(self) -> None:
+ if self._session is not None:
+ self._session.headers.update(ADT_DEFAULT_HTTP_ACCEPT_HEADERS)
+ self._session.headers.update(ADT_DEFAULT_SEC_FETCH_HEADERS)
+ self._session.headers.update({"User-Agent": self._user_agent})
+
+ @property
+ def service_host(self) -> str:
+ """Get the service host."""
+ with self._pci_attribute_lock:
+ return self._api_host
+
+ @service_host.setter
+ @typechecked
+ def service_host(self, host: str):
+ """Set the service host.
+
+ Raises:
+ ValueError if host is not valid.
+ """
+ self.check_service_host(host)
+ with self._pci_attribute_lock:
+ self._api_host = host
+
+ @property
+ def detailed_debug_logging(self) -> bool:
+ """Get the detailed debug logging flag."""
+ with self._pci_attribute_lock:
+ return self._detailed_debug_logging
+
+ @detailed_debug_logging.setter
+ @typechecked
+ def detailed_debug_logging(self, value: bool):
+ """Set the detailed debug logging flag."""
+ with self._pci_attribute_lock:
+ self._detailed_debug_logging = value
+
+ @property
+ def debug_locks(self) -> bool:
+ """Get the debug locks flag."""
+ with self._pci_attribute_lock:
+ return self._debug_locks
+
+ @debug_locks.setter
+ @typechecked
+ def debug_locks(self, value: bool):
+ """Set the debug locks flag."""
+ with self._pci_attribute_lock:
+ self._debug_locks = value
+
+ @typechecked
+ def check_sync(self, message: str) -> AbstractEventLoop:
+ """Checks if sync login was performed.
+
+ Returns the loop to use for run_coroutine_threadsafe if so.
+ Raises RuntimeError with given message if not.
+ """
+ with self._pci_attribute_lock:
+ if self._loop is None:
+ raise RuntimeError(message)
+ return self._loop
+
+ @typechecked
+ def check_async(self, message: str) -> None:
+ """Checks if async login was performed.
+
+ Raises RuntimeError with given message if not.
+ """
+ with self._pci_attribute_lock:
+ if self._loop is not None:
+ raise RuntimeError(message)
+
+ @property
+ def loop(self) -> AbstractEventLoop | None:
+ """Get the event loop."""
+ with self._pci_attribute_lock:
+ return self._loop
+
+ @loop.setter
+ @typechecked
+ def loop(self, loop: AbstractEventLoop | None):
+ """Set the event loop."""
+ with self._pci_attribute_lock:
+ self._loop = loop
+
+ @property
+ def session(self) -> ClientSession:
+ """Get the session."""
+ with self._pci_attribute_lock:
+ if self._session is None:
+ self._session = ClientSession()
+ self._set_headers()
+ return self._session
+
+ @property
+ def api_version(self) -> str:
+ """Get the API version."""
+ with self._pci_attribute_lock:
+ return self._api_version
+
+ @api_version.setter
+ @typechecked
+ def api_version(self, version: str):
+ """Set the API version.
+
+ Raises:
+ ValueError: if version is not in the form major.minor.patch-subpatch
+ """
+
+ def check_version_string(value: str):
+ parts = value.split("-")
+ if len(parts) == 2:
+ version_parts = parts[0].split(".")
+ if len(version_parts) != 3 or not (
+ version_parts[0].isdigit()
+ and version_parts[1].isdigit()
+ and version_parts[2].isdigit()
+ and parts[1].isdigit()
+ ):
+ raise ValueError(
+ "API version must be in the form major.minor.patch-subpatch"
+ )
+ if len(version_parts) == 3 and version_parts[0].isdigit():
+ major_version = int(version_parts[0])
+ if major_version >= 26:
+ return
+ else:
+ raise ValueError("API version is numeric but less than 26")
+ raise ValueError(
+ "API version must be in the form major.minor.patch-subpatch"
+ )
+
+ with self._pci_attribute_lock:
+ check_version_string(version)
+ self._api_version = version
+
+ @typechecked
+ def make_url(self, uri: str) -> str:
+ """Create a URL to service host from a URI.
+
+ Args:
+ uri (str): the URI to convert
+
+ Returns:
+ str: the converted string
+ """
+ with self._pci_attribute_lock:
+ return f"{self._api_host}{API_PREFIX}{self._api_version}{uri}"
+
+ async def clear_session(self):
+ """Clear the session."""
+ with self._pci_attribute_lock:
+ old_session = self._session
+ self._session = None
+ if old_session:
+ await old_session.close()
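
For illustration, a minimal sketch of the static version parser and URL builder; the version string is made up, and the expected output assumes `API_PREFIX` is `"/myhome/"`.

```python
# Sketch: parse an API version out of a redirect path and build a URL from it.
from pyadtpulse.const import ADT_SUMMARY_URI, DEFAULT_API_HOST
from pyadtpulse.pulse_connection_properties import PulseConnectionProperties

print(PulseConnectionProperties.get_api_version("/myhome/27.0.0-140/access/signin.jsp"))
# expected: "27.0.0-140" (illustrative version string)

props = PulseConnectionProperties(DEFAULT_API_HOST)
props.api_version = "27.0.0-140"   # validated by the setter
print(props.make_url(ADT_SUMMARY_URI))
# expected: "https://portal.adtpulse.com/myhome/27.0.0-140/summary/summary.jsp"
```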
diff --git a/pyadtpulse/pulse_connection_status.py b/pyadtpulse/pulse_connection_status.py
new file mode 100644
index 0000000..288a8b0
--- /dev/null
+++ b/pyadtpulse/pulse_connection_status.py
@@ -0,0 +1,73 @@
+"""Pulse Connection Status."""
+
+from asyncio import Event
+
+from typeguard import typechecked
+
+from .pulse_backoff import PulseBackoff
+from .util import set_debug_lock
+
+
+class PulseConnectionStatus:
+ """Pulse Connection Status."""
+
+ __slots__ = (
+ "_backoff",
+ "_authenticated_flag",
+ "_pcs_attribute_lock",
+ )
+
+ @typechecked
+ def __init__(self, debug_locks: bool = False, detailed_debug_logging=False):
+ """Initialize the connection status object.
+
+ Args:
+ debug_locks (bool, optional): Enable debug locks. Defaults to False.
+ detailed_debug_logging (bool, optional): Enable detailed debug logging for the backoff.
+ Defaults to False.
+ """
+ self._pcs_attribute_lock = set_debug_lock(
+ debug_locks, "pyadtpulse.pcs_attribute_lock"
+ )
+ self._backoff = PulseBackoff(
+ "Connection Status",
+ initial_backoff_interval=1,
+ detailed_debug_logging=detailed_debug_logging,
+ )
+ self._authenticated_flag = Event()
+
+ @property
+ def authenticated_flag(self) -> Event:
+ """Get the authenticated flag."""
+ with self._pcs_attribute_lock:
+ return self._authenticated_flag
+
+ @property
+ def retry_after(self) -> float:
+ """Get the number of seconds to wait before retrying HTTP requests."""
+ with self._pcs_attribute_lock:
+ return self._backoff.expiration_time
+
+ @retry_after.setter
+ @typechecked
+ def retry_after(self, seconds: float) -> None:
+ """Set time after which HTTP requests can be retried."""
+ with self._pcs_attribute_lock:
+ self._backoff.set_absolute_backoff_time(seconds)
+
+ def get_backoff(self) -> PulseBackoff:
+ """Get the backoff object."""
+ return self._backoff
+
+ @property
+ def detailed_debug_logging(self) -> bool:
+ """Get the detailed debug logging flag."""
+ with self._pcs_attribute_lock:
+ return self._backoff.detailed_debug_logging
+
+ @detailed_debug_logging.setter
+ @typechecked
+ def detailed_debug_logging(self, value: bool):
+ """Set the detailed debug logging flag."""
+ with self._pcs_attribute_lock:
+ self._backoff.detailed_debug_logging = value
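
For illustration, a minimal sketch of how a server-supplied Retry-After maps onto the shared backoff.

```python
# Sketch: an absolute retry time pauses all requests until it expires.
from time import time

from pyadtpulse.pulse_connection_status import PulseConnectionStatus

status = PulseConnectionStatus()
status.retry_after = time() + 120            # e.g. parsed from a Retry-After header
print(status.get_backoff().will_backoff())   # True until the timestamp passes
print(status.retry_after > time())           # True
```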
diff --git a/pyadtpulse/pulse_query_manager.py b/pyadtpulse/pulse_query_manager.py
new file mode 100644
index 0000000..abfb3eb
--- /dev/null
+++ b/pyadtpulse/pulse_query_manager.py
@@ -0,0 +1,438 @@
+"""Pulse Query Manager."""
+
+from logging import getLogger
+from asyncio import wait_for
+from datetime import datetime
+from http import HTTPStatus
+from time import time
+
+from aiohttp import (
+ ClientConnectionError,
+ ClientConnectorError,
+ ClientError,
+ ClientResponse,
+ ClientResponseError,
+ ServerConnectionError,
+ ServerDisconnectedError,
+ ServerTimeoutError,
+)
+from bs4 import BeautifulSoup
+from typeguard import typechecked
+from yarl import URL
+
+from .const import (
+ ADT_DEFAULT_LOGIN_TIMEOUT,
+ ADT_HTTP_BACKGROUND_URIS,
+ ADT_ORB_URI,
+ ADT_OTHER_HTTP_ACCEPT_HEADERS,
+)
+from .exceptions import (
+ PulseClientConnectionError,
+ PulseNotLoggedInError,
+ PulseServerConnectionError,
+ PulseServiceTemporarilyUnavailableError,
+)
+from .pulse_backoff import PulseBackoff
+from .pulse_connection_properties import PulseConnectionProperties
+from .pulse_connection_status import PulseConnectionStatus
+from .util import make_soup, set_debug_lock
+
+LOG = getLogger(__name__)
+
+RECOVERABLE_ERRORS = {
+ HTTPStatus.INTERNAL_SERVER_ERROR,
+ HTTPStatus.BAD_GATEWAY,
+ HTTPStatus.GATEWAY_TIMEOUT,
+}
+
+MAX_REQUERY_RETRIES = 3
+
+
+class PulseQueryManager:
+ """Pulse Query Manager."""
+
+ __slots__ = (
+ "_pqm_attribute_lock",
+ "_connection_properties",
+ "_connection_status",
+ "_debug_locks",
+ )
+
+ @staticmethod
+ @typechecked
+ def _get_http_status_description(status_code: int) -> str:
+ """Get HTTP status description."""
+ status = HTTPStatus(status_code)
+ return status.description
+
+ @typechecked
+ def __init__(
+ self,
+ connection_status: PulseConnectionStatus,
+ connection_properties: PulseConnectionProperties,
+ debug_locks: bool = False,
+ ) -> None:
+ """Initialize Pulse Query Manager."""
+ self._pqm_attribute_lock = set_debug_lock(
+ debug_locks, "pyadtpulse.pqm_attribute_lock"
+ )
+ self._connection_status = connection_status
+ self._connection_properties = connection_properties
+ self._debug_locks = debug_locks
+
+ @staticmethod
+ @typechecked
+ async def _handle_query_response(
+ response: ClientResponse | None,
+ ) -> tuple[int, str | None, URL | None, str | None]:
+ if response is None:
+ return 0, None, None, None
+ response_text = await response.text()
+
+ return (
+ response.status,
+ response_text,
+ response.url,
+ response.headers.get("Retry-After"),
+ )
+
+ @typechecked
+ def _handle_http_errors(
+ self, return_value: tuple[int, str | None, URL | None, str | None]
+ ) -> None:
+ """Handle HTTP errors.
+
+ Parameters:
+ return_value (tuple[int, str | None, URL | None, str | None]):
+ The return value from _handle_query_response.
+
+ Raises:
+ PulseServerConnectionError: If the server returns an error code.
+ PulseServiceTemporarilyUnavailableError: If the server returns a
+ Retry-After header."""
+
+ def get_retry_after(retry_after: str) -> int | None:
+ """
+ Parse the value of the "Retry-After" header.
+
+ Parameters:
+ retry_after (str): The value of the "Retry-After" header
+
+ Returns:
+ int | None: the epoch time after which the request can be retried, or None if the header is invalid.
+ """
+ if retry_after.isnumeric():
+ retval = int(retry_after) + int(time())
+ else:
+ try:
+ retval = int(
+ datetime.strptime(
+ retry_after, "%a, %d %b %Y %H:%M:%S %Z"
+ ).timestamp()
+ )
+ except ValueError:
+ return None
+ return retval
+
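+        # Illustrative examples (not from the source): "Retry-After: 120" yields
+        # now + 120 as an absolute epoch time, while an HTTP-date such as
+        # "Retry-After: Wed, 21 Oct 2015 07:28:00 GMT" is converted to that
+        # date's timestamp; any other value is treated as invalid (None).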
+ if return_value[0] in (
+ HTTPStatus.TOO_MANY_REQUESTS,
+ HTTPStatus.SERVICE_UNAVAILABLE,
+ ):
+ retry = None
+ if return_value[3]:
+ retry = get_retry_after(return_value[3])
+ raise PulseServiceTemporarilyUnavailableError(
+ self._connection_status.get_backoff(),
+ retry,
+ )
+ raise PulseServerConnectionError(
+ f"HTTP error {return_value[0]}: {return_value[1]} connecting to {return_value[2]}",
+ self._connection_status.get_backoff(),
+ )
+
+ @typechecked
+ def _handle_network_errors(self, e: Exception) -> None:
+ if type(e) in (
+ ServerConnectionError,
+ ServerTimeoutError,
+ ServerDisconnectedError,
+ ):
+ raise PulseServerConnectionError(
+ str(e), self._connection_status.get_backoff()
+ )
+        if isinstance(e, ClientConnectionError) and (
+            "Connection refused" in str(e) or "timed out" in str(e)
+        ):
+ raise PulseServerConnectionError(
+ str(e), self._connection_status.get_backoff()
+ )
+        if isinstance(e, ClientConnectorError) and not isinstance(
+            e.os_error, (TimeoutError, BrokenPipeError)
+        ):
+ raise PulseServerConnectionError(
+ str(e), self._connection_status.get_backoff()
+ )
+ raise PulseClientConnectionError(str(e), self._connection_status.get_backoff())
+
+ @typechecked
+ async def async_query(
+ self,
+ uri: str,
+ method: str = "GET",
+ extra_params: dict[str, str] | None = None,
+ extra_headers: dict[str, str] | None = None,
+ timeout: int = 1,
+ requires_authentication: bool = True,
+ ) -> tuple[int, str | None, URL | None]:
+ """
+ Query ADT Pulse async.
+
+ Args:
+ uri (str): URI to query
+ method (str, optional): method to use. Defaults to "GET".
+ extra_params (Optional[Dict], optional): query/body parameters.
+ Defaults to None.
+ extra_headers (Optional[Dict], optional): extra HTTP headers.
+ Defaults to None.
+ timeout (int, optional): timeout in seconds. Defaults to 1.
+ requires_authentication (bool, optional): True if authentication is
+ required to perform query.
+ Defaults to True.
+ If true and authenticated flag not
+ set, will wait for flag to be set.
+
+ Returns:
+ tuple with integer return code, optional response text, and optional URL of
+ response
+
+ Raises:
+ PulseClientConnectionError: If the client cannot connect
+ PulseServerConnectionError: If there is a server error
+ PulseServiceTemporarilyUnavailableError: If the server returns a Retry-After header
+ PulseNotLoggedInError: if not logged in and task is waiting for longer than
+ ADT_DEFAULT_LOGIN_TIMEOUT seconds
+ """
+
+ async def setup_query():
+ if method not in ("GET", "POST"):
+ raise ValueError("method must be GET or POST")
+ await self._connection_status.get_backoff().wait_for_backoff()
+ if not self._connection_properties.api_version:
+ await self.async_fetch_version()
+ if not self._connection_properties.api_version:
+ raise ValueError("Could not determine API version for connection")
+
+ retry_after = self._connection_status.retry_after
+ now = time()
+ if retry_after > now:
+ raise PulseServiceTemporarilyUnavailableError(
+ self._connection_status.get_backoff(), retry_after
+ )
+ await setup_query()
+ url = self._connection_properties.make_url(uri)
+ headers = extra_headers if extra_headers is not None else {}
+ if uri in ADT_HTTP_BACKGROUND_URIS:
+ headers.setdefault("Accept", ADT_OTHER_HTTP_ACCEPT_HEADERS["Accept"])
+ if self._connection_properties.detailed_debug_logging:
+ LOG.debug(
+ "Attempting %s %s params=%s timeout=%d",
+ method,
+ url,
+ extra_params,
+ timeout,
+ )
+ retry = 0
+ return_value: tuple[int, str | None, URL | None, str | None] = (
+ HTTPStatus.OK.value,
+ None,
+ None,
+ None,
+ )
+ query_backoff = PulseBackoff(
+ f"Query:{method} {uri}",
+ self._connection_status.get_backoff().initial_backoff_interval,
+ threshold=0,
+ debug_locks=self._debug_locks,
+ detailed_debug_logging=self._connection_properties.detailed_debug_logging,
+ )
+ max_retries = (
+ MAX_REQUERY_RETRIES
+ if not self._connection_status.get_backoff().will_backoff()
+ else 1
+ )
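+        # Retry strategy for this single call: transient failures are retried up
+        # to MAX_REQUERY_RETRIES times, but only one attempt is made when the
+        # connection-wide backoff is already active, to avoid piling on a
+        # struggling service.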
+ while retry < max_retries:
+ try:
+ await query_backoff.wait_for_backoff()
+ retry += 1
+ if (
+ requires_authentication
+ and not self._connection_status.authenticated_flag.is_set()
+ ):
+ if self._connection_properties.detailed_debug_logging:
+ LOG.debug(
+ "%s for %s waiting for authenticated flag to be set",
+ method,
+ uri,
+ )
+ # wait for authenticated flag to be set
+ # use a timeout to prevent waiting forever
+ try:
+ await wait_for(
+ self._connection_status.authenticated_flag.wait(),
+ ADT_DEFAULT_LOGIN_TIMEOUT,
+ )
+ except TimeoutError as ex:
+ LOG.warning(
+ "%s for %s timed out waiting for authenticated flag to be set",
+ method,
+ uri,
+ )
+ raise PulseNotLoggedInError() from ex
+ async with self._connection_properties.session.request(
+ method,
+ url,
+                    headers=headers,
+ params=extra_params if method == "GET" else None,
+ data=extra_params if method == "POST" else None,
+ timeout=timeout,
+ ) as response:
+ return_value = await self._handle_query_response(response)
+ if return_value[0] in RECOVERABLE_ERRORS:
+ LOG.debug(
+ "query returned recoverable error code %s: %s,"
+ "retrying (count = %d)",
+ return_value[0],
+ self._get_http_status_description(return_value[0]),
+ retry,
+ )
+ if max_retries > 1 and retry == max_retries:
+ LOG.debug(
+ "Exceeded max retries of %d, giving up", max_retries
+ )
+ else:
+ query_backoff.increment_backoff()
+ response.raise_for_status()
+ continue
+ response.raise_for_status()
+ break
+
+ except ClientResponseError:
+ self._handle_http_errors(return_value)
+ except (
+ ClientConnectorError,
+ ServerTimeoutError,
+ ClientError,
+ ServerConnectionError,
+ ServerDisconnectedError,
+ ) as ex:
+ LOG.debug(
+ "Error %s occurred making %s request to %s",
+ ex.args,
+ method,
+ url,
+ exc_info=True,
+ )
+ if retry == max_retries:
+ self._handle_network_errors(ex)
+ query_backoff.increment_backoff()
+ continue
+ except TimeoutError as ex:
+ if retry == max_retries:
+ LOG.debug("Exceeded max retries of %d, giving up", max_retries)
+ raise PulseServerConnectionError(
+ "Timeout error",
+ self._connection_status.get_backoff(),
+ ) from ex
+ query_backoff.increment_backoff()
+ continue
+ # success
+ self._connection_status.get_backoff().reset_backoff()
+ return (return_value[0], return_value[1], return_value[2])
+
+ async def query_orb(self, level: int, error_message: str) -> BeautifulSoup | None:
+ """Query ADT Pulse ORB.
+
+ Args:
+ level (int): error level to log on failure
+ error_message (str): error message to use on failure
+
+ Returns:
+ Optional[BeautifulSoup]: A Beautiful Soup object, or None if failure
+
+ Raises:
+ PulseClientConnectionError: If the client cannot connect
+ PulseServerConnectionError: If there is a server error
+ PulseServiceTemporarilyUnavailableError: If the server returns a Retry-After header
+ """
+ code, response, url = await self.async_query(
+ ADT_ORB_URI,
+ extra_headers={"Sec-Fetch-Mode": "cors", "Sec-Fetch-Dest": "empty"},
+ )
+
+ return make_soup(code, response, url, level, error_message)
+
+ async def async_fetch_version(self) -> None:
+ """Fetch ADT Pulse version.
+
+ Exceptions are passed through to the caller since if this fails, there is
+ probably some underlying connection issue.
+ """
+ response_values: tuple[int, str | None, URL | None, str | None] = (
+ HTTPStatus.OK.value,
+ None,
+ None,
+ None,
+ )
+ if self._connection_properties.api_version:
+ return
+
+ signin_url = self._connection_properties.service_host
+ try:
+ async with self._connection_properties.session.get(
+ signin_url, timeout=10
+ ) as response:
+ response_values = await self._handle_query_response(response)
+ response.raise_for_status()
+
+ except ClientResponseError as ex:
+ LOG.error(
+ "Error %s occurred determining Pulse API version",
+ ex.args,
+ exc_info=True,
+ )
+ self._handle_http_errors(response_values)
+ return
+ except (
+ ClientConnectorError,
+ ServerTimeoutError,
+ ClientError,
+ ServerConnectionError,
+ ) as ex:
+ LOG.error(
+ "Error %s occurred determining Pulse API version",
+ ex.args,
+ exc_info=True,
+ )
+ self._handle_network_errors(ex)
+ except TimeoutError as ex:
+ LOG.error(
+ "Timeout occurred determining Pulse API version %s",
+ ex.args,
+ exc_info=True,
+ )
+ raise PulseServerConnectionError(
+ "Timeout occurred determining Pulse API version",
+ self._connection_status.get_backoff(),
+ ) from ex
+ version = self._connection_properties.get_api_version(str(response_values[2]))
+ if version is not None:
+ self._connection_properties.api_version = version
+ LOG.debug(
+ "Discovered ADT Pulse version %s at %s",
+ self._connection_properties.api_version,
+ self._connection_properties.service_host,
+ )
+ self._connection_status.get_backoff().reset_backoff()
diff --git a/pyadtpulse/pyadtpulse_async.py b/pyadtpulse/pyadtpulse_async.py
new file mode 100644
index 0000000..360087f
--- /dev/null
+++ b/pyadtpulse/pyadtpulse_async.py
@@ -0,0 +1,753 @@
+"""ADT Pulse Async API."""
+
+import asyncio
+import logging
+import re
+import time
+from random import randint
+from warnings import warn
+
+from bs4 import BeautifulSoup
+from typeguard import typechecked
+from yarl import URL
+
+from .alarm_panel import ADT_ALARM_UNKNOWN
+from .const import (
+ ADT_DEFAULT_HTTP_USER_AGENT,
+ ADT_DEFAULT_KEEPALIVE_INTERVAL,
+ ADT_DEFAULT_RELOGIN_INTERVAL,
+ ADT_GATEWAY_STRING,
+ ADT_SYNC_CHECK_URI,
+ ADT_TIMEOUT_URI,
+ DEFAULT_API_HOST,
+)
+from .exceptions import (
+ PulseAccountLockedError,
+ PulseAuthenticationError,
+ PulseClientConnectionError,
+ PulseGatewayOfflineError,
+ PulseMFARequiredError,
+ PulseNotLoggedInError,
+ PulseServerConnectionError,
+ PulseServiceTemporarilyUnavailableError,
+)
+from .pulse_authentication_properties import PulseAuthenticationProperties
+from .pulse_connection import PulseConnection
+from .pulse_connection_properties import PulseConnectionProperties
+from .pulse_connection_status import PulseConnectionStatus
+from .pyadtpulse_properties import PyADTPulseProperties
+from .site import ADTPulseSite
+from .util import handle_response, set_debug_lock
+
+LOG = logging.getLogger(__name__)
+SYNC_CHECK_TASK_NAME = "ADT Pulse Sync Check Task"
+KEEPALIVE_TASK_NAME = "ADT Pulse Keepalive Task"
+# backoff time before warning in wait_for_update()
+WARN_TRANSIENT_FAILURE_THRESHOLD = 2
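+
+# Illustrative usage sketch (assumptions: the username, password and 2FA
+# fingerprint below are placeholders, and the code runs inside a running
+# asyncio event loop):
+#
+#     pulse = PyADTPulseAsync("user@example.com", "password", "fingerprint")
+#     await pulse.async_login()
+#     try:
+#         while True:
+#             await pulse.wait_for_update()
+#             print(pulse.site.alarm_control_panel.status)
+#     finally:
+#         await pulse.async_logout()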
+
+
+class PyADTPulseAsync:
+ """ADT Pulse Async API."""
+
+ __slots__ = (
+ "_sync_task",
+ "_timeout_task",
+ "_pa_attribute_lock",
+ "_pulse_properties",
+ "_authentication_properties",
+ "_pulse_connection_properties",
+ "_pulse_connection",
+ "_pulse_connection_status",
+ "_site",
+ "_detailed_debug_logging",
+ "_sync_check_exception",
+ )
+
+ @typechecked
+ def __init__(
+ self,
+ username: str,
+ password: str,
+ fingerprint: str,
+ service_host: str = DEFAULT_API_HOST,
+ user_agent=ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"],
+ debug_locks: bool = False,
+ keepalive_interval: int = ADT_DEFAULT_KEEPALIVE_INTERVAL,
+ relogin_interval: int = ADT_DEFAULT_RELOGIN_INTERVAL,
+ detailed_debug_logging: bool = False,
+ ) -> None:
+ """Create a PyADTPulse object.
+ Args:
+ username (str): Username.
+ password (str): Password.
+ fingerprint (str): 2FA fingerprint.
+ service_host (str, optional): host prefix to use
+ i.e. https://portal.adtpulse.com or
+ https://portal-ca.adtpulse.com
+ user_agent (str, optional): User Agent.
+                Defaults to ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"].
+ debug_locks: (bool, optional): use debugging locks
+ Defaults to False
+ keepalive_interval (int, optional): number of minutes between
+ keepalive checks, defaults to ADT_DEFAULT_KEEPALIVE_INTERVAL,
+                maximum is ADT_MAX_KEEPALIVE_INTERVAL
+ relogin_interval (int, optional): number of minutes between relogin checks
+ defaults to ADT_DEFAULT_RELOGIN_INTERVAL,
+ minimum is ADT_MIN_RELOGIN_INTERVAL
+ detailed_debug_logging (bool, optional): enable detailed debug logging
+ """
+ self._pa_attribute_lock = set_debug_lock(
+ debug_locks, "pyadtpulse.pa_attribute_lock"
+ )
+ self._pulse_connection_properties = PulseConnectionProperties(
+ service_host, user_agent, detailed_debug_logging, debug_locks
+ )
+ self._authentication_properties = PulseAuthenticationProperties(
+ username=username,
+ password=password,
+ fingerprint=fingerprint,
+ debug_locks=debug_locks,
+ )
+ self._pulse_connection_status = PulseConnectionStatus(
+ debug_locks=debug_locks, detailed_debug_logging=detailed_debug_logging
+ )
+ self._pulse_properties = PyADTPulseProperties(
+ keepalive_interval=keepalive_interval,
+ relogin_interval=relogin_interval,
+ debug_locks=debug_locks,
+ )
+ self._pulse_connection = PulseConnection(
+ self._pulse_connection_status,
+ self._pulse_connection_properties,
+ self._authentication_properties,
+ debug_locks,
+ )
+ self._sync_task: asyncio.Task | None = None
+ self._timeout_task: asyncio.Task | None = None
+ self._site: ADTPulseSite | None = None
+ self._detailed_debug_logging = detailed_debug_logging
+ pc_backoff = self._pulse_connection.get_login_backoff()
+ self._sync_check_exception: Exception | None = PulseNotLoggedInError()
+ pc_backoff.reset_backoff()
+
+ def __repr__(self) -> str:
+ """Object representation."""
+ return (
+ f"<{self.__class__.__name__}: {self._authentication_properties.username}>"
+ )
+
+ async def _update_sites(self, soup: BeautifulSoup) -> None:
+ with self._pa_attribute_lock:
+ if self._site is None:
+ await self._initialize_sites(soup)
+ if self._site is None:
+ raise RuntimeError("pyadtpulse could not retrieve site")
+ self._site.alarm_control_panel.update_alarm_from_soup(soup)
+ self._site.update_zone_from_soup(soup)
+
+ async def _initialize_sites(self, soup: BeautifulSoup) -> None:
+ """
+ Initializes the sites in the ADT Pulse account.
+
+ Args:
+ soup (BeautifulSoup): The parsed HTML soup object.
+
+ Raises:
+ PulseGatewayOfflineError: if the gateway is offline
+ """
+ # typically, ADT Pulse accounts have only a single site (premise/location)
+ single_premise = soup.find("span", {"id": "p_singlePremise"})
+ if single_premise:
+ site_name = single_premise.text
+
+ # FIXME: this code works, but it doesn't pass the linter
+ signout_link = str(
+ soup.find("a", {"class": "p_signoutlink"}).get("href") # type: ignore
+ )
+ if signout_link:
+ m = re.search("networkid=(.+)&", signout_link)
+                if m and m.group(1):
+ site_id = m.group(1)
+ LOG.debug("Discovered site id %s: %s", site_id, site_name)
+ new_site = ADTPulseSite(self._pulse_connection, site_id, site_name)
+
+ # fetch zones first, so that we can have the status
+ # updated with _update_alarm_status
+ if not await new_site.fetch_devices(None):
+ LOG.error("Could not fetch zones from ADT site")
+ new_site.alarm_control_panel.update_alarm_from_soup(soup)
+ if new_site.alarm_control_panel.status == ADT_ALARM_UNKNOWN:
+ new_site.gateway.is_online = False
+ new_site.update_zone_from_soup(soup)
+ self._site = new_site
+ return
+ else:
+ LOG.warning(
+ "Couldn't find site id for %s in %s", site_name, signout_link
+ )
+ else:
+ LOG.error("ADT Pulse accounts with MULTIPLE sites not supported!!!")
+
+        # ...the current network id could also be parsed here, or perhaps better,
+        # just extract all of it from /system/settings.jsp
+
+ def _get_task_name(self, task: asyncio.Task | None, default_name) -> str:
+ """
+ Get the name of a task.
+
+ Parameters:
+ task (Task): The task object.
+ default_name (str): The default name to use if the task is None.
+
+ Returns:
+ str: The name of the task if it is not None, otherwise the default name
+ with a suffix indicating a possible internal error.
+ """
+ if task is not None:
+ return task.get_name()
+ return f"{default_name} - possible internal error"
+
+ def _get_sync_task_name(self) -> str:
+ return self._get_task_name(self._sync_task, SYNC_CHECK_TASK_NAME)
+
+ def _get_timeout_task_name(self) -> str:
+ return self._get_task_name(self._timeout_task, KEEPALIVE_TASK_NAME)
+
+ def _set_update_exception(self, e: Exception | None) -> None:
+ self.sync_check_exception = e
+ self._pulse_properties.updates_exist.set()
+
+ async def _keepalive_task(self) -> None:
+ """
+ Asynchronous function that runs a keepalive task to maintain the connection
+ with the ADT Pulse cloud.
+ """
+
+ async def reset_pulse_cloud_timeout() -> tuple[int, str | None, URL | None]:
+ return await self._pulse_connection.async_query(ADT_TIMEOUT_URI, "POST")
+
+ async def update_gateway_device_if_needed() -> None:
+ if self.site.gateway.next_update < time.time():
+ await self.site.set_device(ADT_GATEWAY_STRING)
+
+ def should_relogin(relogin_interval: int) -> bool:
+ return (
+ relogin_interval != 0
+ and time.time() - self._authentication_properties.last_login_time
+ > randint(int(0.75 * relogin_interval), relogin_interval)
+ )
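+        # For example (illustrative numbers): with a relogin interval of 120
+        # minutes (7200 seconds after the conversion below), a re-login triggers
+        # once the time since the last login exceeds a random threshold between
+        # 5400 and 7200 seconds, so clients do not all re-login simultaneously.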
+
+ response: str | None
+ task_name: str = self._get_task_name(self._timeout_task, KEEPALIVE_TASK_NAME)
+ LOG.debug("creating %s", task_name)
+
+ while True:
+ relogin_interval = self._pulse_properties.relogin_interval * 60
+ try:
+ await asyncio.sleep(self._pulse_properties.keepalive_interval * 60)
+ if self._pulse_connection_status.retry_after > time.time():
+ LOG.debug(
+ "%s: Skipping actions because retry_after > now", task_name
+ )
+ continue
+ if not self._pulse_connection.is_connected:
+ LOG.debug("%s: Skipping relogin because not connected", task_name)
+ continue
+ elif should_relogin(relogin_interval):
+ await self._pulse_connection.quick_logout()
+ try:
+ await self._login_looped(task_name)
+ except (PulseAuthenticationError, PulseMFARequiredError) as ex:
+ LOG.error("%s task exiting due to %s", task_name, ex.args[0])
+ return
+ continue
+ LOG.debug("Resetting timeout")
+ try:
+ code, response, url = await reset_pulse_cloud_timeout()
+ except (
+ PulseServiceTemporarilyUnavailableError,
+ PulseClientConnectionError,
+ PulseServerConnectionError,
+ ) as ex:
+ LOG.debug(
+ "Could not reset ADT Pulse cloud timeout due to %s, skipping",
+ ex.args[0],
+ )
+ continue
+ if (
+ not handle_response(
+ code,
+ url,
+ logging.WARNING,
+ "Could not reset ADT Pulse cloud timeout",
+ )
+ or response is None
+ ):
+ continue
+ await update_gateway_device_if_needed()
+
+ except asyncio.CancelledError:
+ LOG.debug("%s cancelled", task_name)
+ return
+
+ async def _clean_done_tasks(self) -> None:
+ with self._pa_attribute_lock:
+ if self._sync_task is not None and self._sync_task.done():
+ await self._sync_task
+ self._sync_task = None
+ if self._timeout_task is not None and self._timeout_task.done():
+ await self._timeout_task
+ self._timeout_task = None
+
+ async def _cancel_task(self, task: asyncio.Task | None) -> None:
+ """
+ Cancel a given asyncio task.
+
+ Args:
+ task (asyncio.Task | None): The task to be cancelled.
+ """
+ await self._clean_done_tasks()
+ if task is None:
+ return
+ task_name = task.get_name()
+ LOG.debug("cancelling %s", task_name)
+ task.cancel()
+ try:
+ await task
+ except asyncio.CancelledError:
+ pass
+ if task == self._sync_task:
+ with self._pa_attribute_lock:
+ self._sync_task = None
+ else:
+ with self._pa_attribute_lock:
+ self._timeout_task = None
+ LOG.debug("%s successfully cancelled", task_name)
+
+ async def _login_looped(self, task_name: str) -> None:
+ """
+        Log in, retrying in a loop until successful.
+
+        Args:
+            task_name (str): name of the calling task, used in log messages.
+
+        Returns:
+            None
+ """
+ count = 0
+ log_level = logging.DEBUG
+
+ while True:
+ count += 1
+ if count > 5:
+ log_level = logging.WARNING
+ LOG.log(log_level, "%s performming loop login", task_name)
+ try:
+ await self.async_login()
+ except (
+ PulseClientConnectionError,
+ PulseServerConnectionError,
+ ) as ex:
+ LOG.log(
+ log_level,
+ "loop login in task %s received exception %s, retrying",
+ task_name,
+ ex.args[0],
+ )
+ if (
+ log_level == logging.WARNING
+ and self._sync_check_exception is None
+ or self._sync_check_exception != ex
+ ):
+ self._set_update_exception(ex)
+ continue
+ # success, return
+ return
+
+ async def _sync_check_task(self) -> None:
+ """Asynchronous function that performs a synchronization check task."""
+
+ async def perform_sync_check_query():
+ return await self._pulse_connection.async_query(
+ ADT_SYNC_CHECK_URI,
+ extra_headers={"Sec-Fetch-Mode": "iframe"},
+ extra_params={"ts": str(int(time.time() * 1000))},
+ )
+
+ task_name = self._get_sync_task_name()
+ LOG.debug("creating %s", task_name)
+
+ response_text: str | None = None
+ code: int = 200
+ have_updates = False
+ url: URL | None = None
+
+ def check_sync_check_response() -> bool:
+ """
+ Validates the sync check response received from the ADT Pulse site.
+ Returns:
+ bool: True if the sync check response indicates updates, False otherwise
+
+ Raises:
+                PulseAccountLockedError: if the account is locked and no retry
+                    time is available.
+                PulseAuthenticationError: if the ADT Pulse site returns an
+                    authentication error.
+                PulseMFARequiredError: if the ADT Pulse site returns an MFA error.
+                PulseNotLoggedInError: if the ADT Pulse site returns a not logged
+                    in error.
+ """
+ if response_text is None:
+ LOG.warning("Internal Error: response_text is None")
+ return False
+ pattern = r"\d+[-]\d+[-]\d+"
+ if not re.match(pattern, response_text):
+ LOG.warning(
+ "Unexpected sync check format",
+ )
+ self._pulse_connection.check_login_errors((code, response_text, url))
+ return False
+ split_text = response_text.split("-")
+ if int(split_text[0]) > 9 or int(split_text[1]) > 9:
+ return False
+ return True
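+        # Inferred from the checks above (not from ADT documentation): a token
+        # such as "1-0-0" is treated as pending updates, while a token whose
+        # first or second field is greater than 9 is treated as a no-change
+        # heartbeat.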
+
+ async def handle_no_updates_exist() -> None:
+ if have_updates:
+ try:
+ success = await self.async_update()
+ except (
+ PulseClientConnectionError,
+ PulseServerConnectionError,
+ PulseGatewayOfflineError,
+ ) as e:
+ LOG.debug("Pulse update failed in task %s due to %s", task_name, e)
+ self._set_update_exception(e)
+ return
+ except PulseNotLoggedInError:
+ LOG.info(
+ "Pulse update failed in task %s due to not logged in, relogging in...",
+ task_name,
+ )
+ await self._pulse_connection.quick_logout()
+ await self._login_looped(task_name)
+ return
+ if not success:
+ LOG.debug("Pulse data update failed in task %s", task_name)
+ return
+ self._set_update_exception(None)
+ else:
+ additional_msg = ""
+ if not self.site.gateway.is_online:
+ # bump backoff and resignal since offline and nothing updated
+ self._set_update_exception(
+ PulseGatewayOfflineError(self.site.gateway.backoff)
+ )
+ additional_msg = ", gateway offline so backoff incremented"
+ if self._detailed_debug_logging:
+ LOG.debug(
+ "Sync token %s indicates no remote updates to process %s ",
+ response_text,
+ additional_msg,
+ )
+
+ async def shutdown_task(ex: Exception):
+ await self._pulse_connection.quick_logout()
+ await self._cancel_task(self._timeout_task)
+ self._set_update_exception(ex)
+
+ while True:
+ try:
+ if not have_updates and not self.site.gateway.is_online:
+ # gateway going back online will trigger a sync check of 1-0-0
+ await self.site.gateway.backoff.wait_for_backoff()
+ else:
+ await asyncio.sleep(
+ self.site.gateway.poll_interval if not have_updates else 0.0
+ )
+
+ try:
+ code, response_text, url = await perform_sync_check_query()
+ except (
+ PulseClientConnectionError,
+ PulseServerConnectionError,
+ ) as e:
+                    # temporarily unavailable errors should be reported immediately
+ # since the next query will sleep until the retry-after is over
+ msg = ""
+ if e.backoff.backoff_count > WARN_TRANSIENT_FAILURE_THRESHOLD:
+ self._set_update_exception(e)
+ else:
+ msg = ", ignoring..."
+ LOG.debug("Pulse sync check query failed due to %s%s", e, msg)
+ continue
+ except (
+ PulseServiceTemporarilyUnavailableError,
+ PulseNotLoggedInError,
+ ) as e:
+ if isinstance(e, PulseServiceTemporarilyUnavailableError):
+ status = "temporarily unavailable"
+ else:
+ status = "not logged in"
+ LOG.warning("Pulse service %s, ending %s task", status, task_name)
+ await shutdown_task(e)
+ return
+ if not handle_response(
+ code, url, logging.WARNING, "Error querying ADT sync"
+ ):
+ continue
+ if response_text is None:
+ LOG.warning("Sync check received no response from ADT Pulse site")
+ continue
+ more_updates = True
+ try:
+ if have_updates:
+ more_updates = check_sync_check_response()
+ else:
+ have_updates = check_sync_check_response()
+ except PulseNotLoggedInError:
+ LOG.info("Pulse sync check indicates logged out, re-logging in....")
+ await self._pulse_connection.quick_logout()
+ await self._login_looped(task_name)
+ except (
+ PulseAuthenticationError,
+ PulseMFARequiredError,
+ PulseAccountLockedError,
+ ) as ex:
+ LOG.error(
+ "Task %s exiting due to error: %s",
+ task_name,
+ ex.args[0],
+ )
+ await shutdown_task(ex)
+ return
+ if have_updates and more_updates:
+ LOG.debug("Updates exist: %s, requerying", response_text)
+ continue
+ await handle_no_updates_exist()
+ have_updates = False
+ continue
+ except asyncio.CancelledError:
+ LOG.debug("%s cancelled", task_name)
+ return
+
+ async def async_login(self) -> None:
+ """Login asynchronously to ADT.
+
+ Returns: None
+
+ Raises:
+ PulseClientConnectionError: if client connection fails
+ PulseServerConnectionError: if server connection fails
+ PulseServiceTemporarilyUnavailableError: if server returns a Retry-After header
+ PulseAuthenticationError: if authentication fails
+ PulseAccountLockedError: if account is locked
+ PulseMFARequiredError: if MFA is required
+ PulseNotLoggedInError: if login fails
+ """
+ if self._pulse_connection.login_in_progress:
+ LOG.debug("Login already in progress, returning")
+ return
+ LOG.debug(
+ "Authenticating to ADT Pulse cloud service as %s",
+ self._authentication_properties.username,
+ )
+ await self._pulse_connection.async_fetch_version()
+ soup = await self._pulse_connection.async_do_login_query()
+ if soup is None:
+ await self._pulse_connection.quick_logout()
+ ex = PulseNotLoggedInError()
+ self.sync_check_exception = ex
+ raise ex
+ self.sync_check_exception = None
+ # if tasks are started, we've already logged in before
+ # clean up completed tasks first
+ await self._clean_done_tasks()
+ if self._timeout_task is not None:
+ return
+ if not self._site:
+ await self._update_sites(soup)
+ if self._site is None:
+ LOG.error("Could not retrieve any sites, login failed")
+ await self._pulse_connection.quick_logout()
+ ex = PulseNotLoggedInError()
+ self.sync_check_exception = ex
+ raise ex
+ self.sync_check_exception = None
+ self._timeout_task = asyncio.create_task(
+ self._keepalive_task(), name=KEEPALIVE_TASK_NAME
+ )
+ await asyncio.sleep(0)
+
+ async def async_logout(self) -> None:
+ """Logout of ADT Pulse async."""
+ if self._pulse_connection.login_in_progress:
+ LOG.debug("Login in progress, returning")
+ return
+ self._set_update_exception(PulseNotLoggedInError())
+ LOG.info(
+ "Logging %s out of ADT Pulse", self._authentication_properties.username
+ )
+ if asyncio.current_task() not in (self._sync_task, self._timeout_task):
+ await self._cancel_task(self._timeout_task)
+ await self._cancel_task(self._sync_task)
+ await self._pulse_connection.async_do_logout_query(self.site.id)
+
+ async def async_update(self) -> bool:
+ """Update ADT Pulse data.
+
+ Returns:
+ bool: True if update succeeded.
+
+ Raises:
+ PulseGatewayOfflineError: if the gateway is offline
+ """
+ LOG.debug("Checking ADT Pulse cloud service for updates")
+
+ # FIXME will have to query other URIs for camera/zwave/etc
+ soup = await self._pulse_connection.query_orb(
+ logging.INFO, "Error returned from ADT Pulse service check"
+ )
+ if soup is not None:
+ await self._update_sites(soup)
+ return True
+
+ return False
+
+ async def wait_for_update(self) -> None:
+ """Wait for update.
+
+ Blocks current async task until Pulse system
+ signals an update
+
+ Raises:
+            Any exception defined in exceptions.py may be raised
+ """
+ # FIXME?: This code probably won't work with multiple waiters.
+ await self._clean_done_tasks()
+ if self.sync_check_exception:
+ raise self.sync_check_exception
+ with self._pa_attribute_lock:
+ if self._timeout_task is None:
+ raise PulseNotLoggedInError()
+ if self._sync_task is None:
+ coro = self._sync_check_task()
+ self._sync_task = asyncio.create_task(
+ coro, name=f"{SYNC_CHECK_TASK_NAME}: Async session"
+ )
+ await asyncio.sleep(0)
+
+ await self._pulse_properties.updates_exist.wait()
+ self._pulse_properties.updates_exist.clear()
+ curr_exception = self.sync_check_exception
+ self.sync_check_exception = None
+ if curr_exception:
+ raise curr_exception
+
+ @property
+ def sites(self) -> list[ADTPulseSite]:
+ """Return all sites for this ADT Pulse account."""
+ warn(
+ "multiple sites being removed, use pyADTPulse.site instead",
+ PendingDeprecationWarning,
+ stacklevel=2,
+ )
+ with self._pa_attribute_lock:
+ if self._site is None:
+ raise RuntimeError(
+ "No sites have been retrieved, have you logged in yet?"
+ )
+ return [self._site]
+
+ @property
+ def site(self) -> ADTPulseSite:
+ """Return the site associated with the Pulse login."""
+ with self._pa_attribute_lock:
+ if self._site is None:
+ raise RuntimeError(
+ "No sites have been retrieved, have you logged in yet?"
+ )
+ return self._site
+
+ @property
+ def is_connected(self) -> bool:
+ """Convenience method to return whether ADT Pulse is connected."""
+ return self._pulse_connection.is_connected
+
+ @property
+ def detailed_debug_logging(self) -> bool:
+ """Return detailed debug logging."""
+ return self._pulse_connection.detailed_debug_logging
+
+ @detailed_debug_logging.setter
+ @typechecked
+ def detailed_debug_logging(self, value: bool) -> None:
+ """Set detailed debug logging."""
+ self._pulse_connection.detailed_debug_logging = value
+
+ @property
+ def keepalive_interval(self) -> int:
+ """Get the keepalive interval in minutes.
+
+ Returns:
+ int: the keepalive interval
+ """
+ return self._pulse_properties.keepalive_interval
+
+ @keepalive_interval.setter
+ @typechecked
+ def keepalive_interval(self, interval: int | None) -> None:
+ """Set the keepalive interval in minutes.
+
+ Args:
+ interval (int|None): The number of minutes between keepalive calls
+ If set to None, resets to ADT_DEFAULT_KEEPALIVE_INTERVAL
+
+ Raises:
+            ValueError: if a keepalive interval greater than
+                ADT_MAX_KEEPALIVE_INTERVAL minutes is specified
+ """
+ self._pulse_properties.keepalive_interval = interval
+
+ @property
+ def relogin_interval(self) -> int:
+ """Get the relogin interval in minutes.
+
+ Returns:
+ int: the relogin interval
+ """
+ return self._pulse_properties.relogin_interval
+
+ @relogin_interval.setter
+ @typechecked
+ def relogin_interval(self, interval: int | None) -> None:
+ """Set the relogin interval in minutes.
+
+ If set to None, resets to ADT_DEFAULT_RELOGIN_INTERVAL
+ """
+ self._pulse_properties.relogin_interval = interval
+
+ @property
+ def sync_check_exception(self) -> Exception | None:
+ """Return sync check exception.
+
+ This should not be used by external code.
+
+ Returns:
+ Exception: sync check exception
+ """
+ with self._pa_attribute_lock:
+ return self._sync_check_exception
+
+ @sync_check_exception.setter
+ @typechecked
+ def sync_check_exception(self, value: Exception | None) -> None:
+ """Set sync check exception.
+
+ This should not be used by external code.
+
+ Args:
+ value (Exception): sync check exception
+ """
+ with self._pa_attribute_lock:
+ self._sync_check_exception = value
diff --git a/pyadtpulse/pyadtpulse_properties.py b/pyadtpulse/pyadtpulse_properties.py
new file mode 100644
index 0000000..d4a1a73
--- /dev/null
+++ b/pyadtpulse/pyadtpulse_properties.py
@@ -0,0 +1,173 @@
+"""PyADTPulse Properties."""
+
+import asyncio
+import logging
+from warnings import warn
+
+from typeguard import typechecked
+
+from .const import (
+ ADT_DEFAULT_KEEPALIVE_INTERVAL,
+ ADT_DEFAULT_RELOGIN_INTERVAL,
+ ADT_MAX_KEEPALIVE_INTERVAL,
+ ADT_MIN_RELOGIN_INTERVAL,
+)
+from .site import ADTPulseSite
+from .util import set_debug_lock
+
+LOG = logging.getLogger(__name__)
+
+
+class PyADTPulseProperties:
+ """PyADTPulse Properties."""
+
+ __slots__ = (
+ "_updates_exist",
+ "_pp_attribute_lock",
+ "_relogin_interval",
+ "_keepalive_interval",
+ "_site",
+ )
+
+ @staticmethod
+ @typechecked
+ def _check_keepalive_interval(keepalive_interval: int) -> None:
+ if keepalive_interval > ADT_MAX_KEEPALIVE_INTERVAL or keepalive_interval <= 0:
+ raise ValueError(
+ f"keepalive interval ({keepalive_interval}) must be "
+ f"greater than 0 and less than {ADT_MAX_KEEPALIVE_INTERVAL}"
+ )
+
+ @staticmethod
+ @typechecked
+ def _check_relogin_interval(relogin_interval: int) -> None:
+ if relogin_interval < ADT_MIN_RELOGIN_INTERVAL:
+ raise ValueError(
+ f"relogin interval ({relogin_interval}) must be "
+ f"greater than {ADT_MIN_RELOGIN_INTERVAL}"
+ )
+
+ @typechecked
+ def __init__(
+ self,
+ keepalive_interval: int = ADT_DEFAULT_KEEPALIVE_INTERVAL,
+ relogin_interval: int = ADT_DEFAULT_RELOGIN_INTERVAL,
+ debug_locks: bool = False,
+ ) -> None:
+ """Create a PyADTPulse properties object.
+ Args:
+ pulse_authentication_properties (PulseAuthenticationProperties):
+ an instance of PulseAuthenticationProperties
+ pulse_connection_properties (PulseConnectionProperties):
+ """
+ # FIXME use thread event/condition, regular condition?
+ # defer initialization to make sure we have an event loop
+
+ self._updates_exist = asyncio.locks.Event()
+
+ self._pp_attribute_lock = set_debug_lock(
+ debug_locks, "pyadtpulse.async_attribute_lock"
+ )
+
+ self._site: ADTPulseSite | None = None
+ self.keepalive_interval = keepalive_interval
+ self.relogin_interval = relogin_interval
+
+ @property
+ def relogin_interval(self) -> int:
+ """Get re-login interval.
+
+ Returns:
+ int: number of minutes to re-login to Pulse
+ 0 means disabled
+ """
+ with self._pp_attribute_lock:
+ return self._relogin_interval
+
+ @relogin_interval.setter
+ @typechecked
+ def relogin_interval(self, interval: int | None) -> None:
+ """Set re-login interval.
+
+ Args:
+ interval (int|None): The number of minutes between logins.
+ If set to None, resets to ADT_DEFAULT_RELOGIN_INTERVAL
+
+ Raises:
+            ValueError: if a relogin interval less than
+                ADT_MIN_RELOGIN_INTERVAL minutes is specified
+ """
+ if interval is None:
+ interval = ADT_DEFAULT_RELOGIN_INTERVAL
+ else:
+ self._check_relogin_interval(interval)
+ with self._pp_attribute_lock:
+ self._relogin_interval = interval
+ LOG.debug("relogin interval set to %d", self._relogin_interval)
+
+ @property
+ def keepalive_interval(self) -> int:
+ """Get the keepalive interval in minutes.
+
+ Returns:
+ int: the keepalive interval
+ """
+ with self._pp_attribute_lock:
+ return self._keepalive_interval
+
+ @keepalive_interval.setter
+ @typechecked
+ def keepalive_interval(self, interval: int | None) -> None:
+ """Set the keepalive interval in minutes.
+
+ Args:
+ interval (int|None): The number of minutes between keepalive calls
+ If set to None, resets to ADT_DEFAULT_KEEPALIVE_INTERVAL
+
+ Raises:
+            ValueError: if a keepalive interval greater than
+                ADT_MAX_KEEPALIVE_INTERVAL minutes is specified
+ """
+ if interval is None:
+ interval = ADT_DEFAULT_KEEPALIVE_INTERVAL
+ else:
+ self._check_keepalive_interval(interval)
+ with self._pp_attribute_lock:
+ self._keepalive_interval = interval
+ LOG.debug("keepalive interval set to %d", self._keepalive_interval)
+
+ @property
+ def sites(self) -> list[ADTPulseSite]:
+ """Return all sites for this ADT Pulse account."""
+ warn(
+ "multiple sites being removed, use pyADTPulse.site instead",
+ PendingDeprecationWarning,
+ stacklevel=2,
+ )
+ with self._pp_attribute_lock:
+ if self._site is None:
+ raise RuntimeError(
+ "No sites have been retrieved, have you logged in yet?"
+ )
+ return [self._site]
+
+ @property
+ def site(self) -> ADTPulseSite:
+ """Return the site associated with the Pulse login."""
+ with self._pp_attribute_lock:
+ if self._site is None:
+ raise RuntimeError(
+ "No sites have been retrieved, have you logged in yet?"
+ )
+ return self._site
+
+ def set_update_status(self) -> None:
+ """Sets updates_exist to notify wait_for_update."""
+ with self._pp_attribute_lock:
+ self.updates_exist.set()
+
+ @property
+ def updates_exist(self) -> asyncio.locks.Event:
+ """Check if updates exist."""
+ with self._pp_attribute_lock:
+ return self._updates_exist
diff --git a/pyadtpulse/site.py b/pyadtpulse/site.py
index b010e86..f9afe59 100644
--- a/pyadtpulse/site.py
+++ b/pyadtpulse/site.py
@@ -1,194 +1,85 @@
"""Module representing an ADT Pulse Site."""
+
import logging
import re
from asyncio import Task, create_task, gather, get_event_loop, run_coroutine_threadsafe
from datetime import datetime
-from threading import RLock
from time import time
-from typing import List, Optional, Union
-from warnings import warn
-
-# import dateparser
-from bs4 import BeautifulSoup
-from .alarm_panel import ADTPulseAlarmPanel
-from .const import ADT_DEVICE_URI, ADT_GATEWAY_STRING, ADT_SYSTEM_URI
-from .gateway import ADTPulseGateway
-from .pulse_connection import ADTPulseConnection
-from .util import DebugRLock, make_soup, parse_pulse_datetime, remove_prefix
+from bs4 import BeautifulSoup, ResultSet
+from typeguard import typechecked
+
+from .const import ADT_DEVICE_URI, ADT_GATEWAY_STRING, ADT_GATEWAY_URI, ADT_SYSTEM_URI
+from .exceptions import (
+ PulseClientConnectionError,
+ PulseGatewayOfflineError,
+ PulseServerConnectionError,
+ PulseServiceTemporarilyUnavailableError,
+)
+from .pulse_connection import PulseConnection
+from .site_properties import ADTPulseSiteProperties
+from .util import make_soup, parse_pulse_datetime, remove_prefix
from .zones import ADTPulseFlattendZone, ADTPulseZones
LOG = logging.getLogger(__name__)
+SECURITY_PANEL_ID = "1"
+SECURITY_PANEL_NAME = "Security Panel"
-class ADTPulseSite:
+
+class ADTPulseSite(ADTPulseSiteProperties):
"""Represents an individual ADT Pulse site."""
- __slots__ = (
- "_pulse_connection",
- "_id",
- "_name",
- "_last_updated",
- "_alarm_panel",
- "_zones",
- "_site_lock",
- "_gateway",
- )
-
- def __init__(self, pulse_connection: ADTPulseConnection, site_id: str, name: str):
+ __slots__ = ("_pulse_connection",)
+
+ @typechecked
+ def __init__(self, pulse_connection: PulseConnection, site_id: str, name: str):
"""Initialize.
Args:
- adt_service (PyADTPulse): a PyADTPulse object
- site_id (str): site ID
- name (str): site name
+ pulse_connection (PulseConnection): Pulse connection.
+ site_id (str): Site ID.
+ name (str): Site name.
"""
self._pulse_connection = pulse_connection
- self._id = site_id
- self._name = name
- self._last_updated: int = 0
- self._zones = ADTPulseZones()
- self._site_lock: Union[RLock, DebugRLock]
- if isinstance(self._pulse_connection._attribute_lock, DebugRLock):
- self._site_lock = DebugRLock("ADTPulseSite._site_lock")
- else:
- self._site_lock = RLock()
- self._alarm_panel = ADTPulseAlarmPanel()
- self._gateway = ADTPulseGateway()
-
- @property
- def id(self) -> str:
- """Get site id.
-
- Returns:
- str: the site id
- """
- return self._id
-
- @property
- def name(self) -> str:
- """Get site name.
-
- Returns:
- str: the site name
- """
- return self._name
-
- # FIXME: should this actually return if the alarm is going off!? How do we
- # return state that shows the site is compromised??
-
- @property
- def last_updated(self) -> int:
- """Return time site last updated.
-
- Returns:
- int: the time site last updated as datetime
- """
- with self._site_lock:
- return self._last_updated
-
- @property
- def site_lock(self) -> Union[RLock, DebugRLock]:
- """Get thread lock for site data.
-
- Not needed for async
-
- Returns:
- RLock: thread RLock
- """
- return self._site_lock
+ super().__init__(site_id, name, pulse_connection.debug_locks)
+ @typechecked
def arm_home(self, force_arm: bool = False) -> bool:
"""Arm system home."""
- if self.alarm_control_panel is None:
- raise RuntimeError("Cannot arm system home, no control panels exist")
return self.alarm_control_panel.arm_home(
self._pulse_connection, force_arm=force_arm
)
+ @typechecked
def arm_away(self, force_arm: bool = False) -> bool:
"""Arm system away."""
- if self.alarm_control_panel is None:
- raise RuntimeError("Cannot arm system away, no control panels exist")
return self.alarm_control_panel.arm_away(
self._pulse_connection, force_arm=force_arm
)
def disarm(self) -> bool:
"""Disarm system."""
- if self.alarm_control_panel is None:
- raise RuntimeError("Cannot disarm system, no control panels exist")
return self.alarm_control_panel.disarm(self._pulse_connection)
+ @typechecked
async def async_arm_home(self, force_arm: bool = False) -> bool:
"""Arm system home async."""
- if self.alarm_control_panel is None:
- raise RuntimeError("Cannot arm system home, no control panels exist")
return await self.alarm_control_panel.async_arm_home(
self._pulse_connection, force_arm=force_arm
)
+ @typechecked
async def async_arm_away(self, force_arm: bool = False) -> bool:
"""Arm system away async."""
- if self.alarm_control_panel is None:
- raise RuntimeError("Cannot arm system away, no control panels exist")
return await self.alarm_control_panel.async_arm_away(
self._pulse_connection, force_arm=force_arm
)
async def async_disarm(self) -> bool:
"""Disarm system async."""
- if self.alarm_control_panel is None:
- raise RuntimeError("Cannot disarm system, no control panels exist")
return await self.alarm_control_panel.async_disarm(self._pulse_connection)
- @property
- def zones(self) -> Optional[List[ADTPulseFlattendZone]]:
- """Return all zones registered with the ADT Pulse account.
-
- (cached copy of last fetch)
- See Also fetch_zones()
- """
- with self._site_lock:
- if not self._zones:
- raise RuntimeError("No zones exist")
- return self._zones.flatten()
-
- @property
- def zones_as_dict(self) -> Optional[ADTPulseZones]:
- """Return zone information in dictionary form.
-
- Returns:
- ADTPulseZones: all zone information
- """
- with self._site_lock:
- if not self._zones:
- raise RuntimeError("No zones exist")
- return self._zones
-
- @property
- def alarm_control_panel(self) -> ADTPulseAlarmPanel:
- """Return the alarm panel object for the site.
-
- Returns:
- Optional[ADTPulseAlarmPanel]: the alarm panel object
- """
- return self._alarm_panel
-
- @property
- def gateway(self) -> ADTPulseGateway:
- """Get gateway device object.
-
- Returns:
- ADTPulseGateway: Gateway device
- """
- return self._gateway
-
- @property
- def history(self):
- """Return log of history for this zone (NOT IMPLEMENTED)."""
- raise NotImplementedError
-
# status_orb = summary_html_soup.find('canvas', {'id': 'ic_orb'})
# if status_orb:
# self._status = status_orb['orb']
@@ -200,50 +91,70 @@ def history(self):
# if we should also update the zone details, force a fresh fetch
# of data from ADT Pulse
- async def _get_device_attributes(self, device_id: str) -> Optional[dict[str, str]]:
+ async def _get_device_attributes(self, device_id: str) -> dict[str, str] | None:
+ """
+ Retrieves the attributes of a device.
+
+ Args:
+ device_id (str): The ID of the device to retrieve attributes for.
+
+ Returns:
+            dict[str, str] | None: A dictionary of attribute names and their
+                corresponding values, or None if the device response could not
+                be parsed.
+ """
result: dict[str, str] = {}
if device_id == ADT_GATEWAY_STRING:
- deviceResponse = await self._pulse_connection.async_query(
- "/system/gateway.jsp", timeout=10
+ device_response = await self._pulse_connection.async_query(
+ ADT_GATEWAY_URI, timeout=10
)
else:
- deviceResponse = await self._pulse_connection.async_query(
+ device_response = await self._pulse_connection.async_query(
ADT_DEVICE_URI, extra_params={"id": device_id}
)
- deviceResponseSoup = await make_soup(
- deviceResponse,
+ device_response_soup = make_soup(
+ device_response[0],
+ device_response[1],
+ device_response[2],
logging.DEBUG,
"Failed loading device attributes from ADT Pulse service",
)
- if deviceResponseSoup is None:
+ if device_response_soup is None:
return None
- for devInfoRow in deviceResponseSoup.find_all(
+ for dev_info_row in device_response_soup.find_all(
"td", {"class", "InputFieldDescriptionL"}
):
- identityText = (
- str(devInfoRow.get_text())
+ identity_text = (
+ str(dev_info_row.get_text())
.lower()
.strip()
.rstrip(":")
.replace(" ", "_")
.replace("/", "_")
)
- sibling = devInfoRow.find_next_sibling()
+ sibling = dev_info_row.find_next_sibling()
if not sibling:
value = "Unknown"
else:
value = str(sibling.get_text()).strip()
- result.update({identityText: value})
+ result.update({identity_text: value})
return result
- async def _set_device(self, device_id: str) -> None:
+ @typechecked
+ async def set_device(self, device_id: str) -> None:
+ """
+ Sets the device attributes for the given device ID.
+
+ Args:
+ device_id (str): The ID of the device.
+ """
dev_attr = await self._get_device_attributes(device_id)
if dev_attr is None:
return
if device_id == ADT_GATEWAY_STRING:
self._gateway.set_gateway_attributes(dev_attr)
return
- if device_id == "1":
+ if device_id == SECURITY_PANEL_ID:
self._alarm_panel.set_alarm_attributes(dev_attr)
return
if device_id.isdigit():
@@ -251,98 +162,117 @@ async def _set_device(self, device_id: str) -> None:
else:
LOG.debug("Zone %s is not an integer, skipping", device_id)
- async def _fetch_devices(self, soup: Optional[BeautifulSoup]) -> bool:
- """Fetch devices for a site.
+ @typechecked
+ async def fetch_devices(self, soup: BeautifulSoup | None) -> bool:
+ """
+ Fetches the devices from the given BeautifulSoup object and updates
+ the zone attributes.
Args:
- soup (BeautifulSoup, Optional): a BS4 object with data fetched from
- ADT Pulse web site
- Returns:
- ADTPulseZones
+ soup (Optional[BeautifulSoup]): The BeautifulSoup object containing
+ the devices.
- None if an error occurred
+ Returns:
+ bool: True if the devices were fetched and zone attributes were updated
+ successfully, False otherwise.
"""
+ regex_device = r"goToUrl\('device.jsp\?id=(\d*)'\);"
task_list: list[Task] = []
+ zone_id: str | None = None
+
+ def add_zone_from_row(row_tds: ResultSet) -> str | None:
+ """Adds a zone from a bs4 row.
+
+            Returns None if the zone was added, otherwise the zone ID (if any).
+ """
+ zone_id: str | None = None
+ if row_tds and len(row_tds) > 4:
+ zone_name: str = row_tds[1].get_text().strip()
+ zone_id = row_tds[2].get_text().strip()
+ zone_type: str = row_tds[4].get_text().strip()
+ zone_status: str = row_tds[0].find("canvas").get("title").strip()
+ if (
+ zone_id is not None
+ and zone_id.isdecimal()
+ and zone_name
+ and zone_type
+ ):
+ self._zones.update_zone_attributes(
+ {
+ "name": zone_name,
+ "zone": zone_id,
+ "type_model": zone_type,
+ "status": zone_status,
+ }
+ )
+ return None
+ return zone_id
+
+ def check_panel_or_gateway(
+ regex_device: str,
+ device_name: str,
+ zone_id: str | None,
+ on_click_value_text: str,
+ ) -> Task | None:
+ result = re.findall(regex_device, on_click_value_text)
+ if result:
+ device_id = result[0]
+ if device_id == SECURITY_PANEL_ID or device_name == SECURITY_PANEL_NAME:
+ return create_task(self.set_device(device_id))
+ if zone_id and zone_id.isdecimal():
+ return create_task(self.set_device(device_id))
+ LOG.debug("Skipping %s as it doesn't have an ID", device_name)
+ return None
+
if not soup:
response = await self._pulse_connection.async_query(ADT_SYSTEM_URI)
- soup = await make_soup(
- response,
+ soup = make_soup(
+ response[0],
+ response[1],
+ response[2],
logging.WARNING,
"Failed loading zone status from ADT Pulse service",
)
if not soup:
return False
-
- regexDevice = r"goToUrl\('device.jsp\?id=(\d*)'\);"
with self._site_lock:
for row in soup.find_all("tr", {"class": "p_listRow", "onclick": True}):
device_name = row.find("a").get_text()
row_tds = row.find_all("td")
- zone_id = None
- # see if we can create a zone without calling device.jsp
- if row_tds is not None and len(row_tds) > 4:
- zone_name = row_tds[1].get_text().strip()
- zone_id = row_tds[2].get_text().strip()
- zone_type = row_tds[4].get_text().strip()
- zone_status = row_tds[0].find("canvas").get("title").strip()
- if (
- zone_id.isdecimal()
- and zone_name is not None
- and zone_type is not None
- ):
- self._zones.update_zone_attributes(
- {
- "name": zone_name,
- "zone": zone_id,
- "type_model": zone_type,
- "status": zone_status,
- }
- )
- continue
- onClickValueText = row.get("onclick")
+ zone_id = add_zone_from_row(row_tds)
+ if zone_id is None:
+ continue
+ on_click_value_text = row.get("onclick")
if (
- onClickValueText == "goToUrl('gateway.jsp');"
+ on_click_value_text in ("goToUrl('gateway.jsp');", "Gateway")
or device_name == "Gateway"
):
- task_list.append(create_task(self._set_device(ADT_GATEWAY_STRING)))
- continue
- result = re.findall(regexDevice, onClickValueText)
-
- # only proceed if regex succeeded, as some users have onClick
- # links that include gateway.jsp
- if not result:
- LOG.debug(
- "Failed regex match #%s on #%s "
- "from ADT Pulse service, ignoring",
- regexDevice,
- onClickValueText,
+ task_list.append(create_task(self.set_device(ADT_GATEWAY_STRING)))
+ elif (
+ result := check_panel_or_gateway(
+ regex_device,
+ device_name,
+ zone_id,
+ on_click_value_text,
)
- continue
- # alarm panel case
- if result[0] == "1" or device_name == "Security Panel":
- task_list.append(create_task(self._set_device(result[0])))
- continue
- # zone case if we couldn't just call update_zone_attributes
- if zone_id is not None and zone_id.isdecimal():
- task_list.append(create_task(self._set_device(result[0])))
- continue
- else:
- LOG.debug("Skipping %s as it doesn't have an ID", device_name)
+ ) is not None:
+ task_list.append(result)
- await gather(*task_list)
- self._last_updated = int(time())
- return True
-
- # FIXME: ensure the zones for the correct site are being loaded!!!
+ await gather(*task_list)
+ self._last_updated = int(time())
+ return True
async def _async_update_zones_as_dict(
- self, soup: Optional[BeautifulSoup]
- ) -> Optional[ADTPulseZones]:
+ self, soup: BeautifulSoup | None
+ ) -> ADTPulseZones | None:
"""Update zone status information asynchronously.
Returns:
ADTPulseZones: a dictionary of zones with status
None if an error occurred
+
+ Raises:
+            PulseGatewayOfflineError: If the gateway is offline.
"""
with self._site_lock:
if self._zones is None:
@@ -351,17 +281,52 @@ async def _async_update_zones_as_dict(
LOG.debug("fetching zones for site %s", self._id)
if not soup:
# call ADT orb uri
- soup = await self._pulse_connection.query_orb(
- logging.WARNING, "Could not fetch zone status updates"
- )
+ try:
+ soup = await self._pulse_connection.query_orb(
+ logging.WARNING, "Could not fetch zone status updates"
+ )
+ except (
+ PulseServiceTemporarilyUnavailableError,
+ PulseServerConnectionError,
+ PulseClientConnectionError,
+ ) as ex:
+ LOG.warning(
+ "Could not fetch zone status updates from orb: %s", ex.args[0]
+ )
+ return None
if soup is None:
return None
- return self._update_zone_from_soup(soup)
+ self.update_zone_from_soup(soup)
+ return self._zones
+
+ def update_zone_from_soup(self, soup: BeautifulSoup) -> None:
+ """
+ Updates the zone information based on the provided BeautifulSoup object.
+
+ Args:
+ soup (BeautifulSoup): The BeautifulSoup object containing the parsed HTML.
+
+ Returns:
+ None
+
+ Raises:
+            PulseGatewayOfflineError: If the gateway is offline.
+ """
- def _update_zone_from_soup(self, soup: BeautifulSoup) -> Optional[ADTPulseZones]:
# parse ADT's convulated html to get sensor status
with self._site_lock:
- gateway_online = False
+ orb_status = soup.find("canvas", {"id": "ic_orb"})
+ if orb_status:
+ alarm_status = orb_status.get("orb")
+ if not alarm_status:
+ LOG.error("Failed to retrieve alarm status from orb!")
+ elif alarm_status == "offline":
+ self.gateway.is_online = False
+ raise PulseGatewayOfflineError(self.gateway.backoff)
+ else:
+ self.gateway.is_online = True
+ self.gateway.backoff.reset_backoff()
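+                # Any other orb value (a normal armed/disarmed state) means the
+                # gateway is reachable, so the gateway backoff is reset.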
+
for row in soup.find_all("tr", {"class": "p_listRow"}):
temp = row.find("div", {"class": "p_grayNormalText"})
# v26 and lower: temp = row.find("span", {"class": "p_grayNormalText"})
@@ -414,9 +379,6 @@ def _update_zone_from_soup(self, soup: BeautifulSoup) -> Optional[ADTPulseZones]
else:
status = "Online"
- # parse out last activity (required dealing with "Yesterday 1:52Â PM")
- # last_activity = time.time()
-
# id: [integer]
# name: device name
# tags: sensor,[doorWindow,motion,glass,co,fire]
@@ -432,8 +394,6 @@ def _update_zone_from_soup(self, soup: BeautifulSoup) -> Optional[ADTPulseZones]
if not self._zones:
LOG.warning("No zones exist")
return None
- if state != "Unknown":
- gateway_online = True
self._zones.update_device_info(zone, state, status, last_update)
LOG.debug(
"Set zone %d - to %s, status %s with timestamp %s",
@@ -442,11 +402,9 @@ def _update_zone_from_soup(self, soup: BeautifulSoup) -> Optional[ADTPulseZones]
status,
last_update,
)
- self._gateway.is_online = gateway_online
self._last_updated = int(time())
- return self._zones
- async def _async_update_zones(self) -> Optional[List[ADTPulseFlattendZone]]:
+ async def _async_update_zones(self) -> list[ADTPulseFlattendZone] | None:
"""Update zones asynchronously.
Returns:
@@ -462,7 +420,7 @@ async def _async_update_zones(self) -> Optional[List[ADTPulseFlattendZone]]:
return None
return zonelist.flatten()
- def update_zones(self) -> Optional[List[ADTPulseFlattendZone]]:
+ def update_zones(self) -> list[ADTPulseFlattendZone] | None:
"""Update zone status information.
Returns:
@@ -470,47 +428,3 @@ def update_zones(self) -> Optional[List[ADTPulseFlattendZone]]:
"""
coro = self._async_update_zones()
return run_coroutine_threadsafe(coro, get_event_loop()).result()
-
- @property
- def updates_may_exist(self) -> bool:
- """Query whether updated sensor data exists.
-
- Deprecated, use method on pyADTPulse object instead
- """
- # FIXME: this should actually capture the latest version
- # and compare if different!!!
- # ...this doesn't actually work if other components are also checking
- # if updates exist
- warn(
- "updates_may_exist on site object is deprecated, "
- "use method on pyADTPulse object instead",
- DeprecationWarning,
- stacklevel=2,
- )
- return False
-
- async def async_update(self) -> bool:
- """Force update site/zone data async with current data.
-
- Deprecated, use method on pyADTPulse object instead
- """
- warn(
- "updating zones from site object is deprecated, "
- "use method on pyADTPulse object instead",
- DeprecationWarning,
- stacklevel=2,
- )
- return False
-
- def update(self) -> bool:
- """Force update site/zones with current data.
-
- Deprecated, use method on pyADTPulse object instead
- """
- warn(
- "updating zones from site object is deprecated, "
- "use method on pyADTPulse object instead",
- DeprecationWarning,
- stacklevel=2,
- )
- return False
diff --git a/pyadtpulse/site_properties.py b/pyadtpulse/site_properties.py
new file mode 100644
index 0000000..4313c84
--- /dev/null
+++ b/pyadtpulse/site_properties.py
@@ -0,0 +1,164 @@
+"""Pulse Site Properties."""
+
+from threading import RLock
+from warnings import warn
+
+from typeguard import typechecked
+
+from .alarm_panel import ADTPulseAlarmPanel
+from .gateway import ADTPulseGateway
+from .util import DebugRLock, set_debug_lock
+from .zones import ADTPulseFlattendZone, ADTPulseZones
+
+
+class ADTPulseSiteProperties:
+ """Pulse Site Properties."""
+
+ __slots__ = (
+ "_id",
+ "_name",
+ "_last_updated",
+ "_alarm_panel",
+ "_zones",
+ "_site_lock",
+ "_gateway",
+ )
+
+ @typechecked
+ def __init__(self, site_id: str, name: str, debug_locks: bool = False):
+ self._id = site_id
+ self._name = name
+ self._last_updated: int = 0
+ self._zones = ADTPulseZones()
+ self._site_lock: RLock | DebugRLock
+ self._site_lock = set_debug_lock(debug_locks, "pyadtpulse.site_property_lock")
+ self._alarm_panel = ADTPulseAlarmPanel()
+ self._gateway = ADTPulseGateway()
+
+ @property
+ def id(self) -> str:
+ """Get site id.
+
+ Returns:
+ str: the site id
+ """
+ return self._id
+
+ @property
+ def name(self) -> str:
+ """Get site name.
+
+ Returns:
+ str: the site name
+ """
+ return self._name
+
+ # FIXME: should this actually return if the alarm is going off!? How do we
+ # return state that shows the site is compromised??
+
+ @property
+ def last_updated(self) -> int:
+ """Return time site last updated.
+
+ Returns:
+ int: the time site last updated as datetime
+ """
+ with self._site_lock:
+ return self._last_updated
+
+ @property
+ def site_lock(self) -> "RLock| DebugRLock":
+ """Get thread lock for site data.
+
+ Not needed for async
+
+ Returns:
+ RLock: thread RLock
+ """
+ return self._site_lock
+
+ @property
+ def zones(self) -> list[ADTPulseFlattendZone] | None:
+ """Return all zones registered with the ADT Pulse account.
+
+ (cached copy of last fetch)
+ See Also fetch_zones()
+ """
+ with self._site_lock:
+ if not self._zones:
+ raise RuntimeError("No zones exist")
+ return self._zones.flatten()
+
+ @property
+ def zones_as_dict(self) -> ADTPulseZones | None:
+ """Return zone information in dictionary form.
+
+ Returns:
+ ADTPulseZones: all zone information
+ """
+ with self._site_lock:
+ if not self._zones:
+ raise RuntimeError("No zones exist")
+ return self._zones
+
+ @property
+ def alarm_control_panel(self) -> ADTPulseAlarmPanel:
+ """Return the alarm panel object for the site.
+
+ Returns:
+ ADTPulseAlarmPanel: the alarm panel object
+ """
+ return self._alarm_panel
+
+ @property
+ def gateway(self) -> ADTPulseGateway:
+ """Get gateway device object.
+
+ Returns:
+ ADTPulseGateway: Gateway device
+ """
+ return self._gateway
+
+ @property
+ def updates_may_exist(self) -> bool:
+ """Query whether updated sensor data exists.
+
+ Deprecated, use method on pyADTPulse object instead
+ """
+ # FIXME: this should actually capture the latest version
+ # and compare if different!!!
+ # ...this doesn't actually work if other components are also checking
+ # if updates exist
+ warn(
+ "updates_may_exist on site object is deprecated, "
+ "use method on pyADTPulse object instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return False
+
+ async def async_update(self) -> bool:
+ """Force update site/zone data async with current data.
+
+ Deprecated, use method on pyADTPulse object instead
+ """
+ warn(
+ "updating zones from site object is deprecated, "
+ "use method on pyADTPulse object instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return False
+
+ def update(self) -> bool:
+ """Force update site/zones with current data.
+
+ Deprecated, use method on pyADTPulse object instead
+ """
+ warn(
+ "updating zones from site object is deprecated, "
+ "use method on pyADTPulse object instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return False
diff --git a/pyadtpulse/util.py b/pyadtpulse/util.py
index 15fa564..dd1e361 100644
--- a/pyadtpulse/util.py
+++ b/pyadtpulse/util.py
@@ -1,4 +1,5 @@
"""Utility functions for pyadtpulse."""
+
import logging
import string
import sys
@@ -7,83 +8,67 @@
from pathlib import Path
from random import randint
from threading import RLock, current_thread
-from typing import Optional
-from aiohttp import ClientResponse
from bs4 import BeautifulSoup
+from yarl import URL
LOG = logging.getLogger(__name__)
-def handle_response(
- response: Optional[ClientResponse], level: int, error_message: str
-) -> bool:
- """Handle the response from query().
+def remove_prefix(text: str, prefix: str) -> str:
+ """Remove prefix from a string.
Args:
- response (Optional[Response]): the response from the query()
- level (int): Level to log on error (i.e. INFO, DEBUG)
- error_message (str): the error message
+ text (str): original text
+ prefix (str): prefix to remove
Returns:
- bool: True if no error occurred.
- """
- if response is None:
- LOG.log(level, "%s", error_message)
- return False
-
- if response.ok:
- return True
-
- LOG.log(level, "%s: error code = %s", error_message, response.status)
-
- return False
-
-
-def close_response(response: Optional[ClientResponse]) -> None:
- """Close a response object, handles None.
-
- Args:
- response (Optional[ClientResponse]): ClientResponse object to close
+ str: modified string
"""
- if response is not None and not response.closed:
- response.close()
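+ # startswith() returns a bool, so the slice start is 0 (keep everything) or len(prefix) (strip it)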
+ return text[text.startswith(prefix) and len(prefix) :]
-def remove_prefix(text: str, prefix: str) -> str:
- """Remove prefix from a string.
+def handle_response(code: int, url: URL | None, level: int, error_message: str) -> bool:
+ """Handle the response from query().
Args:
- text (str): original text
- prefix (str): prefix to remove
+ code (int): the return code
+ url (URL | None): the URL that was queried (for logging)
+ level (int): Level to log on error (i.e. INFO, DEBUG)
+ error_message (str): the error message
Returns:
- str: modified string
+ bool: True if no error occurred.
"""
- return text[text.startswith(prefix) and len(prefix) :]
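+ # any HTTP status of 400 or above is treated as a failed query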
+ if code >= 400:
+ LOG.log(level, "%s: error code = %s from %s", error_message, code, url)
+ return False
+ return True
-async def make_soup(
- response: Optional[ClientResponse], level: int, error_message: str
-) -> Optional[BeautifulSoup]:
+def make_soup(
+ code: int,
+ response_text: str | None,
+ url: URL | None,
+ level: int,
+ error_message: str,
+) -> BeautifulSoup | None:
"""Make a BS object from a Response.
Args:
- response (Optional[Response]): the response
+ code (int): the return code
+ response_text (str | None): the response text
+ url (URL | None): the URL that was queried (for logging)
level (int): the logging level on error
error_message (str): the error message
Returns:
Optional[BeautifulSoup]: a BS object, or None on failure
"""
- if not handle_response(response, level, error_message):
+ if not handle_response(code, url, level, error_message):
return None
-
- if response is None: # shut up type checker
+ if response_text is None:
+ LOG.log(level, "%s: no response received from %s", error_message, url)
return None
- body_text = await response.text()
- response.close()
- return BeautifulSoup(body_text, "html.parser")
+ return BeautifulSoup(response_text, "html.parser")
FINGERPRINT_LENGTH = 2292
@@ -227,7 +212,8 @@ def parse_pulse_datetime(datestring: str) -> datetime:
Returns:
datetime: time value of given string
"""
- split_string = datestring.split("\xa0")
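+ # Pulse separates date fields with non-breaking spaces; normalize them and drop empty tokens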
+ datestring = datestring.replace("\xa0", " ").rstrip()
+ split_string = [s for s in datestring.split(" ") if s.strip()]
if len(split_string) < 3:
raise ValueError("Invalid datestring")
t = datetime.today()
@@ -247,13 +233,16 @@ def parse_pulse_datetime(datestring: str) -> datetime:
return last_update
-class AuthenticationException(RuntimeError):
- """Raised when a login failed."""
+def set_debug_lock(debug_lock: bool, name: str) -> "RLock | DebugRLock":
+ """Set lock or debug lock
- def __init__(self, username: str):
- """Create the exception.
+ Args:
+ debug_lock (bool): set a debug lock
+ name (str): debug lock name
- Args:
- username (str): Username used to login
- """
- super().__init__(f"Could not log into ADT site with username {username}")
+ Returns:
+ RLock | DebugRLock: the created lock object
+ """
+ if debug_lock:
+ return DebugRLock(name)
+ return RLock()
diff --git a/pyadtpulse/zones.py b/pyadtpulse/zones.py
index 1402b7e..f1a9942 100644
--- a/pyadtpulse/zones.py
+++ b/pyadtpulse/zones.py
@@ -1,11 +1,14 @@
"""ADT Pulse zone info."""
+
import logging
from collections import UserDict
from dataclasses import dataclass
from datetime import datetime
-from typing import List, Tuple, TypedDict
+from typing import TypedDict
+
+from typeguard import typechecked
-ADT_NAME_TO_DEFAULT_TAGS = {
+ADT_NAME_TO_DEFAULT_TAGS: dict[str, tuple[str, str]] = {
"Door": ("sensor", "doorWindow"),
"Window": ("sensor", "doorWindow"),
"Motion": ("sensor", "motion"),
@@ -38,10 +41,38 @@ class ADTPulseZoneData:
name: str
id_: str
- tags: Tuple = ADT_NAME_TO_DEFAULT_TAGS["Window"]
+ _tags: tuple[str, str] = ADT_NAME_TO_DEFAULT_TAGS["Window"]
status: str = "Unknown"
state: str = "Unknown"
- last_activity_timestamp: int = 0
+ _last_activity_timestamp: int = 0
+
+ @property
+ def last_activity_timestamp(self) -> int:
+ """Return the last activity timestamp."""
+ return self._last_activity_timestamp
+
+ @last_activity_timestamp.setter
+ @typechecked
+ def last_activity_timestamp(self, value: int) -> None:
+ """Set the last activity timestamp."""
+ if value < 1420070400:
+ raise ValueError(
+ "last_activity_timestamp must be greater than that of 01-Jan-2015"
+ )
+ self._last_activity_timestamp = value
+
+ @property
+ def tags(self) -> tuple[str, str]:
+ """Return the tags."""
+ return self._tags
+
+ @tags.setter
+ @typechecked
+ def tags(self, value: tuple[str, str]) -> None:
+ """Set the tags."""
+ if value not in ADT_NAME_TO_DEFAULT_TAGS.values():
+ raise ValueError("tags must be one of: " + str(ADT_NAME_TO_DEFAULT_TAGS))
+ self._tags = value
class ADTPulseFlattendZone(TypedDict):
@@ -60,7 +91,7 @@ class ADTPulseFlattendZone(TypedDict):
zone: int
name: str
id_: str
- tags: Tuple
+ tags: tuple
status: str
state: str
last_activity_timestamp: int
@@ -112,6 +143,7 @@ def __setitem__(self, key: int, value: ADTPulseZoneData) -> None:
value.name = "Sensor for Zone " + str(key)
super().__setitem__(key, value)
+ @typechecked
def update_status(self, key: int, status: str) -> None:
"""Update zone status.
@@ -123,6 +155,7 @@ def update_status(self, key: int, status: str) -> None:
temp.status = status
self.__setitem__(key, temp)
+ @typechecked
def update_state(self, key: int, state: str) -> None:
"""Update zone state.
@@ -134,6 +167,7 @@ def update_state(self, key: int, state: str) -> None:
temp.state = state
self.__setitem__(key, temp)
+ @typechecked
def update_last_activity_timestamp(self, key: int, dt: datetime) -> None:
"""Update timestamp.
@@ -145,6 +179,7 @@ def update_last_activity_timestamp(self, key: int, dt: datetime) -> None:
temp.last_activity_timestamp = int(dt.timestamp())
self.__setitem__(key, temp)
+ @typechecked
def update_device_info(
self,
key: int,
@@ -170,13 +205,13 @@ def update_device_info(
temp.last_activity_timestamp = int(last_activity.timestamp())
self.__setitem__(key, temp)
- def flatten(self) -> List[ADTPulseFlattendZone]:
+ def flatten(self) -> list[ADTPulseFlattendZone]:
"""Flattens ADTPulseZones into a list of ADTPulseFlattenedZones.
Returns:
List[ADTPulseFlattendZone]
"""
- result: List[ADTPulseFlattendZone] = []
+ result: list[ADTPulseFlattendZone] = []
for k, i in self.items():
if not isinstance(i, ADTPulseZoneData):
raise ValueError("Invalid Zone data in ADTPulseZones")
@@ -193,41 +228,42 @@ def flatten(self) -> List[ADTPulseFlattendZone]:
)
return result
+ @typechecked
def update_zone_attributes(self, dev_attr: dict[str, str]) -> None:
"""Update zone attributes."""
- dName = dev_attr.get("name", "Unknown")
- dType = dev_attr.get("type_model", "Unknown")
- dZone = dev_attr.get("zone", "Unknown")
- dStatus = dev_attr.get("status", "Unknown")
+ d_name = dev_attr.get("name", "Unknown")
+ d_type = dev_attr.get("type_model", "Unknown")
+ d_zone = dev_attr.get("zone", "Unknown")
+ d_status = dev_attr.get("status", "Unknown")
- if dZone != "Unknown":
+ if d_zone != "Unknown":
tags = None
for search_term, default_tags in ADT_NAME_TO_DEFAULT_TAGS.items():
# convert to uppercase first
- if search_term.upper() in dType.upper():
+ if search_term.upper() in d_type.upper():
tags = default_tags
break
if not tags:
LOG.warning(
- "Unknown sensor type for '%s', defaulting to doorWindow", dType
+ "Unknown sensor type for '%s', defaulting to doorWindow", d_type
)
tags = ("sensor", "doorWindow")
LOG.debug(
"Retrieved sensor %s id: sensor-%s Status: %s, tags %s",
- dName,
- dZone,
- dStatus,
+ d_name,
+ d_zone,
+ d_status,
tags,
)
- if "Unknown" in (dName, dStatus, dZone) or not dZone.isdecimal():
+ if "Unknown" in (d_name, d_status, d_zone) or not d_zone.isdecimal():
LOG.debug("Zone data incomplete, skipping...")
else:
- tmpzone = ADTPulseZoneData(dName, f"sensor-{dZone}", tags, dStatus)
- self.update({int(dZone): tmpzone})
+ tmpzone = ADTPulseZoneData(d_name, f"sensor-{d_zone}", tags, d_status)
+ self.update({int(d_zone): tmpzone})
else:
LOG.debug(
"Skipping incomplete zone name: %s, zone: %s status: %s",
- dName,
- dZone,
- dStatus,
+ d_name,
+ d_zone,
+ d_status,
)
diff --git a/pyproject.toml b/pyproject.toml
index b770167..ce39162 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,30 +1,59 @@
-[build-system]
-requires = ["setuptools>=61.0"]
-build-backend = "setuptools.build_meta"
-
-[project]
+[tool.poetry]
name = "pyadtpulse"
-dynamic = ["version"]
-description="Python interface for ADT Pulse security systems"
+version = "1.2.0"
+description = "Python interface for ADT Pulse security systems"
+authors = ["Ryan Snodgrass"]
+maintainers = ["Robert Lippmann"]
+license = "Apache-2.0"
readme = "README.md"
-authors = [{name = "Ryan Snodgrass"}]
-maintainers = [{name = "Robert Lippmann"}]
-license = {file = "LICENSE.md"}
-dependencies = ["aiohttp>=3.8.1", "uvloop>=0.17.0", "beautifulsoup4>=4.11.1"]
-keywords = ["security system", "adt", "home automation", "security alarm"]
+repository = "https://github.com/rlippmann/pyadtpulse"
classifiers = [
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
- "Operating System :: OS Independent",
+ "Operating System :: OS Independent"
]
-[project.urls]
+
+[tool.poetry.dependencies]
+python = "^3.11"
+aiohttp = "3.9.1"
+beautifulsoup4 = "^4.12.2"
+uvloop = "^0.19.0"
+bs4 = "^0.0.1"
+typeguard = "^4.1.5"
+
+
+[tool.poetry.urls]
"Changelog" = "https://github.com/rlippmann/pyadtpulse/blob/master/CHANGELOG.md"
-"Source" = "https://github.com/rlippmann/pyadtpulse"
"Issues" = "https://github.com/rlippmann/pyadtpulse/issues"
-[tool.setuptools.dynamic]
-version = {attr = "pyadtpulse.const.__version__"}
+[tool.poetry.group.test.dependencies]
+pytest = "^7.4.3"
+pytest-asyncio = "^0.21.1"
+pytest-mock = "^3.12.0"
+pytest-aiohttp = "^1.0.5"
+pytest-timeout = "^2.2.0"
+aioresponses = "^0.7.6"
+freezegun = "^1.2.2"
+pytest-coverage = "^0.0"
+pytest-xdist = "^3.5.0"
+
+
+[tool.poetry.group.dev.dependencies]
+pre-commit = "^3.5.0"
+ruff = "^0.1.4"
+pycln = "^2.3.0"
+pyupgrade = "^3.15.0"
+isort = "^5.12.0"
+black = "^23.10.1"
+mypy = "^1.6.1"
+pylint = "^3.0.2"
+types-beautifulsoup4 = "^4.12.0.7"
+refurb = "^1.22.1"
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = ["poetry.core.masonry.api"]
[tool.isort]
profile = "black"
@@ -36,3 +65,9 @@ line-length = 90
[tool.pycln]
all = true
+
+[tool.pytest.ini_options]
+timeout = 30
+# addopts = "--cov=pyadtpulse --cov-report=html"
+
+[tool.pyright]
diff --git a/requirements.txt b/requirements.txt
index 50b37e6..e256003 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,3 +1,4 @@
beautifulsoup4>=4.11.1
-aiohttp>=3.8.1
+aiohttp>=3.9.1
uvloop>=0.17.0
+typeguard>=4.1.5
diff --git a/tests/data_files/device_1.html b/tests/data_files/device_1.html
new file mode 100644
index 0000000..f248389
--- /dev/null
+++ b/tests/data_files/device_1.html
@@ -0,0 +1,458 @@
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
diff --git a/tests/data_files/device_10.html b/tests/data_files/device_10.html
new file mode 100644
index 0000000..a1ad892
--- /dev/null
+++ b/tests/data_files/device_10.html
@@ -0,0 +1,435 @@
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
diff --git a/tests/data_files/device_11.html b/tests/data_files/device_11.html
new file mode 100644
index 0000000..ba700c6
--- /dev/null
+++ b/tests/data_files/device_11.html
@@ -0,0 +1,435 @@
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
diff --git a/tests/data_files/device_16.html b/tests/data_files/device_16.html
new file mode 100644
index 0000000..2d9be60
--- /dev/null
+++ b/tests/data_files/device_16.html
@@ -0,0 +1,435 @@
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
diff --git a/tests/data_files/device_2.html b/tests/data_files/device_2.html
new file mode 100644
index 0000000..ca7507d
--- /dev/null
+++ b/tests/data_files/device_2.html
@@ -0,0 +1,435 @@
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
diff --git a/tests/data_files/device_24.html b/tests/data_files/device_24.html
new file mode 100644
index 0000000..52085d6
--- /dev/null
+++ b/tests/data_files/device_24.html
@@ -0,0 +1,453 @@
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
diff --git a/tests/data_files/device_25.html b/tests/data_files/device_25.html
new file mode 100644
index 0000000..deb9552
--- /dev/null
+++ b/tests/data_files/device_25.html
@@ -0,0 +1,435 @@
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
diff --git a/tests/data_files/device_26.html b/tests/data_files/device_26.html
new file mode 100644
index 0000000..3b4393a
--- /dev/null
+++ b/tests/data_files/device_26.html
@@ -0,0 +1,435 @@
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
diff --git a/tests/data_files/device_27.html b/tests/data_files/device_27.html
new file mode 100644
index 0000000..cac9597
--- /dev/null
+++ b/tests/data_files/device_27.html
@@ -0,0 +1,435 @@
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
diff --git a/tests/data_files/device_28.html b/tests/data_files/device_28.html
new file mode 100644
index 0000000..8a02013
--- /dev/null
+++ b/tests/data_files/device_28.html
@@ -0,0 +1,435 @@
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
diff --git a/tests/data_files/device_29.html b/tests/data_files/device_29.html
new file mode 100644
index 0000000..2ba6fd7
--- /dev/null
+++ b/tests/data_files/device_29.html
@@ -0,0 +1,435 @@
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
diff --git a/tests/data_files/device_3.html b/tests/data_files/device_3.html
new file mode 100644
index 0000000..f4da4b7
--- /dev/null
+++ b/tests/data_files/device_3.html
@@ -0,0 +1,435 @@
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
diff --git a/tests/data_files/device_30.html b/tests/data_files/device_30.html
new file mode 100644
index 0000000..c178404
--- /dev/null
+++ b/tests/data_files/device_30.html
@@ -0,0 +1,435 @@
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
diff --git a/tests/data_files/device_34.html b/tests/data_files/device_34.html
new file mode 100644
index 0000000..d5ffb17
--- /dev/null
+++ b/tests/data_files/device_34.html
@@ -0,0 +1,441 @@
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
diff --git a/tests/data_files/device_69.html b/tests/data_files/device_69.html
new file mode 100644
index 0000000..95f2122
--- /dev/null
+++ b/tests/data_files/device_69.html
@@ -0,0 +1,435 @@
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
diff --git a/tests/data_files/device_70.html b/tests/data_files/device_70.html
new file mode 100644
index 0000000..68f5766
--- /dev/null
+++ b/tests/data_files/device_70.html
@@ -0,0 +1,435 @@
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
diff --git a/tests/data_files/gateway.html b/tests/data_files/gateway.html
new file mode 100644
index 0000000..a85f6d8
--- /dev/null
+++ b/tests/data_files/gateway.html
@@ -0,0 +1,395 @@
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
+ Status:
+ Manufacturer: ADT Pulse Gateway
+ Model: PGZNG1
+ Serial Number: 5U020CN3007E3
+ Next Update: Today 1:21 AM
+ Last Update: Today 7:21 PM
+ Firmware Version: 24.0.0-9
+ Hardware Version: HW=3, BL=1.1.9b, PL=9.4.0.32.5, SKU=PGZNG1-2ADNAS
+ Communication Link Status
+ Primary Connection Type: Broadband
+ Broadband Connection Status: Active
+ Cellular Connection Status: N/A
+ Cellular Signal Strength: N/A
+ Network Address Information
+ Broadband LAN IP Address: 192.168.1.31
+ Broadband LAN MAC: a4:11:62:35:07:96
+ Device LAN IP Address: 192.168.107.1
+ Device LAN MAC: a4:11:62:35:07:97
+ Router LAN IP Address: 192.168.1.1
+ Router WAN IP Address:
diff --git a/tests/data_files/mfa.html b/tests/data_files/mfa.html
new file mode 100644
index 0000000..5087001
--- /dev/null
+++ b/tests/data_files/mfa.html
@@ -0,0 +1,150 @@
+ ADT Pulse(TM) Interactive Solutions - Multi-factor Authentication
diff --git a/tests/data_files/not_signed_in.html b/tests/data_files/not_signed_in.html
new file mode 100644
index 0000000..9608e41
--- /dev/null
+++ b/tests/data_files/not_signed_in.html
@@ -0,0 +1,177 @@
+ ADT Pulse(TM) Interactive Solutions - Sign In
diff --git a/tests/data_files/orb.html b/tests/data_files/orb.html
new file mode 100644
index 0000000..57c808d
--- /dev/null
+++ b/tests/data_files/orb.html
@@ -0,0 +1,323 @@
+ Disarmed.
+ All Quiet.
+ Back Door | Zone 14 | Closed
+ Foyer Motion | Zone 15 | No Motion
+ Front Door | Zone 9 | Closed
+ Garage Door | Zone 10 | Closed
+ Main Gas | Zone 23 | Okay
+ Patio Door | Zone 11 | Closed
diff --git a/tests/data_files/orb_garage.html b/tests/data_files/orb_garage.html
new file mode 100644
index 0000000..13a3cc4
--- /dev/null
+++ b/tests/data_files/orb_garage.html
@@ -0,0 +1,323 @@
+ Disarmed.
+ 1 Sensor Open.
+ Garage Door | Zone 10 | Open
+ Back Door | Zone 14 | Closed
+ Foyer Motion | Zone 15 | No Motion
+ Front Door | Zone 9 | Closed
+ Main Gas | Zone 23 | Okay
+ Patio Door | Zone 11 | Closed
diff --git a/tests/data_files/orb_gateway_offline.html b/tests/data_files/orb_gateway_offline.html
new file mode 100644
index 0000000..2083506
--- /dev/null
+++ b/tests/data_files/orb_gateway_offline.html
@@ -0,0 +1,293 @@
+ Status Unavailable.
+ Back Door | Zone 14 | Unknown
+ Foyer Motion | Zone 15 | Unknown
+ Front Door | Zone 9 | Unknown
+ Garage Door | Zone 10 | Unknown
+ Main Gas | Zone 23 | Unknown
+ Patio Door | Zone 11 | Unknown
diff --git a/tests/data_files/orb_patio_garage.html b/tests/data_files/orb_patio_garage.html
new file mode 100644
index 0000000..0c42926
--- /dev/null
+++ b/tests/data_files/orb_patio_garage.html
@@ -0,0 +1,323 @@
+ Disarmed.
+ 2 Sensors Open.
+ Garage Door | Zone 10 | Open
+ Patio Door | Zone 11 | Open
+ Back Door | Zone 14 | Closed
+ Foyer Motion | Zone 15 | No Motion
+ Front Door | Zone 9 | Closed
+ Main Gas | Zone 23 | Okay
diff --git a/tests/data_files/orb_patio_opened.html b/tests/data_files/orb_patio_opened.html
new file mode 100644
index 0000000..dfdd36f
--- /dev/null
+++ b/tests/data_files/orb_patio_opened.html
@@ -0,0 +1,323 @@
+ Disarmed.
+ 1 Sensor Open.
+ Patio Door | Zone 11 | Open
+ Back Door | Zone 14 | Closed
+ Foyer Motion | Zone 15 | No Motion
+ Front Door | Zone 9 | Closed
+ Garage Door | Zone 10 | Closed
+ Main Gas | Zone 23 | Okay
diff --git a/tests/data_files/signin.html b/tests/data_files/signin.html
new file mode 100644
index 0000000..c82a460
--- /dev/null
+++ b/tests/data_files/signin.html
@@ -0,0 +1,177 @@
+ ADT Pulse(TM) Interactive Solutions - Sign In
diff --git a/tests/data_files/signin_fail.html b/tests/data_files/signin_fail.html
new file mode 100644
index 0000000..d7b52a4
--- /dev/null
+++ b/tests/data_files/signin_fail.html
@@ -0,0 +1,176 @@
+ ADT Pulse(TM) Interactive Solutions - Sign In
diff --git a/tests/data_files/signin_locked.html b/tests/data_files/signin_locked.html
new file mode 100644
index 0000000..027079f
--- /dev/null
+++ b/tests/data_files/signin_locked.html
@@ -0,0 +1,176 @@
+ ADT Pulse(TM) Interactive Solutions - Sign In
diff --git a/tests/data_files/summary.html b/tests/data_files/summary.html
new file mode 100644
index 0000000..e61e087
--- /dev/null
+++ b/tests/data_files/summary.html
@@ -0,0 +1,534 @@
+ ADT Pulse(TM) Interactive Solutions - Summary - Robert Lippmann
+ ADT Pulse Home
+ Disarmed. All Quiet.
+ No other devices installed.
+ No pictures or clips.
diff --git a/tests/data_files/summary_gateway_offline.html b/tests/data_files/summary_gateway_offline.html
new file mode 100644
index 0000000..266fc06
--- /dev/null
+++ b/tests/data_files/summary_gateway_offline.html
@@ -0,0 +1,533 @@
+ ADT Pulse(TM) Interactive Solutions - Summary - Robert Lippmann
+ ADT Pulse Home
+ Status Unavailable.
+ No pictures or clips.
diff --git a/tests/data_files/system.html b/tests/data_files/system.html
new file mode 100644
index 0000000..736f9e5
--- /dev/null
+++ b/tests/data_files/system.html
@@ -0,0 +1,530 @@
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
+ Devices
diff --git a/tests/test_backoff.py b/tests/test_backoff.py
new file mode 100644
index 0000000..29fa88e
--- /dev/null
+++ b/tests/test_backoff.py
@@ -0,0 +1,879 @@
+"""Test for pulse_backoff."""
+
+from time import time
+
+import pytest
+
+from pyadtpulse.pulse_backoff import PulseBackoff
+
+
+# Test that the PulseBackoff class can be initialized with valid parameters.
+def test_initialize_backoff_valid_parameters():
+ """
+ Test that the PulseBackoff class can be initialized with valid parameters.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+
+ # Store the current time
+ current_time = time()
+
+ # Act
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Assert
+ assert backoff.name == name
+ assert backoff.initial_backoff_interval == initial_backoff_interval
+ assert backoff._max_backoff_interval == max_backoff_interval
+ assert backoff._backoff_count == 0
+ assert backoff._expiration_time == 0.0
+
+
+# Get current backoff interval
+def test_get_current_backoff_interval():
+ """
+ Test that the get_current_backoff_interval method returns the correct current backoff interval.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act
+ current_backoff_interval = backoff.get_current_backoff_interval()
+ assert current_backoff_interval == 0.0
+ backoff.increment_backoff()
+ current_backoff_interval = backoff.get_current_backoff_interval()
+ # Assert
+ assert current_backoff_interval == initial_backoff_interval
+ backoff.increment_backoff()
+ current_backoff_interval = backoff.get_current_backoff_interval()
+ assert current_backoff_interval == initial_backoff_interval * 2
+
+
+# Increment backoff
+def test_increment_backoff():
+ """
+ Test that the increment_backoff method increments the backoff count.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act
+ backoff.increment_backoff()
+
+ # Assert
+ assert backoff._backoff_count == 1
+ backoff.increment_backoff()
+ assert backoff._backoff_count == 2
+
+
+# Reset backoff
+def test_reset_backoff():
+ """
+ Test that the reset_backoff method resets the backoff count and expiration time.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+ backoff.increment_backoff()
+
+ # Act
+ backoff.reset_backoff()
+
+ # Assert
+ assert backoff._backoff_count == 0
+
+
+# Test that the wait_for_backoff method waits for the correct amount of time.
+@pytest.mark.asyncio
+async def test_wait_for_backoff2(mock_sleep):
+ """
+ Test that the wait_for_backoff method waits for the correct amount of time.
+ """
+ # Arrange
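+ # mock_sleep is a fixture (presumably defined in conftest.py) that patches asyncio.sleep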
+
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act
+ await backoff.wait_for_backoff()
+ assert mock_sleep.call_count == 0
+ backoff.increment_backoff()
+ await backoff.wait_for_backoff()
+ assert mock_sleep.await_args[0][0] == pytest.approx(initial_backoff_interval)
+
+
+# Check if backoff is needed
+def test_will_backoff():
+ """
+ Test that the will_backoff method returns True if backoff is needed, False otherwise.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act and Assert
+ assert not backoff.will_backoff()
+
+ backoff.increment_backoff()
+ assert backoff.will_backoff()
+
+
+# Initialize backoff with invalid initial_backoff_interval
+def test_initialize_backoff_invalid_initial_interval():
+ """
+ Test that initializing the PulseBackoff class with an invalid
+ initial_backoff_interval raises a ValueError.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = -1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+
+# Initialize backoff with invalid max_backoff_interval
+def test_initialize_backoff_invalid_max_interval():
+ """
+ Test that initializing the PulseBackoff class with an invalid
+ max_backoff_interval raises a ValueError.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 0.5
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+
+# Test that setting the absolute backoff time with an invalid backoff_time raises a ValueError.
+def test_set_absolute_backoff_time_invalid_time():
+ """
+ Test that setting the absolute backoff time with an invalid backoff_time raises a ValueError.
+ """
+ # Arrange
+ backoff = PulseBackoff(
+ name="test_backoff",
+ initial_backoff_interval=1.0,
+ max_backoff_interval=10.0,
+ threshold=0,
+ debug_locks=False,
+ detailed_debug_logging=False,
+ )
+
+ # Act and Assert
+ with pytest.raises(
+ ValueError, match="Absolute backoff time must be greater than current time"
+ ):
+ backoff.set_absolute_backoff_time(time() - 1)
+
+
+def test_set_absolute_backoff_time_valid_time():
+ """
+ Test that setting the absolute backoff time with a valid backoff_time works.
+ """
+ # Arrange
+ backoff = PulseBackoff(
+ name="test_backoff",
+ initial_backoff_interval=1.0,
+ max_backoff_interval=10.0,
+ threshold=0,
+ debug_locks=False,
+ detailed_debug_logging=False,
+ )
+
+ # Act and Assert
+ backoff_time = time() + 10
+ backoff.set_absolute_backoff_time(backoff_time)
+ assert backoff._expiration_time == backoff_time
+
+
+# Initialize backoff with valid parameters
+def test_initialize_backoff_valid_parameters2():
+ """
+ Test that the PulseBackoff class can be initialized with valid parameters.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+
+ # Act
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Assert
+ assert backoff.name == name
+ assert backoff.initial_backoff_interval == initial_backoff_interval
+ assert backoff._max_backoff_interval == max_backoff_interval
+ assert backoff._backoff_count == 0
+ assert backoff._expiration_time == 0.0
+
+
+# Increment backoff
+def test_increment_backoff2():
+ """
+ Test that the backoff count is incremented correctly when calling the
+ increment_backoff method.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act
+ backoff.increment_backoff()
+
+ # Assert
+ assert backoff.backoff_count == 1
+
+
+# Reset backoff
+def test_reset_backoff2():
+ """
+ Test that the backoff count and expiration time are not reset when calling
+ the reset_backoff method where expiration time is in the future.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+ now = time()
+ backoff._backoff_count = 5
+ backoff._expiration_time = now + 10.0
+
+ # Act
+ backoff.reset_backoff()
+
+ # Assert
+ assert backoff._backoff_count == 5
+ assert backoff._expiration_time == now + 10.0
+ assert backoff.expiration_time == now + 10.0
+
+
+# Check if backoff is needed
+def test_backoff_needed():
+ """
+ Test that the 'will_backoff' method returns the correct value when
+ backoff is needed.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act
+ backoff.increment_backoff()
+
+ # Assert
+ assert backoff.will_backoff() is True
+
+
+# Wait for backoff
+@pytest.mark.asyncio
+async def test_wait_for_backoff(mocker):
+ """
+ Test that the wait_for_backoff method waits for the correct amount of time.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+ # Act
+ await backoff.wait_for_backoff()
+ assert backoff.expiration_time == 0.0
+ backoff.increment_backoff()
+ # Assert
+ assert backoff.expiration_time == 0.0
+
+
+# Set initial backoff interval
+def test_set_initial_backoff_interval():
+ """
+ Test that the initial backoff interval can be set.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act
+ new_interval = 2.0
+ backoff.initial_backoff_interval = new_interval
+
+ # Assert
+ assert backoff.initial_backoff_interval == new_interval
+
+
+# Initialize backoff with invalid max_backoff_interval
+def test_initialize_backoff_invalid_max_interval2():
+ """
+ Test that the PulseBackoff class raises a ValueError when initialized
+ with an invalid max_backoff_interval.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 0.5
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+
+ # Act & Assert
+ with pytest.raises(ValueError):
+ PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+
+def test_initialize_backoff_invalid_initial_interval2():
+ """
+ Test that the PulseBackoff class raises a ValueError when initialized with an
+ invalid initial_backoff_interval.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = -1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+
+ # Act & Assert
+ with pytest.raises(ValueError):
+ PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+
+# Set absolute backoff time with invalid backoff_time
+def test_set_absolute_backoff_time_invalid_backoff_time():
+ """
+ Test that set_absolute_backoff_time raises a ValueError when given an invalid backoff_time.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act and Assert
+ invalid_backoff_time = time() - 1
+ with pytest.raises(ValueError):
+ backoff.set_absolute_backoff_time(invalid_backoff_time)
+
+
+# Wait for backoff with negative diff
+@pytest.mark.asyncio
+async def test_wait_for_backoff_with_negative_diff(mocker):
+ """
+ Test that the wait_for_backoff method handles a negative diff correctly.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Set the expiration time to a past time
+ backoff._expiration_time = time() - 1
+
+ start_time = time()
+
+ # Act
+ await backoff.wait_for_backoff()
+
+ # Assert
+ assert backoff._expiration_time >= initial_backoff_interval
+
+
+# Calculate backoff interval with backoff_count <= threshold
+def test_calculate_backoff_interval_with_backoff_count_less_than_threshold():
+ """
+ Test that the calculate_backoff_interval method returns 0
+ when the backoff count is less than or equal to the threshold.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 5
+ debug_locks = False
+ detailed_debug_logging = False
+
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act
+ result = backoff._calculate_backoff_interval()
+
+ # Assert
+ assert result == 0.0
+
+
+# Calculate backoff interval with backoff_count > threshold and exceeds max_backoff_interval
+@pytest.mark.asyncio
+async def test_calculate_backoff_interval_exceeds_max(mocker):
+ """
+ Test that the calculate_backoff_interval method returns the correct backoff interval
+ when backoff_count is greater than threshold and exceeds max_backoff_interval.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ backoff._backoff_count = 2
+
+ # Act
+ result = backoff._calculate_backoff_interval()
+
+ # Assert
+ assert result == 2.0
+ backoff._backoff_count = 3
+ result = backoff._calculate_backoff_interval()
+ assert result == 4.0
+ backoff._backoff_count = 4
+ result = backoff._calculate_backoff_interval()
+ assert result == 8.0
+ backoff._backoff_count = 5
+ result = backoff._calculate_backoff_interval()
+ assert result == max_backoff_interval
+ backoff._backoff_count = 6
+ result = backoff._calculate_backoff_interval()
+ assert result == max_backoff_interval
+
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 3
+ debug_locks = False
+ detailed_debug_logging = False
+
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ backoff._backoff_count = 2
+
+ # Act
+ result = backoff._calculate_backoff_interval()
+
+ # Assert
+ assert result == 1.0
+ backoff._backoff_count = 3
+ result = backoff._calculate_backoff_interval()
+ assert result == 1.0
+ backoff._backoff_count = 4
+ result = backoff._calculate_backoff_interval()
+ assert result == initial_backoff_interval
+ backoff._backoff_count = 5
+ result = backoff._calculate_backoff_interval()
+ assert result == initial_backoff_interval * 2
+ backoff._backoff_count = 6
+ result = backoff._calculate_backoff_interval()
+ assert result == initial_backoff_interval * 4
+ backoff._backoff_count = 7
+ result = backoff._calculate_backoff_interval()
+ assert result == initial_backoff_interval * 8
+ backoff._backoff_count = 8
+ result = backoff._calculate_backoff_interval()
+ assert result == max_backoff_interval
+ backoff._backoff_count = 9
+ result = backoff._calculate_backoff_interval()
+ assert result == max_backoff_interval
+
+
+# Increment backoff and update expiration_time
+def test_increment_backoff_and_update_expiration_time():
+ """
+ Test that the backoff count is incremented
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+ # Act
+ backoff.increment_backoff()
+
+ # Assert
+ assert backoff.backoff_count == 1
+
+
+# Calculate backoff interval with backoff_count > threshold
+def test_calculate_backoff_interval_with_backoff_count_greater_than_threshold():
+ """
+ Test the calculation of backoff interval when backoff_count is greater than threshold.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff_count = 5
+
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+ backoff._backoff_count = backoff_count
+
+ # Act
+ calculated_interval = backoff._calculate_backoff_interval()
+
+ # Assert
+ expected_interval = initial_backoff_interval * (2 ** (backoff_count - threshold))
+ assert calculated_interval == min(expected_interval, max_backoff_interval)
+
+
+# Test that calling increment backoff 4 times followed by wait for backoff
+# will sleep for 8 seconds with an initial backoff of 1, max backoff of 10.
+# And that an additional call to increment backoff followed by a wait for backoff will wait for 10.
+
+
+@pytest.mark.asyncio
+async def test_increment_backoff_and_wait_for_backoff(mock_sleep):
+ """
+ Test that calling increment backoff 4 times followed by wait for backoff will
+ sleep for 8 seconds with an initial backoff of 1, max backoff of 10.
+ And that an additional call to increment backoff followed by a wait
+ for backoff will wait for 10.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+
+ # Create a PulseBackoff object
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act
+ await backoff.wait_for_backoff()
+ assert mock_sleep.call_count == 0
+ backoff.increment_backoff()
+
+ await backoff.wait_for_backoff()
+ assert mock_sleep.call_count == 1
+ assert mock_sleep.call_args_list[0][0][0] == initial_backoff_interval
+ backoff.increment_backoff()
+
+ await backoff.wait_for_backoff()
+ assert mock_sleep.call_count == 2
+ assert mock_sleep.call_args_list[1][0][0] == 2 * initial_backoff_interval
+ backoff.increment_backoff()
+
+ await backoff.wait_for_backoff()
+ assert mock_sleep.call_count == 3
+ assert mock_sleep.call_args_list[2][0][0] == 4 * initial_backoff_interval
+ backoff.increment_backoff()
+
+ # Additional call after 4 iterations
+ await backoff.wait_for_backoff()
+ assert mock_sleep.call_count == 4
+ assert mock_sleep.call_args_list[3][0][0] == 8 * initial_backoff_interval
+ backoff.increment_backoff()
+
+ await backoff.wait_for_backoff()
+ assert mock_sleep.call_count == 5
+ assert mock_sleep.call_args_list[4][0][0] == max_backoff_interval
+ backoff.increment_backoff()
+ await backoff.wait_for_backoff()
+ assert mock_sleep.call_count == 6
+ assert mock_sleep.call_args_list[5][0][0] == max_backoff_interval
+
+
+@pytest.mark.asyncio
+async def test_absolute_backoff_time(mock_sleep, freeze_time_to_now):
+ """
+ Test that the absolute backoff time is calculated correctly.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act
+ backoff.set_absolute_backoff_time(time() + 100)
+ assert backoff._backoff_count == 0
+ backoff.reset_backoff()
+ # make sure backoff can't be reset
+ assert backoff.expiration_time == time() + 100
+ await backoff.wait_for_backoff()
+ assert mock_sleep.call_count == 1
+ assert mock_sleep.call_args_list[0][0][0] == 100
diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py
new file mode 100644
index 0000000..3c4fc81
--- /dev/null
+++ b/tests/test_exceptions.py
@@ -0,0 +1,159 @@
+# Generated by CodiumAI
+from time import time
+
+import pytest
+
+from pyadtpulse.exceptions import (
+ PulseAccountLockedError,
+ PulseAuthenticationError,
+ PulseClientConnectionError,
+ PulseConnectionError,
+ PulseExceptionWithBackoff,
+ PulseExceptionWithRetry,
+ PulseLoginException,
+ PulseNotLoggedInError,
+ PulseServerConnectionError,
+ PulseServiceTemporarilyUnavailableError,
+)
+from pyadtpulse.pulse_backoff import PulseBackoff
+
+
+class TestCodeUnderTest:
+ # PulseExceptionWithBackoff can be initialized with a message and a PulseBackoff object
+ def test_pulse_exception_with_backoff_initialization(self):
+ backoff = PulseBackoff("test", 1.0)
+ exception = PulseExceptionWithBackoff("error", backoff)
+ assert str(exception) == "PulseExceptionWithBackoff: error"
+ assert exception.backoff == backoff
+ assert backoff.backoff_count == 1
+
+ # PulseExceptionWithBackoff increments the backoff count when initialized
+ def test_pulse_exception_with_backoff_increment(self):
+ backoff = PulseBackoff("test", 1.0)
+ exception = PulseExceptionWithBackoff("error", backoff)
+ assert backoff.backoff_count == 1
+
+ # PulseExceptionWithRetry can be initialized with a message, a PulseBackoff object, and a retry time
+ def test_pulse_exception_with_retry_initialization(self):
+ backoff = PulseBackoff("test", 1.0)
+ retry_time = time() + 10
+ exception = PulseExceptionWithRetry("error", backoff, retry_time)
+ assert str(exception) == "PulseExceptionWithRetry: error"
+ assert exception.backoff == backoff
+ assert exception.retry_time == retry_time
+
+ # PulseExceptionWithRetry resets the backoff count and sets an absolute backoff time if retry time is in the future
+ def test_pulse_exception_with_retry_reset_and_set_absolute_backoff_time(self):
+ backoff = PulseBackoff("test", 1.0)
+ backoff.increment_backoff()
+ retry_time = time() + 10
+ exception = PulseExceptionWithRetry("error", backoff, retry_time)
+ assert backoff.backoff_count == 0
+ assert backoff.expiration_time == retry_time
+
+ # PulseServerConnectionError is a subclass of PulseExceptionWithBackoff and PulseConnectionError
+ def test_pulse_server_connection_error_inheritance_fixed(self):
+ assert issubclass(PulseServerConnectionError, PulseExceptionWithBackoff)
+ assert issubclass(PulseServerConnectionError, PulseConnectionError)
+
+ # PulseClientConnectionError is a subclass of PulseExceptionWithBackoff and PulseConnectionError
+ def test_pulse_client_connection_error_inheritance_fixed(self):
+ assert issubclass(PulseClientConnectionError, PulseExceptionWithBackoff)
+ assert issubclass(PulseClientConnectionError, PulseConnectionError)
+
+ # PulseExceptionWithBackoff raises an exception if initialized with an invalid message or non-PulseBackoff object
+ def test_pulse_exception_with_backoff_invalid_initialization(self):
+ with pytest.raises(Exception):
+ PulseExceptionWithBackoff(123, "backoff")
+
+ # PulseExceptionWithRetry raises an exception if initialized with an invalid message, non-PulseBackoff object, or invalid retry time
+ def test_pulse_exception_with_retry_invalid_initialization(self):
+ backoff = PulseBackoff("test", 1.0)
+ with pytest.raises(Exception):
+ PulseExceptionWithRetry(123, backoff, "retry")
+ with pytest.raises(Exception):
+ PulseExceptionWithRetry("error", "backoff", time() + 10)
+ with pytest.raises(Exception):
+ PulseExceptionWithRetry("error", backoff, "retry")
+
+ # PulseExceptionWithRetry does not reset the backoff count or set an absolute backoff time if retry time is in the past
+ def test_pulse_exception_with_retry_past_retry_time(self):
+ backoff = PulseBackoff("test", 1.0)
+ backoff.increment_backoff()
+ retry_time = time() - 10
+ with pytest.raises(PulseExceptionWithRetry):
+ raise PulseExceptionWithRetry(
+ "retry must be in the future", backoff, retry_time
+ )
+ # 1 backoff for increment
+ assert backoff.backoff_count == 2
+ assert backoff.expiration_time == 0.0
+
+ # PulseServiceTemporarilyUnavailableError does not reset the backoff count or set an absolute backoff time if retry time is in the past
+ def test_pulse_service_temporarily_unavailable_error_past_retry_time_fixed(self):
+ backoff = PulseBackoff("test", 1.0)
+ backoff.increment_backoff()
+ retry_time = time() - 10
+ with pytest.raises(PulseServiceTemporarilyUnavailableError):
+ raise PulseServiceTemporarilyUnavailableError(backoff, retry_time)
+ assert backoff.backoff_count == 2
+ assert backoff.expiration_time == 0.0
+
+ # PulseAuthenticationError is a subclass of PulseLoginException
+ def test_pulse_authentication_error_inheritance(self):
+ backoff = PulseBackoff("test", 1.0)
+ exception = PulseAuthenticationError()
+ assert isinstance(exception, PulseLoginException)
+
+ # PulseServiceTemporarilyUnavailableError is a subclass of PulseExceptionWithRetry and PulseConnectionError
+ def test_pulse_service_temporarily_unavailable_error(self):
+ backoff = PulseBackoff("test", 1.0)
+ exception = PulseServiceTemporarilyUnavailableError(
+ backoff, retry_time=time() + 10.0
+ )
+ assert backoff.backoff_count == 0
+ assert isinstance(exception, PulseExceptionWithRetry)
+ assert isinstance(exception, PulseConnectionError)
+
+ # PulseAccountLockedError is a subclass of PulseExceptionWithRetry and PulseLoginException
+ def test_pulse_account_locked_error_inheritance(self):
+ backoff = PulseBackoff("test", 1.0)
+ exception = PulseAccountLockedError(backoff, time() + 10.0)
+ assert backoff.backoff_count == 0
+ assert isinstance(exception, PulseExceptionWithRetry)
+ assert isinstance(exception, PulseLoginException)
+
+ # PulseExceptionWithBackoff string representation includes the class name and message
+ def test_pulse_exception_with_backoff_string_representation(self):
+ backoff = PulseBackoff("test", 1.0)
+ exception = PulseExceptionWithBackoff("error", backoff)
+ assert str(exception) == "PulseExceptionWithBackoff: error"
+
+    # PulseExceptionWithBackoff stores the backoff object and increments its count
+    def test_pulse_exception_with_backoff_stores_backoff(self):
+ backoff = PulseBackoff("test", 1.0)
+ exception = PulseExceptionWithBackoff("error", backoff)
+ assert str(exception) == "PulseExceptionWithBackoff: error"
+ assert exception.backoff == backoff
+ assert backoff.backoff_count == 1
+
+    # PulseExceptionWithRetry string representation includes the class name and message
+ def test_pulse_exception_with_retry_string_representation_fixed(self):
+ backoff = PulseBackoff("test", 1.0)
+ exception = PulseExceptionWithRetry("error", backoff, time() + 10)
+ expected_string = "PulseExceptionWithRetry: error"
+ assert str(exception) == expected_string
+
+ # PulseNotLoggedInError is a subclass of PulseLoginException
+ def test_pulse_not_logged_in_error_inheritance(self):
+ backoff = PulseBackoff("test", 1.0)
+ exception = PulseNotLoggedInError()
+ assert isinstance(exception, PulseLoginException)
+
+    # PulseExceptionWithRetry stores the backoff object and does not increment its count when the retry time is in the future
+ def test_pulse_exception_with_retry_string_representation(self):
+ backoff = PulseBackoff("test", 1.0)
+ exception = PulseExceptionWithRetry("error", backoff, time() + 10)
+ assert str(exception) == "PulseExceptionWithRetry: error"
+ assert exception.backoff == backoff
+ assert backoff.backoff_count == 0
diff --git a/tests/test_gateway.py b/tests/test_gateway.py
new file mode 100644
index 0000000..d608128
--- /dev/null
+++ b/tests/test_gateway.py
@@ -0,0 +1,378 @@
+# Generated by CodiumAI
+from ipaddress import IPv4Address
+
+import pytest
+
+from pyadtpulse.const import (
+ ADT_DEFAULT_POLL_INTERVAL,
+ ADT_GATEWAY_MAX_OFFLINE_POLL_INTERVAL,
+)
+from pyadtpulse.gateway import ADTPulseGateway
+
+
+# ADTPulseGateway object can be created with default values
+def test_default_values():
+ """
+ Test that ADTPulseGateway object can be created with default values
+ """
+ gateway = ADTPulseGateway()
+ assert gateway.manufacturer == "Unknown"
+ assert gateway._status_text == "OFFLINE"
+ assert gateway.backoff._name == "Gateway"
+ assert gateway.backoff._initial_backoff_interval == ADT_DEFAULT_POLL_INTERVAL
+ assert (
+ gateway.backoff._max_backoff_interval == ADT_GATEWAY_MAX_OFFLINE_POLL_INTERVAL
+ )
+ assert gateway.backoff._backoff_count == 0
+ assert gateway.backoff._expiration_time == 0.0
+    assert gateway.backoff._detailed_debug_logging is False
+    assert gateway.backoff._threshold == 0
+    assert gateway.model is None
+    assert gateway.serial_number is None
+    assert gateway.next_update == 0
+    assert gateway.last_update == 0
+    assert gateway.firmware_version is None
+    assert gateway.hardware_version is None
+    assert gateway.primary_connection_type is None
+    assert gateway.broadband_connection_status is None
+    assert gateway.cellular_connection_status is None
+    assert gateway._cellular_connection_signal_strength == 0.0
+    assert gateway.broadband_lan_ip_address is None
+    assert gateway._broadband_lan_mac is None
+    assert gateway.device_lan_ip_address is None
+    assert gateway._device_lan_mac is None
+    assert gateway.router_lan_ip_address is None
+    assert gateway.router_wan_ip_address is None
+
+
+# is_online property returns correct online status
+def test_is_online_property():
+ """
+ Test that is_online property returns correct online status
+ """
+ gateway = ADTPulseGateway()
+    assert gateway.is_online is False
+    gateway.is_online = True
+    assert gateway.is_online is True
+    gateway.is_online = False
+    assert gateway.is_online is False
+
+
+# poll_interval property can be set and returns correct value
+def test_poll_interval_property():
+ """
+ Test that poll_interval property can be set and returns correct value
+ """
+ gateway = ADTPulseGateway()
+ assert gateway.poll_interval == ADT_DEFAULT_POLL_INTERVAL
+ gateway.poll_interval = 60.0
+ assert gateway.poll_interval == 60.0
+
+
+# gateway MAC addresses can be set and retrieved
+def test_gateway_mac_addresses():
+ """
+ Test that gateway MAC addresses can be set and retrieved
+ """
+ gateway = ADTPulseGateway()
+ gateway.broadband_lan_mac = "00:11:22:33:44:55"
+ assert gateway.broadband_lan_mac == "00:11:22:33:44:55"
+ gateway.device_lan_mac = "AA:BB:CC:DD:EE:FF"
+ assert gateway.device_lan_mac == "AA:BB:CC:DD:EE:FF"
+
+
+# cellular connection signal strength can be set and retrieved
+def test_cellular_connection_signal_strength():
+ """
+ Test that cellular connection signal strength can be set and retrieved
+ """
+ gateway = ADTPulseGateway()
+ gateway.cellular_connection_signal_strength = -70.5
+ assert gateway.cellular_connection_signal_strength == -70.5
+
+
+# set_gateway_attributes method sets attributes correctly
+def test_set_gateway_attributes_sets_attributes_correctly():
+ """
+ Test that set_gateway_attributes method sets attributes correctly
+ """
+ gateway = ADTPulseGateway()
+ attributes = {
+ "manufacturer": "ADT",
+ "model": "1234",
+ "serial_number": "5678",
+ "firmware_version": "1.0",
+ "hardware_version": "2.0",
+ "primary_connection_type": "Ethernet",
+ "broadband_connection_status": "Connected",
+ "cellular_connection_status": "Connected",
+ "broadband_lan_mac": "00:11:22:33:44:55",
+ "device_lan_mac": "AA:BB:CC:DD:EE:FF",
+ "cellular_connection_signal_strength": 4.5,
+ }
+
+ gateway.set_gateway_attributes(attributes)
+
+ assert gateway.manufacturer == "ADT"
+ assert gateway.model == "1234"
+ assert gateway.serial_number == "5678"
+ assert gateway.firmware_version == "1.0"
+ assert gateway.hardware_version == "2.0"
+ assert gateway.primary_connection_type == "Ethernet"
+ assert gateway.broadband_connection_status == "Connected"
+ assert gateway.cellular_connection_status == "Connected"
+ assert gateway.broadband_lan_mac == "00:11:22:33:44:55"
+ assert gateway.device_lan_mac == "AA:BB:CC:DD:EE:FF"
+ assert gateway.cellular_connection_signal_strength == 4.5
+
+
+# backoff object can be created with default values and current backoff interval can be retrieved
+def test_default_values2():
+ """
+ Test that ADTPulseGateway object can be created with default values
+ """
+ gateway = ADTPulseGateway()
+ assert gateway.manufacturer == "Unknown"
+ assert gateway._status_text == "OFFLINE"
+ assert gateway.backoff.name == "Gateway"
+ assert gateway.backoff.initial_backoff_interval == ADT_DEFAULT_POLL_INTERVAL
+ assert (
+ gateway.backoff._max_backoff_interval == ADT_GATEWAY_MAX_OFFLINE_POLL_INTERVAL
+ )
+ assert gateway.backoff.backoff_count == 0
+ assert gateway.backoff.expiration_time == 0.0
+    assert gateway.backoff._detailed_debug_logging is False
+    assert gateway.backoff._threshold == 0
+    assert gateway.model is None
+    assert gateway.serial_number is None
+    assert gateway.next_update == 0
+    assert gateway.last_update == 0
+    assert gateway.firmware_version is None
+    assert gateway.hardware_version is None
+    assert gateway.primary_connection_type is None
+    assert gateway.broadband_connection_status is None
+    assert gateway.cellular_connection_status is None
+    assert gateway._cellular_connection_signal_strength == 0.0
+    assert gateway.broadband_lan_ip_address is None
+    assert gateway._broadband_lan_mac is None
+    assert gateway.device_lan_ip_address is None
+    assert gateway._device_lan_mac is None
+    assert gateway.router_lan_ip_address is None
+    assert gateway.router_wan_ip_address is None
+
+
+# backoff object can be incremented and reset correctly
+def test_backoff_increment_and_reset():
+ """
+ Test that backoff object can be incremented and reset correctly
+ """
+ gateway = ADTPulseGateway()
+
+ # Increment backoff count
+ gateway.backoff.increment_backoff()
+ assert gateway.backoff._backoff_count == 1
+
+ # Reset backoff count
+ gateway.backoff.reset_backoff()
+ assert gateway.backoff._backoff_count == 0
+
+
+# is_online property returns correct offline status when set to False
+def test_is_online_returns_correct_offline_status_when_set_to_false():
+ """
+ Test that is_online property returns correct offline status when set to False
+ """
+ gateway = ADTPulseGateway()
+ gateway.is_online = False
+    assert gateway.is_online is False
+
+
+# poll_interval property raises ValueError when set to 0
+def test_poll_interval_raises_value_error_when_set_to_0():
+ """
+ Test that poll_interval property raises ValueError when set to 0
+ """
+ gateway = ADTPulseGateway()
+ with pytest.raises(ValueError):
+ gateway.poll_interval = 0
+
+
+# backoff object can wait for correct amount of time before returning
+@pytest.mark.asyncio
+async def test_backoff_wait_time():
+ """
+ Test that backoff object can wait for correct amount of time before returning
+ """
+    import time  # local import: only this test needs wall-clock time
+
+ # Arrange
+ gateway = ADTPulseGateway()
+ gateway.backoff._backoff_count = 1
+ gateway.backoff._threshold = 0
+ gateway.backoff._initial_backoff_interval = 1.0
+ gateway.backoff._max_backoff_interval = 10.0
+ gateway.backoff._expiration_time = time.time() + 5.0
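+    # The expiration time is set 5 seconds in the future, so wait_for_backoff()
+    # is expected to block for roughly that long before returning.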
+
+ # Act
+ start_time = time.time()
+ await gateway.backoff.wait_for_backoff()
+
+ # Assert
+ end_time = time.time()
+ assert end_time - start_time >= 5.0
+
+
+# Test that set_gateway_attributes method sets attributes to None when given an empty string
+def test_set_gateway_attributes_empty_string_fixed():
+ """
+ Test that set_gateway_attributes method sets attributes to None when given an empty string
+ """
+ gateway = ADTPulseGateway()
+ gateway.set_gateway_attributes(
+ {"model": "", "serial_number": "", "firmware_version": ""}
+ )
+ assert gateway.model is None
+ assert gateway.serial_number is None
+ assert gateway.firmware_version is None
+
+
+# cellular connection signal strength can be set to 0.0
+def test_cellular_connection_signal_strength_to_zero():
+ """
+ Test that cellular connection signal strength can be set to 0.0
+ """
+ gateway = ADTPulseGateway()
+ gateway.cellular_connection_signal_strength = 0.0
+ assert gateway.cellular_connection_signal_strength == 0.0
+
+
+# poll_interval property raises ValueError when set to a value greater than ADT_GATEWAY_MAX_OFFLINE_POLL_INTERVAL
+def test_poll_interval_raises_value_error():
+ """
+ Test that poll_interval property raises ValueError when set to a value greater than ADT_GATEWAY_MAX_OFFLINE_POLL_INTERVAL
+ """
+ gateway = ADTPulseGateway()
+ with pytest.raises(ValueError):
+ gateway.poll_interval = ADT_GATEWAY_MAX_OFFLINE_POLL_INTERVAL + 1
+
+
+# Test that set_gateway_attributes method sets attributes to a valid value when given a valid value
+def test_set_gateway_attributes_valid_value():
+ """
+ Test that set_gateway_attributes method sets attributes to a valid value when given a valid value
+ """
+ gateway = ADTPulseGateway()
+ gateway.set_gateway_attributes({"broadband_lan_mac": "00:0a:95:9d:68:16"})
+ assert gateway.broadband_lan_mac == "00:0a:95:9d:68:16"
+
+
+# Test that set_gateway_attributes method sets IP address attributes to None when given an invalid IP address
+def test_set_gateway_attributes_invalid_ip():
+ """
+ Test that set_gateway_attributes method sets IP address attributes to None when given an invalid IP address
+ """
+ gateway = ADTPulseGateway()
+ gateway.set_gateway_attributes({"broadband_lan_ip_address": "invalid_ip"})
+ assert gateway.broadband_lan_ip_address is None
+ gateway.set_gateway_attributes({"device_lan_ip_address": "invalid_ip"})
+ assert gateway.device_lan_ip_address is None
+ gateway.set_gateway_attributes({"router_lan_ip_address": "invalid_ip"})
+ assert gateway.router_lan_ip_address is None
+ gateway.set_gateway_attributes({"router_wan_ip_address": "invalid_ip"})
+ assert gateway.router_wan_ip_address is None
+
+
+# gateway MAC addresses raise ValueError when set to an invalid MAC address
+def test_gateway_mac_address_invalid():
+ """
+ Test that setting an invalid MAC address raises a ValueError
+ """
+ gateway = ADTPulseGateway()
+ with pytest.raises(ValueError):
+ gateway.broadband_lan_mac = "00:00:00:00:00:00:00"
+ with pytest.raises(ValueError):
+ gateway.device_lan_mac = "00:00:00:00:00:00:00"
+
+
+# is_online property can be set to True and False
+def test_is_online_property_true_and_false():
+ """
+ Test that is_online property can be set to True and False
+ """
+ gateway = ADTPulseGateway()
+
+ # Test setting is_online to True
+ gateway.is_online = True
+    assert gateway.is_online is True
+ assert gateway._status_text == "ONLINE"
+
+ # Test setting is_online to False
+ gateway.is_online = False
+    assert gateway.is_online is False
+ assert gateway._status_text == "OFFLINE"
+
+
+# poll_interval property can be set to a custom value
+def test_poll_interval_custom_value():
+ """
+ Test that poll_interval property can be set to a custom value
+ """
+ gateway = ADTPulseGateway()
+ custom_interval = 10.0
+ gateway.poll_interval = custom_interval
+ assert gateway.poll_interval == custom_interval
+
+
+# ADTPulseGateway object can be created with custom values
+def test_custom_values():
+ """
+ Test that ADTPulseGateway object can be created with custom values
+ """
+ gateway = ADTPulseGateway(
+ manufacturer="Custom Manufacturer",
+ _status_text="CUSTOM_STATUS",
+ model="Custom Model",
+ serial_number="Custom Serial Number",
+ next_update=1234567890,
+ last_update=9876543210,
+ firmware_version="Custom Firmware Version",
+ hardware_version="Custom Hardware Version",
+ primary_connection_type="Custom Connection Type",
+ broadband_connection_status="Custom Broadband Status",
+ cellular_connection_status="Custom Cellular Status",
+ _cellular_connection_signal_strength=0.5,
+ broadband_lan_ip_address=IPv4Address("192.168.0.1"),
+ _broadband_lan_mac="00:11:22:33:44:55",
+ device_lan_ip_address=IPv4Address("192.168.0.2"),
+ _device_lan_mac="AA:BB:CC:DD:EE:FF",
+ router_lan_ip_address=IPv4Address("192.168.1.1"),
+ router_wan_ip_address=IPv4Address("10.0.0.1"),
+ )
+
+ assert gateway.manufacturer == "Custom Manufacturer"
+ assert gateway._status_text == "CUSTOM_STATUS"
+ assert gateway.backoff._name == "Gateway"
+ assert gateway.backoff._initial_backoff_interval == ADT_DEFAULT_POLL_INTERVAL
+ assert (
+ gateway.backoff._max_backoff_interval == ADT_GATEWAY_MAX_OFFLINE_POLL_INTERVAL
+ )
+ assert gateway.backoff._backoff_count == 0
+ assert gateway.backoff._expiration_time == 0.0
+    assert gateway.backoff._detailed_debug_logging is False
+ assert gateway.backoff._threshold == 0
+ assert gateway.model == "Custom Model"
+ assert gateway.serial_number == "Custom Serial Number"
+ assert gateway.next_update == 1234567890
+ assert gateway.last_update == 9876543210
+ assert gateway.firmware_version == "Custom Firmware Version"
+ assert gateway.hardware_version == "Custom Hardware Version"
+ assert gateway.primary_connection_type == "Custom Connection Type"
+ assert gateway.broadband_connection_status == "Custom Broadband Status"
+ assert gateway.cellular_connection_status == "Custom Cellular Status"
+ assert gateway._cellular_connection_signal_strength == 0.5
+ assert gateway.broadband_lan_ip_address == IPv4Address("192.168.0.1")
+ assert gateway._broadband_lan_mac == "00:11:22:33:44:55"
+ assert gateway.device_lan_ip_address == IPv4Address("192.168.0.2")
+ assert gateway._device_lan_mac == "AA:BB:CC:DD:EE:FF"
+ assert gateway.router_lan_ip_address == IPv4Address("192.168.1.1")
+ assert gateway.router_wan_ip_address == IPv4Address("10.0.0.1")
diff --git a/tests/test_paa_codium.py b/tests/test_paa_codium.py
new file mode 100644
index 0000000..ce16dd6
--- /dev/null
+++ b/tests/test_paa_codium.py
@@ -0,0 +1,258 @@
+# Generated by CodiumAI
+
+import pytest
+from bs4 import BeautifulSoup
+
+from conftest import LoginType, add_signin
+from pyadtpulse.exceptions import PulseAuthenticationError, PulseNotLoggedInError
+from pyadtpulse.pyadtpulse_async import PyADTPulseAsync
+from pyadtpulse.site import ADTPulseSite
+
+
+class TestPyADTPulseAsync:
+ # The class can be instantiated with the required parameters (username, password, fingerprint) and optional parameters (service_host, user_agent, debug_locks, keepalive_interval, relogin_interval, detailed_debug_logging).
+ @pytest.mark.asyncio
+ async def test_instantiation_with_parameters(self):
+ pulse = PyADTPulseAsync(
+ username="valid_email@example.com",
+ password="your_password",
+ fingerprint="your_fingerprint",
+ service_host="https://portal.adtpulse.com",
+ user_agent="Your User Agent",
+ debug_locks=False,
+ keepalive_interval=5,
+ relogin_interval=60,
+ detailed_debug_logging=True,
+ )
+ assert isinstance(pulse, PyADTPulseAsync)
+
+ # The __repr__ method returns a string representation of the class.
+ @pytest.mark.asyncio
+ async def test_repr_method_with_valid_email(self):
+ pulse = PyADTPulseAsync(
+ username="your_username@example.com",
+ password="your_password",
+ fingerprint="your_fingerprint",
+ )
+ assert repr(pulse) == ""
+
+ # The async_login method successfully authenticates the user to the ADT Pulse cloud service using a valid email address as the username.
+ @pytest.mark.asyncio
+ async def test_async_login_success_with_valid_email(
+ self, mocked_server_responses, get_mocked_url, read_file
+ ):
+ pulse = PyADTPulseAsync(
+ username="valid_email@example.com",
+ password="your_password",
+ fingerprint="your_fingerprint",
+ )
+ add_signin(
+ LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file
+ )
+ await pulse.async_login()
+
+ # The class is instantiated without the required parameters (username, password, fingerprint) and raises an exception.
+ @pytest.mark.asyncio
+ async def test_instantiation_without_parameters(self):
+ with pytest.raises(TypeError):
+ pulse = PyADTPulseAsync()
+
+ # The async_login method fails to authenticate the user to the ADT Pulse cloud service and raises a PulseAuthenticationError.
+ @pytest.mark.asyncio
+ async def test_async_login_failure_with_valid_username(self):
+ pulse = PyADTPulseAsync(
+ username="valid_email@example.com",
+ password="invalid_password",
+ fingerprint="invalid_fingerprint",
+ )
+ with pytest.raises(PulseAuthenticationError):
+ await pulse.async_login()
+
+    # The async_logout method raises an error when called without being logged in.
+ @pytest.mark.asyncio
+ async def test_async_logout_without_login_with_valid_email_fixed(self):
+ pulse = PyADTPulseAsync(
+ username="valid_username@example.com",
+ password="valid_password",
+ fingerprint="valid_fingerprint",
+ )
+ with pytest.raises(RuntimeError):
+ await pulse.async_logout()
+
+ # The async_logout method successfully logs the user out of the ADT Pulse cloud service.
+ @pytest.mark.asyncio
+ async def test_async_logout_successfully_logs_out(
+ self, mocked_server_responses, get_mocked_url, read_file
+ ):
+ # Arrange
+ pulse = PyADTPulseAsync(
+ username="test_user@example.com",
+ password="test_password",
+ fingerprint="test_fingerprint",
+ )
+ add_signin(
+ LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file
+ )
+ # Act
+ await pulse.async_login()
+ await pulse.async_logout()
+
+ # Assert
+ assert not pulse.is_connected
+
+ # The site property returns an ADTPulseSite object after logging in.
+ @pytest.mark.asyncio
+ async def test_site_property_returns_ADTPulseSite_object_with_login(
+ self, mocked_server_responses, get_mocked_url, read_file
+ ):
+ # Arrange
+ pulse = PyADTPulseAsync("test@example.com", "valid_password", "fingerprint")
+ add_signin(
+ LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file
+ )
+ # Act
+ await pulse.async_login()
+ site = pulse.site
+
+ # Assert
+ assert isinstance(site, ADTPulseSite)
+
+ # The is_connected property returns True if the class is connected to the ADT Pulse cloud service.
+ @pytest.mark.asyncio
+ async def test_is_connected_property_returns_true(
+ self, mocked_server_responses, get_mocked_url, read_file
+ ):
+ pulse = PyADTPulseAsync(
+ username="valid_username@example.com",
+ password="valid_password",
+ fingerprint="valid_fingerprint",
+ )
+ add_signin(
+ LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file
+ )
+ await pulse.async_login()
+        assert pulse.is_connected is True
+
+ # The site property is accessed without being logged in and raises an exception.
+ @pytest.mark.asyncio
+ async def test_site_property_without_login_raises_exception(self):
+ pulse = PyADTPulseAsync(
+ username="test@example.com",
+ password="your_password",
+ fingerprint="your_fingerprint",
+ service_host="https://portal.adtpulse.com",
+ user_agent="Your User Agent",
+ debug_locks=False,
+ keepalive_interval=5,
+ relogin_interval=60,
+ detailed_debug_logging=True,
+ )
+ with pytest.raises(RuntimeError):
+ pulse.site
+
+ # The sites property returns a list of ADTPulseSite objects.
+ @pytest.mark.asyncio
+ async def test_sites_property_returns_list_of_objects(
+ self, mocked_server_responses, get_mocked_url, read_file
+ ):
+ # Arrange
+ pulse = PyADTPulseAsync(
+ "test@example.com", "valid_password", "valid_fingerprint"
+ )
+ add_signin(
+ LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file
+ )
+ # Act
+ await pulse.async_login()
+ sites = pulse.sites
+
+ # Assert
+ assert isinstance(sites, list)
+ for site in sites:
+ assert isinstance(site, ADTPulseSite)
+
+ # The is_connected property returns False if the class is not connected to the ADT Pulse cloud service.
+ @pytest.mark.asyncio
+ async def test_is_connected_property_returns_false_when_not_connected(self):
+ pulse = PyADTPulseAsync(
+ username="your_username@example.com",
+ password="your_password",
+ fingerprint="your_fingerprint",
+ )
+        assert pulse.is_connected is False
+
+ # The sites property is accessed without being logged in and raises an exception.
+ @pytest.mark.asyncio
+ async def test_sites_property_without_login_raises_exception(self):
+ pulse = PyADTPulseAsync(
+ username="your_username@example.com",
+ password="your_password",
+ fingerprint="your_fingerprint",
+ service_host="https://portal.adtpulse.com",
+ user_agent="Your User Agent",
+ debug_locks=False,
+ keepalive_interval=5,
+ relogin_interval=60,
+ detailed_debug_logging=True,
+ )
+ with pytest.raises(RuntimeError):
+ pulse.sites
+
+ # The wait_for_update method is called without being logged in and raises an exception.
+ @pytest.mark.asyncio
+ async def test_wait_for_update_without_login_raises_exception(self):
+ pulse = PyADTPulseAsync(
+ username="your_username@example.com",
+ password="your_password",
+ fingerprint="your_fingerprint",
+ service_host="https://portal.adtpulse.com",
+ user_agent="Your User Agent",
+ debug_locks=False,
+ keepalive_interval=5,
+ relogin_interval=60,
+ detailed_debug_logging=True,
+ )
+
+ with pytest.raises(PulseNotLoggedInError):
+ await pulse.wait_for_update()
+
+ # The _initialize_sites method retrieves the site id and name from the soup object and creates a new ADTPulseSite object.
+ @pytest.mark.asyncio
+ async def test_initialize_sites_method_with_valid_service_host(
+ self, mocker, read_file
+ ):
+ # Arrange
+ username = "test@example.com"
+ password = "test_password"
+ fingerprint = "test_fingerprint"
+ service_host = "https://portal.adtpulse.com"
+ user_agent = "Test User Agent"
+ debug_locks = False
+ keepalive_interval = 10
+ relogin_interval = 30
+ detailed_debug_logging = True
+
+ pulse = PyADTPulseAsync(
+ username=username,
+ password=password,
+ fingerprint=fingerprint,
+ service_host=service_host,
+ user_agent=user_agent,
+ debug_locks=debug_locks,
+ keepalive_interval=keepalive_interval,
+ relogin_interval=relogin_interval,
+ detailed_debug_logging=detailed_debug_logging,
+ )
+
+        soup = BeautifulSoup(read_file("summary.html"), "html.parser")
+
+ # Mock the fetch_devices method to always return True
+ # mocker.patch.object(ADTPulseSite, "fetch_devices", return_value=True)
+
+ # Act
+ await pulse._initialize_sites(soup)
+
+ # Assert
+ assert pulse._site is not None
+ assert pulse._site.id == "160301za524548"
+ assert pulse._site.name == "Robert Lippmann"
diff --git a/tests/test_pap.py b/tests/test_pap.py
new file mode 100644
index 0000000..010fec2
--- /dev/null
+++ b/tests/test_pap.py
@@ -0,0 +1,314 @@
+# Generated by CodiumAI
+
+import pytest
+from typeguard import TypeCheckError
+
+from pyadtpulse.pulse_authentication_properties import PulseAuthenticationProperties
+
+
+class TestPulseAuthenticationProperties:
+ # Initialize object with valid username, password, and fingerprint
+ def test_initialize_with_valid_credentials(self):
+ """
+ Test initializing PulseAuthenticationProperties with valid username, password, and fingerprint
+ """
+ # Arrange
+ username = "test@example.com"
+ password = "password123"
+ fingerprint = "fingerprint123"
+
+ # Act
+ properties = PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Assert
+ assert properties.username == username
+ assert properties.password == password
+ assert properties.fingerprint == fingerprint
+
+ # Get and set username, password, fingerprint, site_id, and last_login_time properties
+ def test_get_and_set_properties(self):
+ """
+ Test getting and setting username, password, fingerprint, site_id, and last_login_time properties
+ """
+ # Arrange
+ username = "test@example.com"
+ password = "password123"
+ fingerprint = "fingerprint123"
+ site_id = "site123"
+ last_login_time = 123456789
+
+ properties = PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Act
+ properties.username = "new_username@example.com"
+ properties.password = "new_password"
+ properties.fingerprint = "new_fingerprint"
+ properties.site_id = site_id
+ properties.last_login_time = last_login_time
+
+ # Assert
+ assert properties.username == "new_username@example.com"
+ assert properties.password == "new_password"
+ assert properties.fingerprint == "new_fingerprint"
+ assert properties.site_id == site_id
+ assert properties.last_login_time == last_login_time
+
+ # Get last_login_time property after setting it
+ def test_get_last_login_time_after_setting(self):
+ """
+ Test getting last_login_time property after setting it
+ """
+ # Arrange
+ username = "test@example.com"
+ password = "password123"
+ fingerprint = "fingerprint123"
+ last_login_time = 123456789
+
+ properties = PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Act
+ properties.last_login_time = last_login_time
+
+ # Assert
+ assert properties.last_login_time == last_login_time
+
+ # Set username, password, fingerprint, site_id properties with valid values
+ def test_set_properties_with_valid_values(self):
+ """
+ Test setting username, password, fingerprint, site_id properties with valid values
+ """
+ # Arrange
+ username = "test@example.com"
+ password = "password123"
+ fingerprint = "fingerprint123"
+ site_id = "site123"
+
+ properties = PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Act
+ properties.site_id = site_id
+
+ # Assert
+ assert properties.username == username
+ assert properties.password == password
+ assert properties.fingerprint == fingerprint
+ assert properties.site_id == site_id
+
+ # Set username, password, fingerprint properties with non-empty fingerprint
+ def test_set_properties_with_non_empty_fingerprint(self):
+ """
+ Test setting username, password, fingerprint properties with non-empty fingerprint
+ """
+ # Arrange
+ username = "test@example.com"
+ password = "password123"
+ fingerprint = "fingerprint123"
+
+ properties = PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Act
+ properties.username = username
+ properties.password = password
+ properties.fingerprint = fingerprint
+
+ # Assert
+ assert properties.username == username
+ assert properties.password == password
+ assert properties.fingerprint == fingerprint
+
+ # Set site_id property with empty string
+ def test_set_site_id_with_empty_string(self):
+ """
+ Test setting site_id property with empty string
+ """
+ # Arrange
+ site_id = ""
+
+ properties = PulseAuthenticationProperties(
+ "test@example.com", "password123", "fingerprint123"
+ )
+
+ # Act
+ properties.site_id = site_id
+
+ # Assert
+ assert properties.site_id == site_id
+
+ # Initialize object with empty username, password, or fingerprint
+ def test_initialize_with_empty_credentials(self):
+ """
+ Test initializing PulseAuthenticationProperties with empty username, password, or fingerprint
+ """
+ # Arrange
+ username = ""
+ password = ""
+ fingerprint = ""
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Initialize object with invalid username or password
+ def test_initialize_with_invalid_credentials1(self):
+ """
+ Test initializing PulseAuthenticationProperties with invalid username or password
+ """
+ # Arrange
+ username = "invalid_username"
+ password = "invalid_password"
+ fingerprint = "fingerprint123"
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Set username, password, fingerprint properties with invalid values
+ def test_set_properties_with_invalid_values(self):
+ """
+ Test setting username, password, fingerprint properties with invalid values
+ """
+ # Arrange
+ username = "invalid_username"
+ password = ""
+ fingerprint = ""
+
+ properties = PulseAuthenticationProperties(
+ "test@example.com", "password123", "fingerprint123"
+ )
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ properties.username = username
+
+ with pytest.raises(ValueError):
+ properties.password = password
+
+ with pytest.raises(ValueError):
+ properties.fingerprint = fingerprint
+
+ # Set last_login_time property with non-integer value
+ def test_set_last_login_time_with_non_integer_value(self):
+ """
+ Test setting last_login_time property with non-integer value
+ """
+ # Arrange
+ username = "test@example.com"
+ password = "password123"
+ fingerprint = "fingerprint123"
+ last_login_time = "invalid_time"
+
+ properties = PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Act and Assert
+ with pytest.raises(TypeCheckError) as exc_info:
+ properties.last_login_time = last_login_time
+
+ # Assert
+ assert (
+ str(exc_info.value)
+ == 'argument "login_time" (str) is not an instance of int'
+ )
+
+ # Set site_id property with non-string value
+ def test_set_site_id_with_non_string_value(self):
+ """
+ Test setting site_id property with non-string value
+ """
+ # Arrange
+ username = "test@example.com"
+ password = "password123"
+ fingerprint = "fingerprint123"
+        site_id = 12345  # non-string value to trigger the type check
+
+ properties = PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Act
+ with pytest.raises(TypeCheckError):
+ properties.site_id = site_id
+
+ # Assert
+ assert not properties.site_id
+
+ # Set last_login_time property with integer value
+ def test_set_last_login_time_with_integer_value(self):
+ """
+ Test setting last_login_time property with integer value
+ """
+ # Arrange
+ username = "test@example.com"
+ password = "password123"
+ fingerprint = "fingerprint123"
+ last_login_time = 123456789
+
+ properties = PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Act
+ properties.last_login_time = last_login_time
+
+ # Assert
+ assert properties.last_login_time == last_login_time
+
+ # Raise ValueError when initializing object with invalid username or password
+ def test_initialize_with_invalid_credentials(self):
+ """
+ Test initializing PulseAuthenticationProperties with invalid username or password
+ """
+ # Arrange
+ username = "invalid_username"
+ password = ""
+ fingerprint = "valid_fingerprint"
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ properties = PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Raise TypeError when setting site_id property with non-string value
+ def test_raise_type_error_when_setting_site_id_with_non_string_value(self):
+ """
+ Test that a TypeError is raised when setting the site_id property with a non-string value
+ """
+ # Arrange
+ properties = PulseAuthenticationProperties(
+ "test@example.com", "password123", "fingerprint123"
+ )
+
+ # Act and Assert
+ with pytest.raises(TypeCheckError):
+ properties.site_id = 123
+
+ # Raise ValueError when setting username, password, fingerprint properties with invalid values
+ def test_invalid_properties(self):
+ """
+ Test that ValueError is raised when setting invalid username, password, and fingerprint properties
+ """
+ # Arrange
+ username = "test@example.com"
+ password = "password123"
+ fingerprint = "fingerprint123"
+ properties = PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ properties.username = ""
+ with pytest.raises(ValueError):
+ properties.password = ""
+ with pytest.raises(ValueError):
+ properties.fingerprint = ""
+
+ # Raise TypeCheckError when setting last_login_time property with non-integer value
+ def test_raise_type_check_error_when_setting_last_login_time_with_non_integer_value(
+ self,
+ ):
+ """
+ Test that a TypeCheckError is raised when setting the last_login_time property with a non-integer value
+ """
+ # Arrange
+ properties = PulseAuthenticationProperties(
+ "test@example.com", "password123", "fingerprint123"
+ )
+
+ # Act and Assert
+        with pytest.raises(TypeCheckError):
+ properties.last_login_time = "invalid_time"
diff --git a/tests/test_pqm_codium.py b/tests/test_pqm_codium.py
new file mode 100644
index 0000000..435d01e
--- /dev/null
+++ b/tests/test_pqm_codium.py
@@ -0,0 +1,508 @@
+# Generated by CodiumAI
+
+# Dependencies:
+# pip install pytest-mock
+from time import time
+
+import pytest
+from aiohttp.client_exceptions import ClientConnectionError, ServerDisconnectedError
+from aiohttp.client_reqrep import ConnectionKey
+from yarl import URL
+
+from conftest import MOCKED_API_VERSION
+from pyadtpulse.exceptions import (
+ PulseClientConnectionError,
+ PulseNotLoggedInError,
+ PulseServerConnectionError,
+ PulseServiceTemporarilyUnavailableError,
+)
+from pyadtpulse.pulse_backoff import PulseBackoff
+from pyadtpulse.pulse_connection_properties import PulseConnectionProperties
+from pyadtpulse.pulse_connection_status import PulseConnectionStatus
+from pyadtpulse.pulse_query_manager import PulseQueryManager
+
+
+class TestPulseQueryManager:
+ # can successfully make a GET request to a given URI with a valid service host
+ @pytest.mark.asyncio
+ async def test_get_request_success_with_valid_service_host(self, mocker):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (200, "Response", URL("http://example.com"))
+
+ async def mock_async_query(
+ uri,
+ method="GET",
+ extra_params=None,
+ extra_headers=None,
+ timeout=1,
+ requires_authentication=True,
+ ):
+ return expected_response
+
+ mocker.patch.object(
+ PulseQueryManager, "async_query", side_effect=mock_async_query
+ )
+
+ # When
+ response = await query_manager.async_query("/api/data")
+
+ # Then
+ assert response == expected_response
+
+ # can successfully make a POST request to a given URI
+ @pytest.mark.asyncio
+ async def test_post_request_success(self, mocker):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (200, "Response", URL("http://example.com"))
+
+ async def mock_async_query(
+ uri,
+ method,
+ extra_params=None,
+ extra_headers=None,
+ timeout=1,
+ requires_authentication=True,
+ ):
+ return expected_response
+
+ mocker.patch.object(
+ PulseQueryManager, "async_query", side_effect=mock_async_query
+ )
+
+ # When
+ response = await query_manager.async_query("/api/data", method="POST")
+
+ # Then
+ assert response == expected_response
+
+ # can handle HTTP 200 OK response with a valid service host
+ @pytest.mark.asyncio
+ async def test_handle_http_200_ok_with_valid_service_host(self, mocker):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (200, "Response", URL("http://example.com"))
+
+ async def mock_async_query(
+ uri,
+ method="GET",
+ extra_params=None,
+ extra_headers=None,
+ timeout=1,
+ requires_authentication=True,
+ ):
+ return expected_response
+
+ mocker.patch.object(
+ PulseQueryManager, "async_query", side_effect=mock_async_query
+ )
+
+ # When
+ response = await query_manager.async_query("/api/data")
+
+ # Then
+ assert response == expected_response
+
+    # can handle HTTP 503 Service Unavailable response
+    @pytest.mark.asyncio
+    async def test_handle_http_503_service_unavailable(self, mocker):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+
+ async def mock_async_query(
+ uri,
+ method="GET",
+ extra_params=None,
+ extra_headers=None,
+ timeout=1,
+ requires_authentication=True,
+ ):
+ backoff = PulseBackoff(
+ "Query:GET /api/data",
+ connection_status.get_backoff().initial_backoff_interval,
+ threshold=0,
+ debug_locks=query_manager._debug_locks,
+ detailed_debug_logging=connection_properties.detailed_debug_logging,
+ )
+ retry_time = await backoff.wait_for_backoff()
+ if retry_time is None:
+ retry_time = time() + 1 # Set a future time for retry_time
+ else:
+ retry_time += time() + 1
+ raise PulseServiceTemporarilyUnavailableError(backoff, retry_time)
+
+ mocker.patch.object(
+ PulseQueryManager, "async_query", side_effect=mock_async_query
+ )
+
+ # When
+ with pytest.raises(PulseServiceTemporarilyUnavailableError):
+ await query_manager.async_query("/api/data")
+
+ # Then
+ # PulseServiceTemporarilyUnavailableError should be raised
+
+    # can handle HTTP 429 Too Many Requests response
+    @pytest.mark.asyncio
+    async def test_handle_http_429(
+ self, mocker, mocked_server_responses, get_mocked_url
+ ):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ url = get_mocked_url("/api/data")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (429, "Too Many Requests", URL(url))
+ mocked_server_responses.get(url, status=expected_response[0])
+ # When
+ with pytest.raises(PulseServiceTemporarilyUnavailableError) as exc_info:
+ await query_manager.async_query("/api/data", requires_authentication=False)
+
+ # Then
+ assert "Pulse service temporarily unavailable until indefinitely" in str(
+ exc_info.value
+ )
+ assert exc_info.value.backoff == connection_status.get_backoff()
+
+ # can handle ClientConnectionError with 'Connection refused' message using default parameter values
+ @pytest.mark.asyncio
+    async def test_handle_client_connection_error_with_default_values(
+ self, mocked_server_responses, get_mocked_url
+ ):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_error_message = "Connection refused"
+
+ expected_response = (None, None, None, None)
+
+ # When
+ with pytest.raises(PulseServerConnectionError) as exc_info:
+ await query_manager.async_query("/api/data", requires_authentication=False)
+
+ # Then
+ assert (
+ str(exc_info.value)
+ == f"PulseServerConnectionError: Pulse server error: {expected_error_message}: GET {get_mocked_url('/api/data')}"
+ )
+
+ # can handle ClientConnectorError with non-TimeoutError or BrokenPipeError os_error
+ @pytest.mark.asyncio
+    async def test_handle_client_connector_error(
+ self, mocked_server_responses, get_mocked_url
+ ):
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_error = PulseServerConnectionError(
+ "Error occurred", connection_status.get_backoff()
+ )
+ ck = ConnectionKey("portal.adtpulse.com", 443, True, None, None, None, None)
+ url = get_mocked_url("/api/data")
+ mocked_server_responses.get(url, exception=expected_error)
+ # When, Then
+ with pytest.raises(PulseServerConnectionError) as ex:
+ await query_manager.async_query("/api/data", requires_authentication=False)
+ assert str(ex.value) == str(expected_error)
+
+ # can handle Retry-After header in HTTP response
+ @pytest.mark.timeout(70)
+ @pytest.mark.asyncio
+ async def test_handle_retry_after_header(
+ self, mocked_server_responses, get_mocked_url, freeze_time_to_now
+ ):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ url = get_mocked_url("/api/data")
+ expected_response = (429, "Too Many Requests", URL(url))
+ expected_retry_after = "60"
+ mocked_server_responses.get(
+ url,
+ status=expected_response[0],
+ headers={"Retry-After": expected_retry_after},
+ )
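+        # A numeric Retry-After header should translate into an absolute retry
+        # time of roughly now + 60 seconds, which the assertions below check.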
+
+ # When
+ with pytest.raises(PulseServiceTemporarilyUnavailableError) as exc_info:
+ await query_manager.async_query("/api/data", requires_authentication=False)
+
+ # Then
+ assert exc_info.value.backoff == connection_status.get_backoff()
+ assert exc_info.value.retry_time == int(expected_retry_after) + int(time())
+
+ # can handle ServerTimeoutError
+ @pytest.mark.asyncio
+ async def test_handle_server_timeout_error(self, mocker):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+
+ async def mock_async_query(
+ uri,
+ method="GET",
+ extra_params=None,
+ extra_headers=None,
+ timeout=1,
+ requires_authentication=True,
+ ):
+ raise PulseServerConnectionError("message", connection_status.get_backoff())
+
+ mocker.patch.object(
+ PulseQueryManager, "async_query", side_effect=mock_async_query
+ )
+
+ # When
+ with pytest.raises(PulseServerConnectionError):
+ await query_manager.async_query(
+ "/api/data",
+ method="GET",
+ extra_params=None,
+ extra_headers=None,
+ timeout=1,
+ requires_authentication=True,
+ )
+
+ # can handle missing API version
+ @pytest.mark.asyncio
+ async def test_handle_missing_api_version(self, mocker):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (200, "Response", URL("http://example.com"))
+
+ async def mock_async_query(
+ uri, method, extra_params, extra_headers, timeout, requires_authentication
+ ):
+ return expected_response
+
+ mocker.patch.object(
+ PulseQueryManager, "async_query", side_effect=mock_async_query
+ )
+
+ # When
+ await query_manager.async_fetch_version()
+
+ # Then
+ assert connection_properties.api_version is not None
+
+ # can handle valid method parameter
+ @pytest.mark.asyncio
+ async def test_valid_method_parameter(
+ self, mocked_server_responses, get_mocked_url, mocker
+ ):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (200, "Response", URL(get_mocked_url("/api/data")))
+
+ mocked_server_responses.get(
+ get_mocked_url("/api/data"), status=200, body="Response"
+ )
+ # When
+ result = await query_manager.async_query(
+ "/api/data", requires_authentication=False
+ )
+
+ # Then
+ assert result == expected_response
+
+ assert query_manager._connection_properties.api_version == MOCKED_API_VERSION
+
+ # can handle ClientResponseError and include backoff in the raised exception
+ @pytest.mark.asyncio
+ async def test_handle_client_response_error_with_backoff(
+ self, mocked_server_responses, get_mocked_url
+ ):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (429, "Too Many Requests", URL("http://example.com"))
+ mocked_server_responses.get(get_mocked_url("/api/data"), status=429)
+ # When
+ with pytest.raises(PulseServiceTemporarilyUnavailableError) as exc_info:
+ await query_manager.async_query("/api/data", requires_authentication=False)
+
+ # can handle invalid Retry-After header value format
+ @pytest.mark.asyncio
+ async def test_handle_invalid_retry_after_header_format(self, mocker):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (503, "Service Unavailable", URL("http://example.com"))
+ retry_after_header = "invalid_format"
+
+ async def mock_async_query(
+ uri, method, extra_params, extra_headers, timeout, requires_authentication
+ ):
+ return expected_response
+
+ mocker.patch.object(
+ PulseQueryManager, "async_query", side_effect=mock_async_query
+ )
+
+ # When
+ response = await query_manager.async_query(
+ "/api/data",
+ method="GET",
+ extra_params=None,
+ extra_headers={"Retry-After": retry_after_header},
+ timeout=1,
+ requires_authentication=True,
+ )
+
+ # Then
+ assert response == expected_response
+
+ # can handle non-numeric Retry-After header value
+ @pytest.mark.asyncio
+ async def test_handle_non_numeric_retry_after_header_value(self, mocker):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (503, "Service Unavailable", URL("http://example.com"))
+ retry_after_header = "Thu, 01 Jan 1970 00:00:00 GMT"
+
+ async def mock_async_query(
+ uri,
+ method="GET",
+ extra_params=None,
+ extra_headers=None,
+ timeout=1,
+ requires_authentication=True,
+ ):
+ return expected_response
+
+ mocker.patch.object(
+ PulseQueryManager, "async_query", side_effect=mock_async_query
+ )
+
+ # When
+ response = await query_manager.async_query(
+ "/api/data", extra_headers={"Retry-After": retry_after_header}
+ )
+
+ # Then
+ assert response == expected_response
+
+ # can handle TimeoutError
+ @pytest.mark.asyncio
+ async def test_handle_timeout_error_fixed(self, mocker):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (0, None, None, None)
+
+ async def mock_async_query(
+ uri,
+ method="GET",
+ extra_params=None,
+ extra_headers=None,
+ timeout=1,
+ requires_authentication=True,
+ ):
+ raise TimeoutError()
+
+ mocker.patch.object(
+ PulseQueryManager, "async_query", side_effect=mock_async_query
+ )
+
+ # When
+ with pytest.raises(TimeoutError):
+ await query_manager.async_query(
+ "/api/data",
+ method="GET",
+ extra_params=None,
+ extra_headers=None,
+ timeout=1,
+ requires_authentication=True,
+ )
+
+        # Then: TimeoutError propagates from the mocked query
+
+ # can handle PulseClientConnectionError
+ @pytest.mark.asyncio
+ async def test_handle_pulse_client_connection_error(
+ self, mocked_server_responses, get_mocked_url
+ ):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ mocked_server_responses.get(
+ get_mocked_url("/api/data"),
+ exception=ClientConnectionError("Network error"),
+ repeat=True,
+ )
+ # When
+ with pytest.raises(PulseClientConnectionError):
+ await query_manager.async_query("/api/data", requires_authentication=False)
+
+ # can handle ServerDisconnectedError
+ @pytest.mark.asyncio
+ async def test_handle_server_disconnected_error(
+ self, mocked_server_responses, get_mocked_url
+ ):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ mocked_server_responses.get(
+ get_mocked_url("/api/data"), exception=ServerDisconnectedError
+ )
+ # When
+ with pytest.raises(PulseServerConnectionError):
+ await query_manager.async_query("/api/data", requires_authentication=False)
+
+ # can handle PulseNotLoggedInError
+ @pytest.mark.asyncio
+ async def test_handle_pulse_not_logged_in_error(self, mocker):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (200, "Response", URL("http://example.com"))
+
+ async def mock_async_query(
+ uri,
+ method="GET",
+ extra_params=None,
+ extra_headers=None,
+ timeout=1,
+ requires_authentication=True,
+ ):
+ backoff = PulseBackoff(
+ "Query:GET /api/data",
+ connection_status.get_backoff().initial_backoff_interval,
+ threshold=0,
+ debug_locks=query_manager._debug_locks,
+ detailed_debug_logging=connection_properties.detailed_debug_logging,
+ )
+ raise PulseNotLoggedInError()
+
+ mocker.patch.object(
+ PulseQueryManager, "async_query", side_effect=mock_async_query
+ )
+
+ # When, Then
+ with pytest.raises(PulseNotLoggedInError):
+ await query_manager.async_query("/api/data")
diff --git a/tests/test_pulse_async.py b/tests/test_pulse_async.py
new file mode 100644
index 0000000..63aba61
--- /dev/null
+++ b/tests/test_pulse_async.py
@@ -0,0 +1,691 @@
+"""Test Pulse Query Manager."""
+
+import asyncio
+import re
+from collections.abc import Generator
+from http.client import responses
+from typing import Any, Callable, Literal
+from unittest.mock import AsyncMock, patch
+
+import aiohttp
+import pytest
+from aioresponses import aioresponses
+from pytest_mock import MockerFixture
+
+from conftest import LoginType, add_custom_response, add_logout, add_signin
+from pyadtpulse.const import (
+ ADT_DEFAULT_POLL_INTERVAL,
+ ADT_DEVICE_URI,
+ ADT_LOGIN_URI,
+ ADT_LOGOUT_URI,
+ ADT_MFA_FAIL_URI,
+ ADT_ORB_URI,
+ ADT_SUMMARY_URI,
+ ADT_SYNC_CHECK_URI,
+ ADT_TIMEOUT_URI,
+ DEFAULT_API_HOST,
+)
+from pyadtpulse.exceptions import (
+ PulseAuthenticationError,
+ PulseConnectionError,
+ PulseGatewayOfflineError,
+ PulseMFARequiredError,
+ PulseNotLoggedInError,
+ PulseServerConnectionError,
+)
+from pyadtpulse.pulse_authentication_properties import PulseAuthenticationProperties
+from pyadtpulse.pyadtpulse_async import PyADTPulseAsync
+
+DEFAULT_SYNC_CHECK = "234532-456432-0"
+NEXT_SYNC_CHECK = "234533-456432-0"
+
+
+def set_keepalive(get_mocked_url, mocked_server_responses, repeat: bool = False):
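+    # Register an empty response for the keepalive (timeout URI) POST;
+    # repeat=True keeps the mock active across multiple keepalive calls.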
+ m = mocked_server_responses
+ m.post(
+ get_mocked_url(ADT_TIMEOUT_URI),
+ body="",
+ content_type="text/html",
+ repeat=repeat,
+ )
+
+
+@pytest.mark.asyncio
+async def test_mocked_responses(
+ read_file: Callable[..., str],
+ mocked_server_responses: aioresponses,
+ get_mocked_mapped_static_responses: dict[str, str],
+ get_mocked_url: Callable[..., str],
+ extract_ids_from_data_directory: list[str],
+):
+ """Fixture to test mocked responses."""
+ static_responses = get_mocked_mapped_static_responses
+ m = mocked_server_responses
+ async with aiohttp.ClientSession() as session:
+ for url, file_name in static_responses.items():
+ # Make an HTTP request to the URL
+ response = await session.get(url)
+
+ # Assert the status code is 200
+ assert response.status == 200
+
+ # Assert the content matches the content of the file
+ expected_content = read_file(file_name)
+ actual_content = await response.text()
+ assert actual_content == expected_content
+ devices = extract_ids_from_data_directory
+ for device_id in devices:
+ response = await session.get(
+ f"{get_mocked_url(ADT_DEVICE_URI)}?id={device_id}"
+ )
+ assert response.status == 200
+ expected_content = read_file(f"device_{device_id}.html")
+ actual_content = await response.text()
+ assert actual_content == expected_content
+
+ # redirects
+ add_custom_response(
+ mocked_server_responses,
+ read_file,
+ get_mocked_url(ADT_LOGIN_URI),
+ file_name="signin.html",
+ )
+ response = await session.get(f"{DEFAULT_API_HOST}/", allow_redirects=True)
+ assert response.status == 200
+ actual_content = await response.text()
+ expected_content = read_file("signin.html")
+ assert actual_content == expected_content
+ add_custom_response(
+ mocked_server_responses,
+ read_file,
+ get_mocked_url(ADT_LOGIN_URI),
+ file_name="signin.html",
+ )
+ response = await session.get(get_mocked_url(ADT_LOGOUT_URI))
+ assert response.status == 200
+ expected_content = read_file("signin.html")
+ actual_content = await response.text()
+ assert actual_content == expected_content
+ add_signin(
+ LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file
+ )
+ response = await session.post(get_mocked_url(ADT_LOGIN_URI))
+ assert response.status == 200
+ expected_content = read_file(static_responses[get_mocked_url(ADT_SUMMARY_URI)])
+ actual_content = await response.text()
+ assert actual_content == expected_content
+ pattern = re.compile(rf"{re.escape(get_mocked_url(ADT_SYNC_CHECK_URI))}/?.*$")
+ m.get(pattern, status=200, body="1-0-0", content_type="text/html")
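+        # The regex matches the sync-check URL with any query string, so the
+        # request below (which adds a "ts" parameter) still hits this mock.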
+ response = await session.get(
+ get_mocked_url(ADT_SYNC_CHECK_URI), params={"ts": "first call"}
+ )
+ assert response.status == 200
+ actual_content = await response.text()
+ expected_content = "1-0-0"
+ assert actual_content == expected_content
+ set_keepalive(get_mocked_url, m)
+ response = await session.post(get_mocked_url(ADT_TIMEOUT_URI))
+
+
+# not sure we need this
+@pytest.fixture
+def wrap_wait_for_update():
+ with patch.object(
+ PyADTPulseAsync,
+ "wait_for_update",
+ new_callable=AsyncMock,
+ spec=PyADTPulseAsync,
+ wraps=PyADTPulseAsync.wait_for_update,
+ ) as wait_for_update:
+ yield wait_for_update
+
+
+@pytest.fixture
+@pytest.mark.asyncio
+async def adt_pulse_instance(
+ mocked_server_responses: aioresponses,
+ extract_ids_from_data_directory: list[str],
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+):
+ """Create an instance of PyADTPulseAsync and login."""
+ p = PyADTPulseAsync("testuser@example.com", "testpassword", "testfingerprint")
+ add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+ await p.async_login()
+ # Assertions after login
+ assert p._pulse_connection_status.authenticated_flag.is_set()
+ assert p._pulse_connection_status.get_backoff().backoff_count == 0
+ assert p._pulse_connection.login_in_progress is False
+ assert p._pulse_connection.login_backoff.backoff_count == 0
+ assert p.site.name == "Robert Lippmann"
+ assert p._timeout_task is not None
+ assert p._timeout_task.get_name() == p._get_timeout_task_name()
+ assert p._sync_task is None
+ assert p.site.zones_as_dict is not None
+ assert len(p.site.zones_as_dict) == len(extract_ids_from_data_directory) - 3
+ return p, mocked_server_responses
+
+
+@pytest.mark.asyncio
+async def test_login(
+ adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+ extract_ids_from_data_directory: list[str],
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+):
+ """Fixture to test login."""
+ p, response = await adt_pulse_instance
+ # make sure everything is there on logout
+
+ assert p._pulse_connection_status.get_backoff().backoff_count == 0
+ assert p._pulse_connection.login_in_progress is False
+ assert p._pulse_connection.login_backoff.backoff_count == 0
+ add_logout(response, get_mocked_url, read_file)
+ add_custom_response(
+ response,
+ read_file,
+ get_mocked_url(ADT_LOGIN_URI),
+ file_name=LoginType.SUCCESS.value,
+ )
+ await p.async_logout()
+ assert not p._pulse_connection_status.authenticated_flag.is_set()
+ assert p._pulse_connection_status.get_backoff().backoff_count == 0
+ assert p._pulse_connection.login_in_progress is False
+ assert p._pulse_connection.login_backoff.backoff_count == 0
+ assert p.site.name == "Robert Lippmann"
+ assert p.site.zones_as_dict is not None
+ assert len(p.site.zones_as_dict) == len(extract_ids_from_data_directory) - 3
+ assert p._timeout_task is None
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+ "test_type",
+ (
+ (LoginType.FAIL, PulseAuthenticationError),
+ (LoginType.NOT_SIGNED_IN, PulseNotLoggedInError),
+ (LoginType.MFA, PulseMFARequiredError),
+ ),
+)
+async def test_login_failures(
+ adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+    test_type: tuple[LoginType, type[Exception]],
+):
+ p, response = await adt_pulse_instance
+ assert p._pulse_connection.login_backoff.backoff_count == 0, "initial"
+ add_logout(response, get_mocked_url, read_file)
+ await p.async_logout()
+ assert p._pulse_connection.login_backoff.backoff_count == 0, "post logout"
+
+ assert p._pulse_connection.login_backoff.backoff_count == 0, str(test_type[0])
+ add_signin(test_type[0], response, get_mocked_url, read_file)
+ with pytest.raises(test_type[1]):
+ await p.async_login()
+ await asyncio.sleep(1)
+ assert p._timeout_task is None or p._timeout_task.done()
+ assert p._pulse_connection.login_backoff.backoff_count == 0, str(test_type)
+ add_signin(LoginType.SUCCESS, response, get_mocked_url, read_file)
+ await p.async_login()
+ assert p._pulse_connection.login_backoff.backoff_count == 0
+
+
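+# Helper used by several tests: keep calling wait_for_update() until the shutdown
+# event is set or the task is cancelled.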
+async def do_wait_for_update(p: PyADTPulseAsync, shutdown_event: asyncio.Event):
+ while not shutdown_event.is_set():
+ try:
+ await p.wait_for_update()
+ except asyncio.CancelledError:
+ break
+
+
+@pytest.mark.asyncio
+async def test_wait_for_update(
+ adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+):
+ p, responses = await adt_pulse_instance
+ shutdown_event = asyncio.Event()
+ task = asyncio.create_task(do_wait_for_update(p, shutdown_event))
+ await p.async_logout()
+ assert p._sync_task is None
+ assert p.site.name == "Robert Lippmann"
+ with pytest.raises(PulseNotLoggedInError):
+ await task
+
+ # test exceptions
+ # check we can't wait for update if not logged in
+ with pytest.raises(PulseNotLoggedInError):
+ await p.wait_for_update()
+
+ add_signin(LoginType.SUCCESS, responses, get_mocked_url, read_file)
+ await p.async_login()
+ await p.async_logout()
+
+
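+# aioresponses matches the full request URL, so a regex is used to tolerate the
+# varying 'ts' query parameter on sync check calls.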
+def make_sync_check_pattern(get_mocked_url):
+ return re.compile(rf"{re.escape(get_mocked_url(ADT_SYNC_CHECK_URI))}/?.*$")
+
+
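+# Parametrized in two passes: test_requests=False just replays the mocked responses to
+# sanity check aioresponses; True drives wait_for_update() through login/logout cycles
+# and verifies the patio/garage zone states change.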
+@pytest.mark.asyncio
+@pytest.mark.parametrize("test_requests", (False, True))
+@pytest.mark.timeout(60)
+async def test_orb_update(
+ adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+ test_requests: bool,
+):
+ p, response = await adt_pulse_instance
+ pattern = make_sync_check_pattern(get_mocked_url)
+
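+    # Each helper below queues an orb page plus the sync check sequence the poller
+    # will see: the default value, two change indicators ('1-0-0', '2-0-0'), then two
+    # NEXT_SYNC_CHECK responses.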
+ def signal_status_change():
+ response.get(
+ pattern,
+ body=DEFAULT_SYNC_CHECK,
+ content_type="text/html",
+ )
+ response.get(pattern, body="1-0-0", content_type="text/html")
+ response.get(pattern, body="2-0-0", content_type="text/html")
+ response.get(
+ pattern,
+ body=NEXT_SYNC_CHECK,
+ content_type="text/html",
+ )
+ response.get(
+ pattern,
+ body=NEXT_SYNC_CHECK,
+ content_type="text/html",
+ )
+
+ def open_patio():
+ response.get(
+ get_mocked_url(ADT_ORB_URI),
+ body=read_file("orb_patio_opened.html"),
+ content_type="text/html",
+ )
+ signal_status_change()
+
+ def close_all():
+ response.get(
+ get_mocked_url(ADT_ORB_URI),
+ body=read_file("orb.html"),
+ content_type="text/html",
+ )
+ signal_status_change()
+
+ def open_garage():
+ response.get(
+ get_mocked_url(ADT_ORB_URI),
+ body=read_file("orb_garage.html"),
+ content_type="text/html",
+ )
+ signal_status_change()
+
+ def open_both_garage_and_patio():
+ response.get(
+ get_mocked_url(ADT_ORB_URI),
+ body=read_file("orb_patio_garage.html"),
+ content_type="text/html",
+ )
+ signal_status_change()
+
+ def setup_sync_check():
+ open_patio()
+ close_all()
+
+ async def test_sync_check_and_orb():
+ code, content, _ = await p._pulse_connection.async_query(
+ ADT_ORB_URI, requires_authentication=False
+ )
+ assert code == 200
+ assert content == read_file("orb_patio_opened.html")
+ await asyncio.sleep(1)
+ code, content, _ = await p._pulse_connection.async_query(
+ ADT_ORB_URI, requires_authentication=False
+ )
+ assert code == 200
+ assert content == read_file("orb.html")
+ await asyncio.sleep(1)
+        code, content, _ = await p._pulse_connection.async_query(
+            ADT_SYNC_CHECK_URI, requires_authentication=False
+        )
+        assert code == 200
+        assert content == DEFAULT_SYNC_CHECK
+ code, content, _ = await p._pulse_connection.async_query(
+ ADT_SYNC_CHECK_URI, requires_authentication=False
+ )
+ assert code == 200
+ assert content == "1-0-0"
+ code, content, _ = await p._pulse_connection.async_query(
+ ADT_SYNC_CHECK_URI, requires_authentication=False
+ )
+ assert code == 200
+ assert content == "2-0-0"
+ code, content, _ = await p._pulse_connection.async_query(
+ ADT_SYNC_CHECK_URI, requires_authentication=False
+ )
+ assert code == 200
+ assert content == NEXT_SYNC_CHECK
+ code, content, _ = await p._pulse_connection.async_query(
+ ADT_SYNC_CHECK_URI, requires_authentication=False
+ )
+ assert code == 200
+ assert content == NEXT_SYNC_CHECK
+
+    # do a first run through to make sure aioresponses will work ok
+ if not test_requests:
+ setup_sync_check()
+ await test_sync_check_and_orb()
+ await p.async_logout()
+ assert p._sync_task is None
+ assert p._timeout_task is None
+ return
+ await p.async_logout()
+ for j in range(2):
+ if j == 0:
+ zone = 11
+ else:
+ zone = 10
+ for i in range(2):
+ if i == 0:
+ if j == 0:
+ open_patio()
+ else:
+ open_garage()
+ state = "Open"
+ else:
+ close_all()
+ state = "OK"
+ add_signin(LoginType.SUCCESS, response, get_mocked_url, read_file)
+ await p.async_login()
+ await p.wait_for_update()
+ await p.async_logout()
+ assert len(p.site.zones) == 13
+ assert p.site.zones_as_dict[zone].state == state
+ assert p._sync_task is not None
+
+
+@pytest.mark.asyncio
+async def test_keepalive_check(
+ adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+):
+ p, response = await adt_pulse_instance
+ assert p._timeout_task is not None
+ await asyncio.sleep(0)
+
+
+@pytest.mark.asyncio
+async def test_infinite_sync_check(
+ adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+):
+ p, response = await adt_pulse_instance
+    pattern = make_sync_check_pattern(get_mocked_url)
+ response.get(
+ pattern,
+ body=DEFAULT_SYNC_CHECK,
+ content_type="text/html",
+ repeat=True,
+ )
+ shutdown_event = asyncio.Event()
+ shutdown_event.clear()
+ task = asyncio.create_task(do_wait_for_update(p, shutdown_event))
+ await asyncio.sleep(5)
+ shutdown_event.set()
+ task.cancel()
+ await task
+
+
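+# A redirect from sync check to the login (or MFA) page while polling should surface
+# the matching login exception and leave the background tasks stopped.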
+@pytest.mark.asyncio
+async def test_sync_check_errors(
+ adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+    mocker: MockerFixture,
+):
+ p, response = await adt_pulse_instance
+    pattern = make_sync_check_pattern(get_mocked_url)
+
+ shutdown_event = asyncio.Event()
+ shutdown_event.clear()
+ for test_type in (
+ (LoginType.FAIL, PulseAuthenticationError),
+ (LoginType.NOT_SIGNED_IN, PulseNotLoggedInError),
+ (LoginType.MFA, PulseMFARequiredError),
+ ):
+ redirect = ADT_LOGIN_URI
+ if test_type[0] == LoginType.MFA:
+ redirect = ADT_MFA_FAIL_URI
+ response.get(
+ pattern, status=302, headers={"Location": get_mocked_url(redirect)}
+ )
+ add_signin(test_type[0], response, get_mocked_url, read_file)
+ task = asyncio.create_task(do_wait_for_update(p, shutdown_event))
+ with pytest.raises(test_type[1]):
+ await task
+ await asyncio.sleep(0.5)
+ assert p._sync_task is None or p._sync_task.done()
+ assert p._timeout_task is None or p._timeout_task.done()
+ if test_type[0] == LoginType.MFA:
+ # pop the post MFA redirect from the responses
+ with pytest.raises(PulseMFARequiredError):
+ await p.async_login()
+ add_signin(LoginType.SUCCESS, response, get_mocked_url, read_file)
+ if test_type[0] != LoginType.LOCKED:
+ await p.async_login()
+
+
+@pytest.mark.asyncio
+async def test_multiple_login(
+ adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+ extract_ids_from_data_directory: list[str],
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+):
+ p, response = await adt_pulse_instance
+ add_signin(LoginType.SUCCESS, response, get_mocked_url, read_file)
+ await p.async_login()
+ assert p.site.zones_as_dict is not None
+ assert len(p.site.zones_as_dict) == len(extract_ids_from_data_directory) - 3
+ add_logout(response, get_mocked_url, read_file)
+ await p.async_logout()
+ assert p.site.zones_as_dict is not None
+ assert len(p.site.zones_as_dict) == len(extract_ids_from_data_directory) - 3
+ add_signin(LoginType.SUCCESS, response, get_mocked_url, read_file)
+ await p.async_login()
+ assert p.site.zones_as_dict is not None
+ assert len(p.site.zones_as_dict) == len(extract_ids_from_data_directory) - 3
+ add_signin(LoginType.SUCCESS, response, get_mocked_url, read_file)
+ assert p.site.zones_as_dict is not None
+ assert len(p.site.zones_as_dict) == len(extract_ids_from_data_directory) - 3
+
+
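+# The orb page initially reports the gateway offline, so wait_for_update() raises
+# PulseGatewayOfflineError and backs off; once orb.html is served again the gateway
+# should come back online with its backoff reset.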
+@pytest.mark.timeout(180)
+@pytest.mark.asyncio
+async def test_gateway_offline(
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+ adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+):
+ p, response = await adt_pulse_instance
+ pattern = make_sync_check_pattern(get_mocked_url)
+ response.get(
+ get_mocked_url(ADT_ORB_URI), body=read_file("orb_gateway_offline.html")
+ )
+ response.get(
+ pattern,
+ body=DEFAULT_SYNC_CHECK,
+ content_type="text/html",
+ )
+ response.get(
+ pattern,
+ body="1-0-0",
+ content_type="text/html",
+ )
+ response.get(
+ pattern,
+ body=DEFAULT_SYNC_CHECK,
+ content_type="text/html",
+ )
+ response.get(
+ pattern,
+ body=DEFAULT_SYNC_CHECK,
+ content_type="text/html",
+ )
+ num_backoffs = 3
+ for i in range(3):
+ response.get(
+ pattern,
+ body=DEFAULT_SYNC_CHECK,
+ content_type="text/html",
+ )
+ # success case
+ response.get(get_mocked_url(ADT_ORB_URI), body=read_file("orb.html"), repeat=True)
+ response.get(
+ pattern,
+ body="1-0-0",
+ content_type="text/html",
+ )
+ response.get(
+ pattern, body=DEFAULT_SYNC_CHECK, content_type="text/html", repeat=True
+ )
+ add_logout(response, get_mocked_url, read_file)
+ assert p.site.gateway.poll_interval == 2.0
+ # FIXME: why + 2?
+ for i in range(num_backoffs + 2):
+ with pytest.raises(PulseGatewayOfflineError):
+ await p.wait_for_update()
+
+ await p.wait_for_update()
+ assert p.site.gateway.is_online
+ assert p.site.gateway.backoff.get_current_backoff_interval() == 0
+
+ await p.async_logout()
+
+
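+# wait_for_update() must raise PulseNotLoggedInError before the first login and again
+# after logout, including after a background polling task has been running.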
+@pytest.mark.asyncio
+async def test_not_logged_in(
+ mocked_server_responses: aioresponses,
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+):
+ p = PyADTPulseAsync("testuser@example.com", "testpassword", "testfingerprint")
+ add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+ add_logout(mocked_server_responses, get_mocked_url, read_file)
+ with pytest.raises(PulseNotLoggedInError):
+ await p.wait_for_update()
+ with pytest.raises(PulseNotLoggedInError):
+ await p.wait_for_update()
+ await p.async_login()
+ await p.async_logout()
+ with pytest.raises(PulseNotLoggedInError):
+ await p.wait_for_update()
+ with pytest.raises(PulseNotLoggedInError):
+ await p.wait_for_update()
+ add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+ add_logout(mocked_server_responses, get_mocked_url, read_file)
+ pattern = make_sync_check_pattern(get_mocked_url)
+ mocked_server_responses.get(
+ pattern, body=DEFAULT_SYNC_CHECK, content_type="text/html", repeat=True
+ )
+ mocked_server_responses.get(
+ get_mocked_url(ADT_ORB_URI), body=read_file("orb.html"), repeat=True
+ )
+ await p.async_login()
+ task = asyncio.create_task(do_wait_for_update(p, asyncio.Event()))
+ await asyncio.sleep(ADT_DEFAULT_POLL_INTERVAL * 5)
+ await p.async_logout()
+ with pytest.raises(PulseNotLoggedInError):
+ await task
+ with pytest.raises(PulseNotLoggedInError):
+ await p.wait_for_update()
+ await asyncio.sleep(ADT_DEFAULT_POLL_INTERVAL * 2)
+ with pytest.raises(PulseNotLoggedInError):
+ await p.wait_for_update()
+
+
+@pytest.mark.asyncio
+@pytest.mark.timeout(120)
+async def test_connection_fails_wait_for_update(
+ mocked_server_responses: aioresponses,
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+):
+ p = PyADTPulseAsync("testuser@example.com", "testpassword", "testfingerprint")
+ add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+ add_logout(mocked_server_responses, get_mocked_url, read_file)
+ mocked_server_responses.get(
+ get_mocked_url(ADT_ORB_URI), body=read_file("orb.html"), repeat=True
+ )
+ await p.async_login()
+ with pytest.raises(PulseConnectionError):
+ await do_wait_for_update(p, asyncio.Event())
+ await p.async_logout()
+
+
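+# After the queued sync check responses run out, polling should raise
+# PulseServerConnectionError and back off; once responses resume, wait_for_update()
+# succeeds again.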
+@pytest.mark.timeout(180)
+@pytest.mark.asyncio
+async def test_sync_check_disconnect(
+ adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+ read_file: Callable[..., str],
+ get_mocked_url: Callable[..., str],
+):
+ p, responses = await adt_pulse_instance
+ add_logout(responses, get_mocked_url, read_file)
+ pattern = make_sync_check_pattern(get_mocked_url)
+ responses.get(pattern, body=DEFAULT_SYNC_CHECK, content_type="text/html")
+ responses.get(get_mocked_url(ADT_ORB_URI), body=read_file("orb.html"), repeat=True)
+ while p._pulse_connection_status.get_backoff().get_current_backoff_interval() < 15:
+ with pytest.raises(PulseServerConnectionError):
+ await p.wait_for_update()
+ # check recovery
+ responses.get(pattern, body="1-0-0", content_type="text/html")
+ responses.get(
+ pattern, body=DEFAULT_SYNC_CHECK, content_type="text/html", repeat=True
+ )
+ await p.wait_for_update()
+ await p.async_logout()
+
+
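+# A 307 redirect from sync check back to the login page should trigger a re-login;
+# the test verifies last_login_time advances afterward.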
+@pytest.mark.asyncio
+@pytest.mark.timeout(60)
+async def test_sync_check_relogin(
+ adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+):
+ p, responses = await adt_pulse_instance
+ pa: PulseAuthenticationProperties = p._authentication_properties
+ login_time = pa.last_login_time
+ # fail redirect
+ add_signin(LoginType.NOT_SIGNED_IN, responses, get_mocked_url, read_file)
+ # successful login afterward
+ add_signin(LoginType.SUCCESS, responses, get_mocked_url, read_file)
+ add_logout(responses, get_mocked_url, read_file)
+ pattern = make_sync_check_pattern(get_mocked_url)
+ for _ in range(3):
+ responses.get(pattern, body=DEFAULT_SYNC_CHECK, content_type="text/html")
+ responses.get(
+ pattern,
+ body="",
+ content_type="text/html",
+ status=307,
+ headers={"Location": get_mocked_url(ADT_LOGIN_URI)},
+ )
+ # resume normal operation
+ # signal update to stop wait for update
+ responses.get(pattern, body="1-0-0", content_type="text/html")
+ responses.get(
+ pattern, body=DEFAULT_SYNC_CHECK, content_type="text/html", repeat=True
+ )
+ responses.get(get_mocked_url(ADT_ORB_URI), body=read_file("orb.html"), repeat=True)
+
+ await p.wait_for_update()
+ assert p._authentication_properties.last_login_time > login_time
+ await p.async_logout()
diff --git a/tests/test_pulse_connection.py b/tests/test_pulse_connection.py
new file mode 100644
index 0000000..1ec6688
--- /dev/null
+++ b/tests/test_pulse_connection.py
@@ -0,0 +1,220 @@
+"""Test Pulse Connection."""
+
+import asyncio
+import datetime
+
+import pytest
+from bs4 import BeautifulSoup
+
+from conftest import LoginType, add_custom_response, add_signin
+from pyadtpulse.const import ADT_LOGIN_URI, DEFAULT_API_HOST
+from pyadtpulse.exceptions import (
+ PulseAccountLockedError,
+ PulseAuthenticationError,
+ PulseMFARequiredError,
+ PulseServerConnectionError,
+)
+from pyadtpulse.pulse_authentication_properties import PulseAuthenticationProperties
+from pyadtpulse.pulse_connection import PulseConnection
+from pyadtpulse.pulse_connection_properties import PulseConnectionProperties
+from pyadtpulse.pulse_connection_status import PulseConnectionStatus
+from pyadtpulse.pulse_query_manager import MAX_REQUERY_RETRIES
+
+
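+# Build a PulseConnection wired to fresh status/properties objects and dummy credentials.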
+def setup_pulse_connection() -> PulseConnection:
+ s = PulseConnectionStatus()
+ pcp = PulseConnectionProperties(DEFAULT_API_HOST)
+ pa = PulseAuthenticationProperties(
+ "test@example.com", "testpassword", "testfingerprint"
+ )
+ pc = PulseConnection(s, pcp, pa)
+ return pc
+
+
+@pytest.mark.asyncio
+async def test_login(mocked_server_responses, read_file, mock_sleep, get_mocked_url):
+ """Test Pulse Connection."""
+ pc = setup_pulse_connection()
+ add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+ # first call to signin post is successful in conftest.py
+ result = await pc.async_do_login_query()
+ assert result == BeautifulSoup(read_file("summary.html"), "html.parser")
+ assert mock_sleep.call_count == 0
+ assert pc.login_in_progress is False
+ assert pc._login_backoff.backoff_count == 0
+ assert pc._connection_status.authenticated_flag.is_set()
+ # so logout won't fail
+ add_custom_response(
+ mocked_server_responses, read_file, get_mocked_url(ADT_LOGIN_URI)
+ )
+ await pc.async_do_logout_query()
+ assert not pc._connection_status.authenticated_flag.is_set()
+ assert mock_sleep.call_count == 0
+ assert pc._login_backoff.backoff_count == 0
+
+
+@pytest.mark.asyncio
+async def test_login_failure_server_down(mock_server_down):
+ pc = setup_pulse_connection()
+ with pytest.raises(PulseServerConnectionError):
+ await pc.async_do_login_query()
+ assert pc.login_in_progress is False
+ assert pc._login_backoff.backoff_count == 0
+
+
+@pytest.mark.asyncio
+async def test_multiple_login(
+ mocked_server_responses, get_mocked_url, read_file, mock_sleep
+):
+ """Test Pulse Connection."""
+ pc = setup_pulse_connection()
+ add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+ result = await pc.async_do_login_query()
+ assert result == BeautifulSoup(read_file("summary.html"), "html.parser")
+ assert mock_sleep.call_count == 0
+ assert pc.login_in_progress is False
+ assert pc._login_backoff.backoff_count == 0
+ assert pc._connection_status.authenticated_flag.is_set()
+ add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+ await pc.async_do_login_query()
+ assert mock_sleep.call_count == 0
+ assert pc.login_in_progress is False
+ assert pc._login_backoff.backoff_count == 0
+ assert pc._connection_status.get_backoff().backoff_count == 0
+ assert pc._connection_status.authenticated_flag.is_set()
+ # this should fail
+ with pytest.raises(PulseServerConnectionError):
+ await pc.async_do_login_query()
+ assert mock_sleep.call_count == MAX_REQUERY_RETRIES - 1
+ assert pc.login_in_progress is False
+ assert pc._login_backoff.backoff_count == 0
+ assert pc._connection_status.get_backoff().backoff_count == 1
+ assert not pc._connection_status.authenticated_flag.is_set()
+ assert not pc.is_connected
+ with pytest.raises(PulseServerConnectionError):
+ await pc.async_do_login_query()
+ assert pc._login_backoff.backoff_count == 0
+ # 2 retries first time, 1 for the connection backoff
+ assert mock_sleep.call_count == MAX_REQUERY_RETRIES
+ assert pc.login_in_progress is False
+
+ assert pc._connection_status.get_backoff().backoff_count == 2
+ assert not pc._connection_status.authenticated_flag.is_set()
+ assert not pc.is_connected
+ add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+ await pc.async_do_login_query()
+    # will just do a connection backoff
+ assert mock_sleep.call_count == MAX_REQUERY_RETRIES + 1
+ assert pc.login_in_progress is False
+ assert pc._login_backoff.backoff_count == 0
+ assert pc._connection_status.authenticated_flag.is_set()
+
+ add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+ await pc.async_do_login_query()
+ # shouldn't sleep at all
+ assert mock_sleep.call_count == MAX_REQUERY_RETRIES + 1
+ assert pc.login_in_progress is False
+ assert pc._login_backoff.backoff_count == 0
+ assert pc._connection_status.authenticated_flag.is_set()
+
+
+@pytest.mark.asyncio
+async def test_account_lockout(
+ mocked_server_responses, mock_sleep, get_mocked_url, read_file, freeze_time_to_now
+):
+ pc = setup_pulse_connection()
+ add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+ await pc.async_do_login_query()
+ assert mock_sleep.call_count == 0
+ assert pc.login_in_progress is False
+ assert pc._login_backoff.backoff_count == 0
+ assert pc.is_connected
+ assert pc._connection_status.authenticated_flag.is_set()
+ add_signin(LoginType.LOCKED, mocked_server_responses, get_mocked_url, read_file)
+ with pytest.raises(PulseAccountLockedError):
+ await pc.async_do_login_query()
+ # won't sleep yet
+ assert not pc.is_connected
+ assert not pc._connection_status.authenticated_flag.is_set()
+ # don't set backoff on locked account, just set expiration time on backoff
+ assert pc._login_backoff.backoff_count == 0
+ assert mock_sleep.call_count == 0
+ freeze_time_to_now.tick(delta=datetime.timedelta(seconds=(60 * 30) + 1))
+ add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+ await pc.async_do_login_query()
+ assert mock_sleep.call_count == 0
+ assert pc.is_connected
+ assert pc._connection_status.authenticated_flag.is_set()
+ freeze_time_to_now.tick(delta=datetime.timedelta(seconds=60 * 30 + 1))
+ add_signin(LoginType.LOCKED, mocked_server_responses, get_mocked_url, read_file)
+ with pytest.raises(PulseAccountLockedError):
+ await pc.async_do_login_query()
+ assert pc._login_backoff.backoff_count == 0
+ assert mock_sleep.call_count == 0
+
+
+@pytest.mark.asyncio
+async def test_invalid_credentials(
+ mocked_server_responses, mock_sleep, get_mocked_url, read_file
+):
+ pc = setup_pulse_connection()
+ add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+ await pc.async_do_login_query()
+ assert mock_sleep.call_count == 0
+ assert pc.login_in_progress is False
+ assert pc._login_backoff.backoff_count == 0
+ add_signin(LoginType.FAIL, mocked_server_responses, get_mocked_url, read_file)
+ with pytest.raises(PulseAuthenticationError):
+ await pc.async_do_login_query()
+ assert pc._login_backoff.backoff_count == 0
+ assert mock_sleep.call_count == 0
+ add_signin(LoginType.FAIL, mocked_server_responses, get_mocked_url, read_file)
+
+ with pytest.raises(PulseAuthenticationError):
+ await pc.async_do_login_query()
+ assert pc._login_backoff.backoff_count == 0
+ assert mock_sleep.call_count == 0
+ add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+ assert pc._login_backoff.backoff_count == 0
+ assert mock_sleep.call_count == 0
+
+
+@pytest.mark.asyncio
+async def test_mfa_failure(mocked_server_responses, get_mocked_url, read_file):
+ pc = setup_pulse_connection()
+ add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+ await pc.async_do_login_query()
+ assert pc.login_in_progress is False
+ assert pc._login_backoff.backoff_count == 0
+ add_signin(LoginType.MFA, mocked_server_responses, get_mocked_url, read_file)
+ with pytest.raises(PulseMFARequiredError):
+ await pc.async_do_login_query()
+ assert pc._login_backoff.backoff_count == 0
+ add_signin(LoginType.MFA, mocked_server_responses, get_mocked_url, read_file)
+ with pytest.raises(PulseMFARequiredError):
+ await pc.async_do_login_query()
+ assert pc._login_backoff.backoff_count == 0
+ add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+ await pc.async_do_login_query()
+ assert pc._login_backoff.backoff_count == 0
+
+
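+# With the backoff delaying the first login, a second concurrent login should return
+# while the first is still in progress instead of issuing another sign-in.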
+@pytest.mark.asyncio
+async def test_only_single_login(mocked_server_responses, get_mocked_url, read_file):
+ async def login_task():
+ await pc.async_do_login_query()
+
+ pc = setup_pulse_connection()
+ add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+ # delay one task for a little bit
+ for i in range(4):
+ pc._login_backoff.increment_backoff()
+ task1 = asyncio.create_task(login_task())
+ task2 = asyncio.create_task(login_task())
+ await task2
+ assert pc.login_in_progress
+ assert not pc.is_connected
+ assert not task1.done()
+ await task1
+ assert not pc.login_in_progress
+ assert pc.is_connected
diff --git a/tests/test_pulse_connection_properties.py b/tests/test_pulse_connection_properties.py
new file mode 100644
index 0000000..0051325
--- /dev/null
+++ b/tests/test_pulse_connection_properties.py
@@ -0,0 +1,374 @@
+# Generated by CodiumAI
+from asyncio import AbstractEventLoop
+
+import pytest
+from aiohttp import ClientSession
+
+from pyadtpulse.const import ADT_DEFAULT_HTTP_USER_AGENT, API_HOST_CA, DEFAULT_API_HOST
+from pyadtpulse.pulse_connection_properties import PulseConnectionProperties
+
+
+class TestPulseConnectionProperties:
+ # Initialize PulseConnectionProperties with valid host
+ @pytest.mark.asyncio
+ async def test_initialize_with_valid_host(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+
+ # Act
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Assert
+ assert connection_properties.service_host == host
+ assert connection_properties._user_agent == user_agent
+ assert connection_properties._detailed_debug_logging == detailed_debug_logging
+ assert connection_properties._debug_locks == debug_locks
+
+ # Set service host to default API host
+ @pytest.mark.asyncio
+ async def test_set_service_host_to_default_api_host(self):
+ # Arrange
+ host = DEFAULT_API_HOST
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act
+ connection_properties.service_host = DEFAULT_API_HOST
+
+ # Assert
+ assert connection_properties.service_host == DEFAULT_API_HOST
+
+ # Set service host to API host CA
+ @pytest.mark.asyncio
+ async def test_set_service_host_to_api_host_ca(self):
+ # Arrange
+ host = DEFAULT_API_HOST
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act
+ connection_properties.service_host = API_HOST_CA
+
+ # Assert
+ assert connection_properties.service_host == API_HOST_CA
+
+ # Get the service host
+ @pytest.mark.asyncio
+ async def test_get_service_host(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act & Assert
+ assert connection_properties.service_host == host
+
+ # Set detailed debug logging to True
+ @pytest.mark.asyncio
+ async def test_set_detailed_debug_logging_to_true(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act
+ connection_properties.detailed_debug_logging = True
+
+ # Assert
+ assert connection_properties.detailed_debug_logging is True
+
+ # Set detailed debug logging to False
+ @pytest.mark.asyncio
+ async def test_set_detailed_debug_logging_to_false(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = True
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act
+ connection_properties.detailed_debug_logging = False
+
+ # Assert
+ assert connection_properties.detailed_debug_logging is False
+
+ # Initialize PulseConnectionProperties with invalid host raises ValueError
+ @pytest.mark.asyncio
+ async def test_initialize_with_invalid_host_raises_value_error(self):
+ # Arrange
+ host = ""
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+
+ # Act & Assert
+ with pytest.raises(ValueError):
+ PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Set service host to valid host does not raise ValueError
+ @pytest.mark.asyncio
+ async def test_set_service_host_to_valid_host_does_not_raise_value_error(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act & Assert
+ connection_properties.service_host = host
+
+ # Set API version to invalid version raises ValueError
+ @pytest.mark.asyncio
+ async def test_set_api_version_to_invalid_version_raises_value_error(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act & Assert
+ with pytest.raises(ValueError):
+ connection_properties.api_version = "1.0"
+
+ # Check sync without setting the event loop raises RuntimeError
+ @pytest.mark.asyncio
+ async def test_check_sync_without_setting_event_loop_raises_runtime_error(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act & Assert
+ with pytest.raises(RuntimeError):
+ connection_properties.check_sync("Sync login was not performed")
+
+ # Get the detailed debug logging flag
+ @pytest.mark.asyncio
+ async def test_get_detailed_debug_logging_flag(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = True
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act
+ result = connection_properties.detailed_debug_logging
+
+ # Assert
+ assert result == detailed_debug_logging
+
+ # Set debug locks to True with a valid service host
+ @pytest.mark.asyncio
+ async def test_set_debug_locks_to_true_with_valid_service_host(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = True
+
+ # Act
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Assert
+ assert connection_properties.service_host == host
+ assert connection_properties._user_agent == user_agent
+ assert connection_properties._detailed_debug_logging == detailed_debug_logging
+ assert connection_properties._debug_locks == debug_locks
+
+ # Get the debug locks flag
+ @pytest.mark.asyncio
+ async def test_get_debug_locks_flag(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = True
+
+ # Act
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Assert
+ assert connection_properties.debug_locks == debug_locks
+
+ # Set debug locks to False with a valid service host
+ @pytest.mark.asyncio
+ async def test_set_debug_locks_to_false_with_valid_service_host(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+
+ # Act
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Assert
+ assert connection_properties.debug_locks == debug_locks
+
+ # Set the event loop
+ @pytest.mark.asyncio
+ async def test_set_event_loop(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ loop = AbstractEventLoop()
+
+ # Act
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+ connection_properties.loop = loop
+
+ # Assert
+ assert connection_properties.loop == loop
+
+ # Get the event loop
+ @pytest.mark.asyncio
+ async def test_get_event_loop(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act
+ event_loop = connection_properties.loop
+
+ # Assert
+ assert event_loop is None
+
+ # Set the API version
+ @pytest.mark.asyncio
+ async def test_set_api_version(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ version = "26.0.0-subpatch"
+
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act
+ with pytest.raises(ValueError):
+ connection_properties.api_version = version
+ version = "26.0.0"
+ with pytest.raises(ValueError):
+ connection_properties.api_version = version
+ version = "25.0.0-22"
+ with pytest.raises(ValueError):
+ connection_properties.api_version = version
+ version = "26.0.0-22"
+ connection_properties.api_version = version
+ # Assert
+ assert connection_properties.api_version == version
+
+ # Get the API version
+ @pytest.mark.asyncio
+ async def test_get_api_version(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ response_path = "example.com/api/v1"
+
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act
+ api_version = connection_properties.get_api_version(response_path)
+
+ # Assert
+ assert api_version is None
+
+ # Get the session with a valid host
+ @pytest.mark.asyncio
+ async def test_get_session_with_valid_host(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act
+ session = connection_properties.session
+
+ # Assert
+ assert isinstance(session, ClientSession)
+ assert connection_properties._session == session
+
+ # Check async after setting the event loop raises RuntimeError
+ @pytest.mark.asyncio
+ async def test_check_async_after_setting_event_loop_raises_runtime_error(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act
+ connection_properties.loop = AbstractEventLoop()
+
+ # Assert
+ with pytest.raises(RuntimeError):
+ connection_properties.check_async("Async login not performed")
diff --git a/tests/test_pulse_connection_status.py b/tests/test_pulse_connection_status.py
new file mode 100644
index 0000000..7a0260d
--- /dev/null
+++ b/tests/test_pulse_connection_status.py
@@ -0,0 +1,186 @@
+# Generated by CodiumAI
+import pytest
+
+from pyadtpulse.pulse_backoff import PulseBackoff
+from pyadtpulse.pulse_connection_status import PulseConnectionStatus
+
+
+class TestPulseConnectionStatus:
+ # PulseConnectionStatus can be initialized without errors
+ def test_initialized_without_errors(self):
+ """
+ Test that PulseConnectionStatus can be initialized without errors.
+ """
+ pcs = PulseConnectionStatus()
+ assert pcs is not None
+
+ # authenticated_flag can be accessed without errors
+ def test_access_authenticated_flag(self):
+ """
+ Test that authenticated_flag can be accessed without errors.
+ """
+ pcs = PulseConnectionStatus()
+ authenticated_flag = pcs.authenticated_flag
+ assert authenticated_flag is not None
+
+ # retry_after can be accessed without errors
+ def test_access_retry_after(self):
+ """
+ Test that retry_after can be accessed without errors.
+ """
+ pcs = PulseConnectionStatus()
+ retry_after = pcs.retry_after
+ assert retry_after is not None
+
+ # retry_after can be set without errors
+ def test_set_retry_after(self):
+ """
+ Test that retry_after can be set without errors.
+ """
+ import time
+
+ pcs = PulseConnectionStatus()
+ current_time = time.time()
+ retry_time = current_time + 1000
+ pcs.retry_after = retry_time
+ assert pcs.retry_after == retry_time
+
+ # get_backoff returns a PulseBackoff object
+ def test_get_backoff(self):
+ """
+ Test that get_backoff returns a PulseBackoff object.
+ """
+ pcs = PulseConnectionStatus()
+ backoff = pcs.get_backoff()
+ assert isinstance(backoff, PulseBackoff)
+
+ # increment_backoff can be called without errors
+ def test_increment_backoff(self):
+ """
+ Test that increment_backoff can be called without errors.
+ """
+ pcs = PulseConnectionStatus()
+ pcs.get_backoff().increment_backoff()
+
+    # setting retry_after to a time in the past raises ValueError
+    def test_set_retry_after_past_time_fixed(self):
+        """
+        Test that setting retry_after to a time in the past raises ValueError.
+        """
+ import time
+
+ pcs = PulseConnectionStatus()
+ current_time = time.time()
+ past_time = current_time - 10.0
+ with pytest.raises(ValueError):
+ pcs.retry_after = past_time
+
+ # retry_after can be set to a time in the future
+ def test_set_retry_after_future_time_fixed(self):
+ """
+ Test that retry_after can be set to a time in the future.
+ """
+ import time
+
+ pcs = PulseConnectionStatus()
+ pcs.retry_after = time.time() + 10.0
+ assert pcs.retry_after > time.time()
+
+ # retry_after can be set to a positive value greater than the current time
+ def test_set_retry_after_negative_value_fixed(self):
+ """
+ Test that retry_after can be set to a positive value greater than the current time.
+ """
+ from time import time
+
+ pcs = PulseConnectionStatus()
+ retry_after_time = time() + 10.0
+ pcs.retry_after = retry_after_time
+ assert pcs.retry_after == retry_after_time
+
+ # retry_after can be set to a very large value
+ def test_set_retry_after_large_value(self):
+ """
+ Test that retry_after can be set to a very large value.
+ """
+ pcs = PulseConnectionStatus()
+ pcs.retry_after = float("inf")
+ assert pcs.retry_after == float("inf")
+
+    # retry_after accepts a plain numeric timestamp in the future
+    def test_set_retry_after_non_numeric_value_fixed(self):
+        """
+        Test that retry_after accepts a plain numeric timestamp in the future.
+ """
+ import time
+
+ pcs = PulseConnectionStatus()
+ retry_after_time = time.time() + 5.0
+ pcs.retry_after = retry_after_time
+ assert pcs.retry_after == retry_after_time
+
+ # reset_backoff can be called without errors
+ def test_reset_backoff(self):
+ """
+ Test that reset_backoff can be called without errors.
+ """
+ pcs = PulseConnectionStatus()
+ pcs.get_backoff().reset_backoff()
+
+ # authenticated_flag can be set to True
+ def test_authenticated_flag_set_to_true(self):
+ """
+ Test that authenticated_flag can be set to True.
+ """
+ pcs = PulseConnectionStatus()
+ pcs.authenticated_flag.set()
+ assert pcs.authenticated_flag.is_set()
+
+ # authenticated_flag can be set to False
+ def test_authenticated_flag_false(self):
+ """
+ Test that authenticated_flag can be set to False.
+ """
+ pcs = PulseConnectionStatus()
+ pcs.authenticated_flag.clear()
+ assert not pcs.authenticated_flag.is_set()
+
+ # Test that get_backoff returns the same PulseBackoff object every time it is called.
+ def test_get_backoff_returns_same_object(self):
+ """
+ Test that get_backoff returns the same PulseBackoff object every time it is called.
+ Arrange:
+ - Create an instance of PulseConnectionStatus
+ Act:
+ - Call get_backoff method twice
+ Assert:
+ - The returned PulseBackoff objects are the same
+ """
+ pcs = PulseConnectionStatus()
+ backoff1 = pcs.get_backoff()
+ backoff2 = pcs.get_backoff()
+ assert backoff1 is backoff2
+
+ # increment_backoff increases the backoff count by 1
+ def test_increment_backoff2(self):
+ """
+ Test that increment_backoff increases the backoff count by 1.
+ """
+ pcs = PulseConnectionStatus()
+ backoff = pcs.get_backoff()
+ initial_backoff_count = backoff.backoff_count
+ backoff.increment_backoff()
+ new_backoff_count = backoff.backoff_count
+ assert new_backoff_count == initial_backoff_count + 1
+
+ # reset_backoff sets the backoff count to 0 and the expiration time to 0.0
+ def test_reset_backoff_sets_backoff_count_and_expiration_time(self):
+ """
+ Test that reset_backoff sets the backoff count to 0 and the expiration time to 0.0.
+ """
+ pcs = PulseConnectionStatus()
+ backoff = pcs.get_backoff()
+ backoff.increment_backoff()
+ backoff.reset_backoff()
+ assert backoff.backoff_count == 0
+ assert backoff.expiration_time == 0.0
diff --git a/tests/test_pulse_query_manager.py b/tests/test_pulse_query_manager.py
new file mode 100644
index 0000000..0441664
--- /dev/null
+++ b/tests/test_pulse_query_manager.py
@@ -0,0 +1,393 @@
+"""Test Pulse Query Manager."""
+
+import asyncio
+import logging
+import time
+from datetime import datetime, timedelta
+from typing import Any, Callable
+
+import pytest
+from aiohttp import client_exceptions, client_reqrep
+from aioresponses import aioresponses
+from bs4 import BeautifulSoup
+from freezegun.api import FrozenDateTimeFactory, StepTickTimeFactory
+
+from conftest import MOCKED_API_VERSION
+from pyadtpulse.const import ADT_ORB_URI, DEFAULT_API_HOST
+from pyadtpulse.exceptions import (
+ PulseClientConnectionError,
+ PulseConnectionError,
+ PulseServerConnectionError,
+ PulseServiceTemporarilyUnavailableError,
+)
+from pyadtpulse.pulse_connection_properties import PulseConnectionProperties
+from pyadtpulse.pulse_connection_status import PulseConnectionStatus
+from pyadtpulse.pulse_query_manager import MAX_REQUERY_RETRIES, PulseQueryManager
+
+
+@pytest.mark.asyncio
+async def test_fetch_version(mocked_server_responses: aioresponses):
+ """Test fetch version."""
+ s = PulseConnectionStatus()
+ cp = PulseConnectionProperties(DEFAULT_API_HOST)
+ p = PulseQueryManager(s, cp)
+ await p.async_fetch_version()
+ assert cp.api_version == MOCKED_API_VERSION
+
+
+@pytest.mark.asyncio
+async def test_fetch_version_fail(mock_server_down: aioresponses):
+ """Test fetch version."""
+ s = PulseConnectionStatus()
+ cp = PulseConnectionProperties(DEFAULT_API_HOST)
+ p = PulseQueryManager(s, cp)
+ with pytest.raises(PulseServerConnectionError):
+ await p.async_fetch_version()
+ assert s.get_backoff().backoff_count == 1
+ with pytest.raises(PulseServerConnectionError):
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ assert s.get_backoff().backoff_count == 2
+ assert s.get_backoff().get_current_backoff_interval() == 2.0
+
+
+@pytest.mark.asyncio
+async def test_fetch_version_eventually_succeeds(
+ mock_server_temporarily_down: aioresponses,
+):
+ """Test fetch version."""
+ s = PulseConnectionStatus()
+ cp = PulseConnectionProperties(DEFAULT_API_HOST)
+ p = PulseQueryManager(s, cp)
+ with pytest.raises(PulseServerConnectionError):
+ await p.async_fetch_version()
+ assert s.get_backoff().backoff_count == 1
+ with pytest.raises(PulseServerConnectionError):
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ assert s.get_backoff().backoff_count == 2
+ assert s.get_backoff().get_current_backoff_interval() == 2.0
+ await p.async_fetch_version()
+ assert s.get_backoff().backoff_count == 0
+
+
+@pytest.mark.asyncio
+async def test_query_orb(
+ mocked_server_responses: aioresponses,
+ read_file: Callable[..., str],
+ mock_sleep: Any,
+ get_mocked_connection_properties: PulseConnectionProperties,
+):
+ """Test query orb.
+
+ We also check that it waits for authenticated flag.
+ """
+
+ async def query_orb_task():
+ return await p.query_orb(logging.DEBUG, "Failed to query orb")
+
+ s = PulseConnectionStatus()
+ cp = get_mocked_connection_properties
+ p = PulseQueryManager(s, cp)
+ orb_file = read_file("orb.html")
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI), status=200, content_type="text/html", body=orb_file
+ )
+ task = asyncio.create_task(query_orb_task())
+ await asyncio.sleep(2)
+ assert not task.done()
+ s.authenticated_flag.set()
+ await task
+ assert task.done()
+ assert task.result() == BeautifulSoup(orb_file, "html.parser")
+ assert mock_sleep.call_count == 1 # from the asyncio.sleep call above
+ mocked_server_responses.get(cp.make_url(ADT_ORB_URI), status=404)
+ with pytest.raises(PulseServerConnectionError):
+ result = await query_orb_task()
+ assert mock_sleep.call_count == 1
+ assert s.get_backoff().backoff_count == 1
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI), status=200, content_type="text/html", body=orb_file
+ )
+ result = await query_orb_task()
+ assert result == BeautifulSoup(orb_file, "html.parser")
+ assert mock_sleep.call_count == 2
+
+
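+# Exercise Retry-After handling: a 429 with delta-seconds, a 503 with an HTTP-date,
+# a 503 with no header, and a Retry-After date already in the past.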
+@pytest.mark.asyncio
+async def test_retry_after(
+ mocked_server_responses: aioresponses,
+ freeze_time_to_now: FrozenDateTimeFactory | StepTickTimeFactory,
+ get_mocked_connection_properties: PulseConnectionProperties,
+ mock_sleep: Any,
+):
+ """Test retry after."""
+
+ retry_after_time = 120
+ frozen_time = freeze_time_to_now
+ now = time.time()
+
+ s = PulseConnectionStatus()
+ cp = get_mocked_connection_properties
+ p = PulseQueryManager(s, cp)
+
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=429,
+ headers={"Retry-After": str(retry_after_time)},
+ )
+ with pytest.raises(PulseServiceTemporarilyUnavailableError):
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ # make sure we can't override the retry
+ s.get_backoff().reset_backoff()
+ assert s.get_backoff().expiration_time == int(now + float(retry_after_time))
+ with pytest.raises(PulseServiceTemporarilyUnavailableError):
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ frozen_time.tick(timedelta(seconds=retry_after_time + 1))
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=200,
+ )
+ # this should succeed
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+
+ now = time.time()
+ retry_date = now + float(retry_after_time)
+ retry_date_str = datetime.fromtimestamp(retry_date).strftime(
+ "%a, %d %b %Y %H:%M:%S GMT"
+ )
+ # need to get the new retry after time since it doesn't have fractions of seconds
+ new_retry_after = (
+ datetime.strptime(retry_date_str, "%a, %d %b %Y %H:%M:%S GMT").timestamp() - now
+ )
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=503,
+ headers={"Retry-After": retry_date_str},
+ )
+ with pytest.raises(PulseServiceTemporarilyUnavailableError):
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+
+ frozen_time.tick(timedelta(seconds=new_retry_after - 1))
+ with pytest.raises(PulseServiceTemporarilyUnavailableError):
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ frozen_time.tick(timedelta(seconds=2))
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=200,
+ )
+ # should succeed
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ # unavailable with no retry after
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=503,
+ )
+ frozen_time.tick(timedelta(seconds=retry_after_time + 1))
+ with pytest.raises(PulseServiceTemporarilyUnavailableError):
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=200,
+ )
+ # should succeed
+ frozen_time.tick(timedelta(seconds=1))
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+
+ # retry after in the past
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=503,
+ headers={"Retry-After": retry_date_str},
+ )
+ with pytest.raises(PulseServiceTemporarilyUnavailableError):
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=200,
+ )
+ frozen_time.tick(timedelta(seconds=1))
+ # should succeed
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+
+
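+# Shared harness: queue MAX_REQUERY_RETRIES + 1 copies of the failing response followed
+# by a 200, then verify the per-retry sleeps, the connection backoff count, and recovery.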
+async def run_query_exception_test(
+ mocked_server_responses,
+ mock_sleep,
+ get_mocked_connection_properties,
+ aiohttp_exception: client_exceptions.ClientError,
+ pulse_exception: PulseConnectionError,
+):
+ s = PulseConnectionStatus()
+ cp = get_mocked_connection_properties
+ p = PulseQueryManager(s, cp)
+ # need to do ClientConnectorError, but it requires initialization
+ for _ in range(MAX_REQUERY_RETRIES + 1):
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ exception=aiohttp_exception,
+ )
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=200,
+ )
+ with pytest.raises(pulse_exception):
+ await p.async_query(
+ ADT_ORB_URI,
+ requires_authentication=False,
+ )
+
+ # only MAX_REQUERY_RETRIES - 1 sleeps since first call won't sleep
+ assert (
+ mock_sleep.call_count == MAX_REQUERY_RETRIES - 1
+ ), f"Failure on exception {aiohttp_exception.__name__}"
+ for i in range(MAX_REQUERY_RETRIES - 1):
+ assert mock_sleep.call_args_list[i][0][0] == 1 * 2 ** (
+ i
+ ), f"Failure on exception sleep count {i} on exception {aiohttp_exception.__name__}"
+ assert (
+ s.get_backoff().backoff_count == 1
+ ), f"Failure on exception {aiohttp_exception.__name__}"
+ with pytest.raises(pulse_exception):
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ # pqm backoff should trigger here
+
+ # MAX_REQUERY_RETRIES - 1 backoff for query, 1 for connection backoff
+ assert mock_sleep.call_count == MAX_REQUERY_RETRIES
+ assert (
+ mock_sleep.call_args_list[MAX_REQUERY_RETRIES - 1][0][0]
+ == s.get_backoff().initial_backoff_interval
+ )
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=200,
+ )
+ # this should trigger a sleep
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ assert mock_sleep.call_count == MAX_REQUERY_RETRIES + 1
+ assert (
+ mock_sleep.call_args_list[MAX_REQUERY_RETRIES][0][0]
+ == s.get_backoff().initial_backoff_interval * 2
+ )
+ # this shouldn't trigger a sleep
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ assert mock_sleep.call_count == MAX_REQUERY_RETRIES + 1
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+ "test_exception",
+ (
+ (client_exceptions.ClientConnectionError, PulseClientConnectionError),
+ (client_exceptions.ClientError, PulseClientConnectionError),
+ (client_exceptions.ClientOSError, PulseClientConnectionError),
+ (client_exceptions.ServerDisconnectedError, PulseServerConnectionError),
+ (client_exceptions.ServerTimeoutError, PulseServerConnectionError),
+ (client_exceptions.ServerConnectionError, PulseServerConnectionError),
+ (asyncio.TimeoutError, PulseServerConnectionError),
+ ),
+)
+async def test_async_query_exceptions(
+ mocked_server_responses: aioresponses,
+ mock_sleep: Any,
+ get_mocked_connection_properties: PulseConnectionProperties,
+ test_exception,
+):
+ await run_query_exception_test(
+ mocked_server_responses,
+ mock_sleep,
+ get_mocked_connection_properties,
+ *test_exception,
+ )
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+ "test_exception",
+ (
+ (ConnectionRefusedError, PulseServerConnectionError),
+ (ConnectionResetError, PulseServerConnectionError),
+ (TimeoutError, PulseClientConnectionError),
+ (BrokenPipeError, PulseClientConnectionError),
+ ),
+)
+async def test_async_query_connector_errors(
+ mocked_server_responses: aioresponses,
+ mock_sleep: Any,
+ get_mocked_connection_properties: PulseConnectionProperties,
+ test_exception,
+):
+ aiohttp_exception = client_exceptions.ClientConnectorError(
+ client_reqrep.ConnectionKey(
+ DEFAULT_API_HOST,
+ 443,
+ is_ssl=True,
+ ssl=True,
+ proxy=None,
+ proxy_auth=None,
+ proxy_headers_hash=None,
+ ),
+ os_error=test_exception[0],
+ )
+ await run_query_exception_test(
+ mocked_server_responses,
+ mock_sleep,
+ get_mocked_connection_properties,
+ aiohttp_exception,
+ test_exception[1],
+ )
+
+
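+# Queries must block until the authenticated flag is set; an asyncio.Lock controls when
+# each helper task is allowed to start.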
+@pytest.mark.asyncio
+async def test_wait_for_authentication_flag(
+ mocked_server_responses: aioresponses,
+ get_mocked_connection_properties: PulseConnectionProperties,
+ read_file: Callable[..., str],
+):
+ async def query_orb_task(lock: asyncio.Lock):
+ async with lock:
+ try:
+ result = await p.query_orb(logging.DEBUG, "Failed to query orb")
+ except asyncio.CancelledError:
+ result = None
+ return result
+
+ s = PulseConnectionStatus()
+ cp = get_mocked_connection_properties
+ p = PulseQueryManager(s, cp)
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=200,
+ body=read_file("orb.html"),
+ )
+ lock = asyncio.Lock()
+ task = asyncio.create_task(query_orb_task(lock))
+ try:
+ await asyncio.wait_for(query_orb_task(lock), 10)
+ except asyncio.TimeoutError:
+ task.cancel()
+ await task
+ # if we time out, the test has passed
+ else:
+ pytest.fail("Query should have timed out")
+ await lock.acquire()
+ task = asyncio.create_task(query_orb_task(lock))
+ lock.release()
+ await asyncio.sleep(1)
+ assert not task.done()
+ await asyncio.sleep(3)
+ assert not task.done()
+ s.authenticated_flag.set()
+ result = await task
+ assert result == BeautifulSoup(read_file("orb.html"), "html.parser")
+
+ # test query with retry will wait for authentication
+ # don't set an orb response so that we will backoff on the query
+ await lock.acquire()
+ task = asyncio.create_task(query_orb_task(lock))
+ lock.release()
+ await asyncio.sleep(0.5)
+ assert not task.done()
+ s.authenticated_flag.clear()
+ await asyncio.sleep(5)
+ assert not task.done()
+ s.authenticated_flag.set()
+ with pytest.raises(PulseServerConnectionError):
+ await task
diff --git a/tests/test_site_properties.py b/tests/test_site_properties.py
new file mode 100644
index 0000000..034baa4
--- /dev/null
+++ b/tests/test_site_properties.py
@@ -0,0 +1,303 @@
+# Generated by CodiumAI
+from multiprocessing import RLock
+from time import sleep, time
+
+# Dependencies:
+# pip install pytest-mock
+import pytest
+
+from pyadtpulse.alarm_panel import ADTPulseAlarmPanel
+from pyadtpulse.const import DEFAULT_API_HOST
+from pyadtpulse.gateway import ADTPulseGateway
+from pyadtpulse.pulse_authentication_properties import PulseAuthenticationProperties
+from pyadtpulse.pulse_connection import PulseConnection
+from pyadtpulse.pulse_connection_properties import PulseConnectionProperties
+from pyadtpulse.pulse_connection_status import PulseConnectionStatus
+from pyadtpulse.site_properties import ADTPulseSiteProperties
+from pyadtpulse.zones import ADTPulseFlattendZone, ADTPulseZoneData, ADTPulseZones
+
+
+class TestADTPulseSiteProperties:
+ # Retrieve site id and name
+ def test_retrieve_site_id_and_name(self):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ # Act
+ retrieved_id = site_properties.id
+ retrieved_name = site_properties.name
+
+ # Assert
+ assert retrieved_id == site_id
+ assert retrieved_name == site_name
+
+ # Retrieve last update time
+ def test_retrieve_last_update_time(self):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ # Act
+ last_updated = site_properties.last_updated
+
+ # Assert
+ assert isinstance(last_updated, int)
+
+ # Retrieve all zones registered with ADT Pulse account when zones exist
+ def test_retrieve_all_zones_with_zones_fixed(self):
+ # Arrange
+
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ # Add some zones to the site_properties instance
+ zone1 = ADTPulseZoneData(id_=1, name="Front Door")
+ zone2 = ADTPulseZoneData(id_=2, name="Back Door")
+
+ site_properties._zones[1] = zone1
+ site_properties._zones[2] = zone2
+
+ # Act
+ zones = site_properties.zones
+
+ # Assert
+ assert isinstance(zones, list)
+ assert len(zones) == 2
+
+ # Retrieve zone information in dictionary form
+ def test_retrieve_zone_information_as_dict(self):
+ # Arrange
+
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+ site_properties._zones = ADTPulseZones()
+ zone = ADTPulseZoneData(id_=1, name="Zone1") # Provide the 'id_' argument
+ site_properties._zones[1] = zone
+
+ # Act
+ zones_dict = site_properties.zones_as_dict
+
+ # Assert
+ assert isinstance(zones_dict, ADTPulseZones)
+
+ # Retrieve alarm panel object for the site
+ def test_retrieve_alarm_panel_object(self):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ # Act
+ alarm_panel = site_properties.alarm_control_panel
+
+ # Assert
+ assert isinstance(alarm_panel, ADTPulseAlarmPanel)
+
+ # Retrieve gateway device object
+ def test_retrieve_gateway_device_object(self):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ # Act
+ gateway = site_properties.gateway
+
+ # Assert
+ assert isinstance(gateway, ADTPulseGateway)
+
+ # No zones exist
+ def test_no_zones_exist(self):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ # Act & Assert
+ with pytest.raises(RuntimeError):
+ site_properties.zones
+
+ # Attempting to retrieve site data while another thread is modifying it
+ def test_retrieve_site_data_while_modifying(self, mocker):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+        def modify_site_data():
+            # simulated concurrent writer: takes the site lock before touching the update time
+            with site_properties.site_lock:
+                sleep(2)
+                site_properties._last_updated = int(time())
+
+ mocker.patch.object(site_properties, "_last_updated", 0)
+ mocker.patch.object(site_properties, "_site_lock", RLock())
+
+        # Act: hold the site lock so the writer thread cannot update until we have read
+        writer = Thread(target=modify_site_data)
+        with site_properties.site_lock:
+            writer.start()
+            retrieved_last_updated = site_properties.last_updated
+        writer.join()
+
+ # Assert
+ assert retrieved_last_updated == 0
+
+ # Attempting to set alarm status to existing status
+ def test_set_alarm_status_to_existing_status(self, mocker):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+        mocker.patch.object(site_properties._alarm_panel, "_status", "Armed Away")
+
+        # Act: read the status back; it should be unchanged
+        current_status = site_properties.alarm_control_panel._status
+
+        # Assert
+        assert current_status == "Armed Away"
+
+ # Check if updates exist
+ def test_check_updates_exist(self, mocker):
+ # Arrange
+ from time import time
+
+ site_properties = ADTPulseSiteProperties("12345", "My ADT Pulse Site")
+ mocker.patch.object(site_properties, "_last_updated", return_value=time())
+
+ # Act
+ result = site_properties.updates_may_exist
+
+ # Assert
+ assert result is False
+
+ # Update site/zone data async with current data
+ @pytest.mark.asyncio
+ async def test_update_site_zone_data_async(self, mocker):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+ mock_zones = mocker.Mock()
+ mock_zones.flatten.return_value = [ADTPulseFlattendZone()]
+ site_properties._zones = mock_zones
+
+ # Act
+ result = await site_properties.async_update()
+
+ # Assert
+        assert result is False
+
+ # Cannot set alarm status from one state to another
+ @pytest.mark.asyncio
+ async def test_cannot_set_alarm_status(self, mocker):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+ cp = PulseConnectionProperties(DEFAULT_API_HOST)
+ cs = PulseConnectionStatus()
+ pa = PulseAuthenticationProperties(
+ "test@example.com", "testpassword", "testfingerprint"
+ )
+
+ connection = PulseConnection(cs, cp, pa)
+
+ # Act
+ result = await site_properties._alarm_panel._arm(
+ connection, "Armed Home", False
+ )
+
+ # Assert
+        assert result is False
+
+ # Failed updating ADT Pulse alarm to new mode
+ @pytest.mark.asyncio
+ async def test_failed_updating_alarm_mode(self, mocker):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ # Mock the _arm method to return False
+ async def mock_arm(*args, **kwargs):
+ return False
+
+ mocker.patch.object(ADTPulseAlarmPanel, "_arm", side_effect=mock_arm)
+
+ # Act
+ result = await site_properties.alarm_control_panel._arm(None, "new_mode", False)
+
+ # Assert
+        assert result is False
+
+ # Retrieve last update time with invalid input
+ def test_retrieve_last_update_invalid_input(self):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ # Act
+ last_updated = site_properties.last_updated
+
+ # Assert
+ assert last_updated == 0
+
+ # Retrieve site id and name with invalid input
+ def test_retrieve_site_id_and_name_with_invalid_input(self):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ # Act
+ retrieved_id = site_properties.id
+ retrieved_name = site_properties.name
+
+ # Assert
+ assert retrieved_id == site_id
+ assert retrieved_name == site_name
+
+ # Retrieve zone information in dictionary form with invalid input
+ def test_retrieve_zone_info_invalid_input(self, mocker):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+ mocker.patch.object(site_properties, "_zones", None)
+
+ # Act and Assert
+ with pytest.raises(RuntimeError):
+ site_properties.zones
+
+ with pytest.raises(RuntimeError):
+ site_properties.zones_as_dict
+
+ # Retrieve all zones registered with ADT Pulse account with invalid input
+ def test_retrieve_zones_with_invalid_input(self, mocker):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+ mocker.patch.object(site_properties, "_zones", None)
+
+ # Act and Assert
+ with pytest.raises(RuntimeError):
+ _ = site_properties.zones
+
+ with pytest.raises(RuntimeError):
+ _ = site_properties.zones_as_dict
+
+ # Retrieve alarm panel object for the site with invalid input
+ def test_retrieve_alarm_panel_invalid_input(self, mocker):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ # Mock the ADTPulseAlarmPanel object
+ mock_alarm_panel = mocker.Mock()
+ site_properties._alarm_panel = mock_alarm_panel
+
+ # Act
+ retrieved_alarm_panel = site_properties.alarm_control_panel
+
+ # Assert
+ assert retrieved_alarm_panel == mock_alarm_panel
diff --git a/tests/test_zones.py b/tests/test_zones.py
new file mode 100644
index 0000000..b7e19db
--- /dev/null
+++ b/tests/test_zones.py
@@ -0,0 +1,1221 @@
+# Generated by CodiumAI
+from datetime import datetime
+
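+# Dependencies:
+# pip install typeguard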
+import pytest
+from typeguard import TypeCheckError
+
+from pyadtpulse.zones import (
+ ADT_NAME_TO_DEFAULT_TAGS,
+ ADTPulseFlattendZone,
+ ADTPulseZoneData,
+ ADTPulseZones,
+)
+
+
+class TestADTPulseZoneData:
+ # Creating an instance of ADTPulseZoneData with required parameters should succeed.
+ def test_create_instance_with_required_parameters(self):
+ """
+ Test that creating an instance of ADTPulseZoneData with required parameters succeeds.
+ """
+ # Arrange
+ name = "Zone 1"
+ id_ = "zone1"
+
+ # Act
+ zone_data = ADTPulseZoneData(name, id_)
+
+ # Assert
+ assert zone_data.name == name
+ assert zone_data.id_ == id_
+ assert zone_data.tags == ADT_NAME_TO_DEFAULT_TAGS["Window"]
+ assert zone_data.status == "Unknown"
+ assert zone_data.state == "Unknown"
+ assert zone_data.last_activity_timestamp == 0
+
+ # Setting the last_activity_timestamp with a value greater than or equal to 1420070400 should succeed.
+ def test_set_last_activity_timestamp_greater_than_or_equal_to_1420070400(self):
+ """
+ Test that setting the last_activity_timestamp with a value greater than or equal to 1420070400 succeeds.
+ """
+ # Arrange
+ zone_data = ADTPulseZoneData("Zone 1", "zone1")
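+        # 1420070400 == 2015-01-01T00:00:00Z, the smallest value the setter accepts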
+ timestamp = 1420070400
+
+ # Act
+ zone_data.last_activity_timestamp = timestamp
+
+ # Assert
+ assert zone_data.last_activity_timestamp == timestamp
+
+ # Setting the tags with a valid value should succeed.
+ def test_set_tags_with_valid_value(self):
+ """
+ Test that setting the tags with a valid value succeeds.
+ """
+ # Arrange
+ zone_data = ADTPulseZoneData("Zone 1", "zone1")
+ tags = ("sensor", "doorWindow")
+
+ # Act
+ zone_data.tags = tags
+
+ # Assert
+ assert zone_data.tags == tags
+
+ # Getting the last_activity_timestamp should return the correct value.
+ def test_get_last_activity_timestamp(self):
+ """
+ Test that getting the last_activity_timestamp returns the correct value.
+ """
+ # Arrange
+ timestamp = 1420070400
+ zone_data = ADTPulseZoneData("Zone 1", "zone1")
+ zone_data.last_activity_timestamp = timestamp
+
+ # Act
+ result = zone_data.last_activity_timestamp
+
+ # Assert
+ assert result == timestamp
+
+ # Getting the tags should return the correct value.
+ def test_get_tags_fixed(self):
+ """
+ Test that getting the tags returns the correct value.
+ """
+ # Arrange
+ tags = ("sensor", "doorWindow")
+ zone_data = ADTPulseZoneData("Zone 1", "zone1")
+ zone_data.tags = tags
+
+ # Act
+ result = zone_data.tags
+
+ # Assert
+ assert result == tags
+
+ # ADT_NAME_TO_DEFAULT_TAGS should be a valid dictionary.
+ def test_ADT_NAME_TO_DEFAULT_TAGS_is_valid_dictionary(self):
+ """
+ Test that ADT_NAME_TO_DEFAULT_TAGS is a valid dictionary.
+ """
+ # Arrange
+
+ # Act
+
+ # Assert
+ assert isinstance(ADT_NAME_TO_DEFAULT_TAGS, dict)
+
+ # Creating an instance of ADTPulseZoneData without required parameters should fail.
+ def test_create_instance_without_required_parameters(self):
+ """
+ Test that creating an instance of ADTPulseZoneData without required parameters fails.
+ """
+ # Arrange
+
+ # Act and Assert
+ with pytest.raises(TypeError):
+ ADTPulseZoneData()
+
+ # Setting the last_activity_timestamp with a value less than 1420070400 should raise a ValueError.
+ def test_set_last_activity_timestamp_less_than_1420070400(self):
+ """
+ Test that setting the last_activity_timestamp with a value less than 1420070400 raises a ValueError.
+ """
+ # Arrange
+ zone_data = ADTPulseZoneData("Zone 1", "zone1")
+ timestamp = 1419999999
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ zone_data.last_activity_timestamp = timestamp
+
+ # Setting the tags with an invalid value should raise a ValueError.
+ def test_set_tags_with_invalid_value(self):
+ """
+ Test that setting the tags with an invalid value raises a ValueError.
+ """
+ # Arrange
+ zone_data = ADTPulseZoneData("Zone 1", "zone1")
+ tags = ("InvalidSensor", "InvalidType")
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ zone_data.tags = tags
+
+ # Getting the name should return the correct value.
+ def test_get_name(self):
+ """
+ Test that getting the name returns the correct value.
+ """
+ # Arrange
+ name = "Zone 1"
+ zone_data = ADTPulseZoneData(name, "zone1")
+
+ # Act
+ result = zone_data.name
+
+ # Assert
+ assert result == name
+
+ # Getting the id_ should return the correct value.
+ def test_get_id(self):
+ """
+ Test that getting the id_ returns the correct value.
+ """
+ # Arrange
+ id_ = "zone1"
+ zone_data = ADTPulseZoneData("Zone 1", id_)
+
+ # Act
+ result = zone_data.id_
+
+ # Assert
+ assert result == id_
+
+ # Setting the status with a valid value should succeed.
+ def test_set_status_with_valid_value(self):
+ """
+ Test that setting the status with a valid value succeeds.
+ """
+ # Arrange
+ zone_data = ADTPulseZoneData("Zone 1", "zone1")
+ status = "Online"
+
+ # Act
+ zone_data.status = status
+
+ # Assert
+ assert zone_data.status == status
+
+ # Setting the state with a valid value should succeed.
+ def test_setting_state_with_valid_value(self):
+ """
+ Test that setting the state with a valid value succeeds.
+ """
+ # Arrange
+ name = "Zone 1"
+ id_ = "zone1"
+ state = "Opened"
+
+ # Act
+ zone_data = ADTPulseZoneData(name, id_)
+ zone_data.state = state
+
+ # Assert
+ assert zone_data.state == state
+
+ # Getting the status should return the correct value.
+ def test_getting_status(self):
+ """
+ Test that getting the status returns the correct value.
+ """
+ # Arrange
+ name = "Zone 1"
+ id_ = "zone1"
+ status = "Online"
+
+ # Act
+ zone_data = ADTPulseZoneData(name, id_)
+ zone_data.status = status
+
+ # Assert
+ assert zone_data.status == status
+
+ # Getting the state should return the correct value.
+ def test_getting_state_returns_correct_value(self):
+ """
+ Test that getting the state returns the correct value.
+ """
+ # Arrange
+ name = "Zone 1"
+ id_ = "zone1"
+ state = "Opened"
+
+ zone_data = ADTPulseZoneData(name, id_)
+ zone_data.state = state
+
+ # Act
+ result = zone_data.state
+
+ # Assert
+ assert result == state
+
+
+class TestADTPulseFlattendZone:
+ # Creating a new instance of ADTPulseFlattendZone with valid parameters should successfully create an object with the correct attributes.
+ def test_valid_parameters(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with valid parameters successfully creates an object with the correct attributes.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act
+ zone_obj = ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Assert
+ assert zone_obj["zone"] == zone
+ assert zone_obj["name"] == name
+ assert zone_obj["id_"] == id_
+ assert zone_obj["tags"] == tags
+ assert zone_obj["status"] == status
+ assert zone_obj["state"] == state
+ assert zone_obj["last_activity_timestamp"] == last_activity_timestamp
+
+ # Accessing any attribute of an instance of ADTPulseFlattendZone should return the expected value.
+ def test_access_attributes(self):
+ """
+ Test that accessing any attribute of an instance of ADTPulseFlattendZone returns the expected value.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+ zone_obj = ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Act & Assert
+ assert zone_obj["zone"] == zone
+ assert zone_obj["name"] == name
+ assert zone_obj["id_"] == id_
+ assert zone_obj["tags"] == tags
+ assert zone_obj["status"] == status
+ assert zone_obj["state"] == state
+ assert zone_obj["last_activity_timestamp"] == last_activity_timestamp
+
+ # Modifying any attribute of an instance of ADTPulseFlattendZone should successfully update the attribute with the new value.
+ def test_modify_attributes_fixed(self):
+ """
+ Test that modifying any attribute of an instance of ADTPulseFlattendZone successfully updates the attribute with the new value.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+ zone_obj = ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Act
+ new_zone = 2
+ new_name = "Zone 2"
+ new_id = "zone2"
+ new_tags = ("sensor2", "type2")
+ new_status = "Offline"
+ new_state = "Closed"
+ new_last_activity_timestamp = 9876543210
+
+ zone_obj["zone"] = new_zone
+ zone_obj["name"] = new_name
+ zone_obj["id_"] = new_id
+ zone_obj["tags"] = new_tags
+ zone_obj["status"] = new_status
+ zone_obj["state"] = new_state
+ zone_obj["last_activity_timestamp"] = new_last_activity_timestamp
+
+ # Assert
+ assert zone_obj["zone"] == new_zone
+ assert zone_obj["name"] == new_name
+ assert zone_obj["id_"] == new_id
+ assert zone_obj["tags"] == new_tags
+ assert zone_obj["status"] == new_status
+ assert zone_obj["state"] == new_state
+ assert zone_obj["last_activity_timestamp"] == new_last_activity_timestamp
+
+ # Creating a new instance of ADTPulseFlattendZone with a non-integer value for 'zone' should not raise a TypeError.
+ def test_non_integer_zone(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a non-integer value for 'zone' does not raise a TypeError.
+ """
+ # Arrange
+ zone = "1"
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act & Assert
+ ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Creating a new instance of ADTPulseFlattendZone with an empty string for 'name' should not raise a ValueError.
+ def test_empty_name(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with an empty string for 'name' does not raise a ValueError.
+ """
+ # Arrange
+ zone = 1
+ name = ""
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act & Assert
+ ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Creating a new instance of ADTPulseFlattendZone with an empty string for 'id_' should not raise a ValueError.
+ def test_empty_id_fixed(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with an empty string for 'id_' does not raise a ValueError.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = ""
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+        # Act & Assert
+ ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Creating a new instance of ADTPulseFlattendZone with a tuple that contains non-string values for 'tags' should not raise a TypeError.
+ def test_non_string_tags(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a tuple that contains non-string values for 'tags' does not raise a TypeError.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", 2)
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act & Assert
+ ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Creating a new instance of ADTPulseFlattendZone with a non-string value for 'status' should not raise a TypeError.
+ def test_non_string_status(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a non-string value for 'status' does not raise a TypeError.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = 1
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act & Assert
+ ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Creating a new instance of ADTPulseFlattendZone with a non-string value for 'state' should not raise a TypeError.
+ def test_non_string_state(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a non-string value for 'state' does not raise a TypeError.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = 1
+ last_activity_timestamp = 1234567890
+
+ # Act & Assert
+ ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Creating a new instance of ADTPulseFlattendZone with a non-integer value for 'last_activity_timestamp' should not raise a TypeError.
+ def test_non_integer_last_activity_timestamp(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a non-integer value for 'last_activity_timestamp' does not raise a TypeError.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = "1234567890"
+
+ # Act & Assert
+ ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Creating a new instance of ADTPulseFlattendZone with a very large integer value for 'zone' should successfully create an object with the correct attributes.
+ def test_large_zone_fixed(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a very large integer value for 'zone' successfully creates an object with the correct attributes.
+ """
+ # Arrange
+ zone = 9999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act
+ zone_obj = ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Assert
+ assert zone_obj["zone"] == zone
+ assert zone_obj["name"] == name
+ assert zone_obj["id_"] == id_
+ assert zone_obj["tags"] == tags
+ assert zone_obj["status"] == status
+ assert zone_obj["state"] == state
+ assert zone_obj["last_activity_timestamp"] == last_activity_timestamp
+
+ # Creating a new instance of ADTPulseFlattendZone with a very long string for 'name' should successfully create an object with the correct attributes.
+ def test_long_name_fixed(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a very long string for 'name' successfully creates an object with the correct attributes.
+ """
+ # Arrange
+ zone = 1
+ name = "This is a very long name that exceeds the maximum length allowed for the 'name' attribute in ADTPulseFlattendZone"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act
+ zone_obj = ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Assert
+ assert zone_obj["zone"] == zone
+ assert zone_obj["name"] == name
+ assert zone_obj["id_"] == id_
+ assert zone_obj["tags"] == tags
+ assert zone_obj["status"] == status
+ assert zone_obj["state"] == state
+ assert zone_obj["last_activity_timestamp"] == last_activity_timestamp
+
+ # Creating a new instance of ADTPulseFlattendZone with a very long string for 'id_' should successfully create an object with the correct attributes.
+ def test_long_id_fixed(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a very long string for 'id_' successfully creates an object with the correct attributes.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "a" * 1000 # Very long string for 'id_'
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act
+ zone_obj = ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Assert
+ assert zone_obj["zone"] == zone
+ assert zone_obj["name"] == name
+ assert zone_obj["id_"] == id_
+ assert zone_obj["tags"] == tags
+ assert zone_obj["status"] == status
+ assert zone_obj["state"] == state
+ assert zone_obj["last_activity_timestamp"] == last_activity_timestamp
+
+ # Creating a new instance of ADTPulseFlattendZone with a tuple that contains multiple strings for 'tags' should successfully create an object with the correct attributes.
+ def test_create_instance_with_multiple_tags_fixed(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a tuple that contains multiple strings for 'tags' successfully creates an object with the correct attributes.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1", "sensor2", "type2")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act
+ zone_obj = ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Assert
+ assert zone_obj["zone"] == zone
+ assert zone_obj["name"] == name
+ assert zone_obj["id_"] == id_
+ assert zone_obj["tags"] == tags
+ assert zone_obj["status"] == status
+ assert zone_obj["state"] == state
+ assert zone_obj["last_activity_timestamp"] == last_activity_timestamp
+
+ # Creating a new instance of ADTPulseFlattendZone with a very long string for 'status' should successfully create an object with the correct attributes.
+ def test_long_status_string_fixed(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a very long string for 'status' successfully creates an object with the correct attributes.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Very long status string" * 1000
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act
+ zone_obj = ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Assert
+ assert zone_obj["zone"] == zone
+ assert zone_obj["name"] == name
+ assert zone_obj["id_"] == id_
+ assert zone_obj["tags"] == tags
+ assert zone_obj["status"] == status
+ assert zone_obj["state"] == state
+ assert zone_obj["last_activity_timestamp"] == last_activity_timestamp
+
+ # Creating a new instance of ADTPulseFlattendZone with a very long string for 'state' should successfully create an object with the correct attributes.
+ def test_long_state_string_fixed(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a very long string for 'state' successfully creates an object with the correct attributes.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "a" * 1000 # Very long string for 'state'
+ last_activity_timestamp = 1234567890
+
+ # Act
+ zone_obj = ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Assert
+ assert zone_obj["zone"] == zone
+ assert zone_obj["name"] == name
+ assert zone_obj["id_"] == id_
+ assert zone_obj["tags"] == tags
+ assert zone_obj["status"] == status
+ assert zone_obj["state"] == state
+ assert zone_obj["last_activity_timestamp"] == last_activity_timestamp
+
+ # Creating a new instance of ADTPulseFlattendZone with a very large integer value for 'last_activity_timestamp' should successfully create an object with the correct attributes.
+ def test_large_last_activity_timestamp_fixed(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a very large integer value for 'last_activity_timestamp' successfully creates an object with the correct attributes.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999
+
+ # Act
+ zone_obj = ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Assert
+ assert zone_obj["zone"] == zone
+ assert zone_obj["name"] == name
+ assert zone_obj["id_"] == id_
+ assert zone_obj["tags"] == tags
+ assert zone_obj["status"] == status
+ assert zone_obj["state"] == state
+ assert zone_obj["last_activity_timestamp"] == last_activity_timestamp
+
+
+class TestADTPulseZones:
+ # ADTPulseZones can be initialized with a dictionary containing ADTPulseZoneData with zone as the key
+ def test_initialized_with_dictionary(self):
+ """
+ Test that ADTPulseZones can be initialized with a dictionary containing ADTPulseZoneData with zone as the key
+ """
+ # Arrange
+ data = {
+ 1: ADTPulseZoneData("Zone 1", "sensor-1"),
+ 2: ADTPulseZoneData("Zone 2", "sensor-2"),
+ 3: ADTPulseZoneData("Zone 3", "sensor-3"),
+ }
+
+ # Act
+ zones = ADTPulseZones(data)
+
+ # Assert
+ assert len(zones) == 3
+ assert zones[1].name == "Zone 1"
+ assert zones[2].name == "Zone 2"
+ assert zones[3].name == "Zone 3"
+
+ # ADTPulseZones can get a Zone by its id
+ def test_get_zone_by_id(self):
+ """
+ Test that ADTPulseZones can get a Zone by its id
+ """
+ # Arrange
+ zones = ADTPulseZones(
+ {
+ 1: ADTPulseZoneData("Zone 1", "sensor-1"),
+ 2: ADTPulseZoneData("Zone 2", "sensor-2"),
+ 3: ADTPulseZoneData("Zone 3", "sensor-3"),
+ }
+ )
+
+ # Act
+ zone_1 = zones[1]
+ zone_2 = zones[2]
+ zone_3 = zones[3]
+
+ # Assert
+ assert zone_1.name == "Zone 1"
+ assert zone_2.name == "Zone 2"
+ assert zone_3.name == "Zone 3"
+
+ # ADTPulseZones can set a Zone by its id
+ def test_set_zone_by_id(self):
+ """
+ Test that ADTPulseZones can set a Zone by its id
+ """
+ # Arrange
+ zones = ADTPulseZones()
+
+ # Act
+ zones[1] = ADTPulseZoneData("Zone 1", "sensor-1")
+ zones[2] = ADTPulseZoneData("Zone 2", "sensor-2")
+ zones[3] = ADTPulseZoneData("Zone 3", "sensor-3")
+
+ # Assert
+ assert len(zones) == 3
+ assert zones[1].name == "Zone 1"
+ assert zones[2].name == "Zone 2"
+ assert zones[3].name == "Zone 3"
+
+ # ADTPulseZones can update zone status by its id
+ def test_update_zone_status(self):
+ """
+ Test that ADTPulseZones can update zone status by its id
+ """
+ # Arrange
+ zones = ADTPulseZones(
+ {
+ 1: ADTPulseZoneData("Zone 1", "sensor-1"),
+ 2: ADTPulseZoneData("Zone 2", "sensor-2"),
+ 3: ADTPulseZoneData("Zone 3", "sensor-3"),
+ }
+ )
+
+ # Act
+ zones.update_status(1, "Online")
+ zones.update_status(2, "Low Battery")
+ zones.update_status(3, "Offline")
+
+ # Assert
+ assert zones[1].status == "Online"
+ assert zones[2].status == "Low Battery"
+ assert zones[3].status == "Offline"
+
+ # ADTPulseZones can update zone state by its id
+ def test_update_zone_state(self):
+ """
+ Test that ADTPulseZones can update zone state by its id
+ """
+ # Arrange
+ zones = ADTPulseZones(
+ {
+ 1: ADTPulseZoneData("Zone 1", "sensor-1"),
+ 2: ADTPulseZoneData("Zone 2", "sensor-2"),
+ 3: ADTPulseZoneData("Zone 3", "sensor-3"),
+ }
+ )
+
+ # Act
+ zones.update_state(1, "Opened")
+ zones.update_state(2, "Closed")
+ zones.update_state(3, "Unknown")
+
+ # Assert
+ assert zones[1].state == "Opened"
+ assert zones[2].state == "Closed"
+ assert zones[3].state == "Unknown"
+
+ # ADTPulseZones can update last activity timestamp by its id
+ def test_update_last_activity_timestamp(self):
+ """
+ Test that ADTPulseZones can update last activity timestamp by its id
+ """
+ # Arrange
+ zones = ADTPulseZones(
+ {
+ 1: ADTPulseZoneData("Zone 1", "sensor-1"),
+ 2: ADTPulseZoneData("Zone 2", "sensor-2"),
+ 3: ADTPulseZoneData("Zone 3", "sensor-3"),
+ }
+ )
+
+ # Act
+ dt_1 = datetime(2022, 1, 1, 12, 0, 0)
+ dt_2 = datetime(2022, 1, 2, 12, 0, 0)
+ dt_3 = datetime(2022, 1, 3, 12, 0, 0)
+
+ zones.update_last_activity_timestamp(1, dt_1)
+ zones.update_last_activity_timestamp(2, dt_2)
+ zones.update_last_activity_timestamp(3, dt_3)
+
+ # Assert
+ assert zones[1].last_activity_timestamp == int(dt_1.timestamp())
+ assert zones[2].last_activity_timestamp == int(dt_2.timestamp())
+ assert zones[3].last_activity_timestamp == int(dt_3.timestamp())
+
+ # ADTPulseZones can update device info by its id
+ def test_update_device_info_by_id(self):
+ """
+ Test that ADTPulseZones can update device info by its id
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ zones[1] = ADTPulseZoneData("Zone 1", "sensor-1")
+
+ # Act
+ zones.update_device_info(1, "Opened", "Low Battery")
+
+ # Assert
+ assert zones[1].state == "Opened"
+ assert zones[1].status == "Low Battery"
+
+ # ADTPulseZones can update zone attributes with a dictionary containing zone attributes
+ def test_update_zone_attributes_with_dictionary(self):
+ """
+ Test that ADTPulseZones can update zone attributes with a dictionary containing zone attributes
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ dev_attr = {
+ "name": "Zone 1",
+ "type_model": "Window Sensor",
+ "zone": "1",
+ "status": "Online",
+ }
+
+ # Act
+ zones.update_zone_attributes(dev_attr)
+
+ # Assert
+ assert len(zones) == 1
+ assert zones[1].name == "Zone 1"
+ assert zones[1].id_ == "sensor-1"
+ assert zones[1].tags == ADT_NAME_TO_DEFAULT_TAGS["Window"]
+ assert zones[1].status == "Online"
+ assert zones[1].state == "Unknown"
+ assert zones[1].last_activity_timestamp == 0
+
+ # ADTPulseZones raises a KeyError if the key is not an int when getting or setting a Zone
+ def test_key_not_int(self):
+ """
+ Test that ADTPulseZones raises a KeyError if the key is not an int when getting or setting a Zone
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ valid_key = 1
+ invalid_key = "1"
+ value = ADTPulseZoneData("Zone 1", "sensor-1")
+
+ # Act
+ zones[valid_key] = value
+
+ # Assert
+ with pytest.raises(KeyError):
+ zones[invalid_key]
+
+ # ADTPulseZones can flatten its data into a list of ADTPulseFlattendZone
+ def test_flatten_method(self):
+ """
+ Test that ADTPulseZones can flatten its data into a list of ADTPulseFlattendZone
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ zones[1] = ADTPulseZoneData("Zone 1", "sensor-1")
+ zones[2] = ADTPulseZoneData("Zone 2", "sensor-2")
+ zones[3] = ADTPulseZoneData("Zone 3", "sensor-3")
+
+ # Act
+ flattened_zones = zones.flatten()
+
+ # Assert
+ assert len(flattened_zones) == 3
+ assert flattened_zones[0]["zone"] == 1
+ assert flattened_zones[0]["name"] == "Zone 1"
+ assert flattened_zones[0]["id_"] == "sensor-1"
+ assert flattened_zones[1]["zone"] == 2
+ assert flattened_zones[1]["name"] == "Zone 2"
+ assert flattened_zones[1]["id_"] == "sensor-2"
+ assert flattened_zones[2]["zone"] == 3
+ assert flattened_zones[2]["name"] == "Zone 3"
+ assert flattened_zones[2]["id_"] == "sensor-3"
+
+ # ADTPulseZones raises a ValueError if the value is not ADTPulseZoneData when setting a Zone
+ def test_raises_value_error_if_value_not_adtpulsezonedata(self):
+ """
+ Test that ADTPulseZones raises a ValueError if the value is not ADTPulseZoneData when setting a Zone
+ """
+ # Arrange
+ zones = ADTPulseZones()
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ zones[1] = "Invalid Zone Data"
+
+ # ADTPulseZones raises a ValueError when setting a Zone with a non-ADTPulseZoneData value
+ def test_raises_value_error_when_setting_zone_with_non_adtpulsezonedata_value(self):
+ """
+ Test that ADTPulseZones raises a ValueError when setting a Zone with a non-ADTPulseZoneData value
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ key = 1
+ value = "Not ADTPulseZoneData"
+
+ # Act & Assert
+ with pytest.raises(ValueError):
+ zones[key] = value
+
+ # ADTPulseZones raises a ValueError when setting a Zone with a string value
+ def test_raises_value_error_when_setting_zone_with_string_value(self):
+ """
+ Test that ADTPulseZones raises a ValueError when setting a Zone with a string value
+ """
+ # Arrange
+ zones = ADTPulseZones()
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ zones[1] = "Zone 1"
+
+ # ADTPulseZones raises a ValueError when setting a Zone with a list value
+ def test_raises_value_error_when_setting_zone_with_list_value(self):
+ """
+ Test that ADTPulseZones raises a ValueError when setting a Zone with a list value
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ key = 1
+ value = [1, 2, 3]
+
+ # Act & Assert
+ with pytest.raises(ValueError):
+ zones[key] = value
+
+ # ADTPulseZones sets default values for ADTPulseZoneData.id_ and name if not set when setting a Zone
+ def test_default_values_for_id_and_name(self):
+ """
+ Test that ADTPulseZones sets default values for ADTPulseZoneData.id_ and name if not set when setting a Zone
+ """
+ # Arrange
+ zones = ADTPulseZones()
+
+ # Act
+ zones[1] = ADTPulseZoneData("", "")
+
+ # Assert
+ assert zones[1].id_ == "sensor-1"
+ assert zones[1].name == "Sensor for Zone 1"
+
+    # Invalid Zone data cannot be injected into ADTPulseZones: typeguard rejects it before flatten is ever called
+    def test_invalid_zone_data_in_flattening(self):
+        """
+        Test that invalid tag data cannot be set on a Zone: typeguard raises TypeCheckError before flatten ever sees it
+        """
+ # Arrange
+ zones = ADTPulseZones()
+ zones[1] = ADTPulseZoneData("Zone 1", "sensor-1")
+ zones[2] = ADTPulseZoneData("Zone 2", "sensor-2")
+ zones[3] = ADTPulseZoneData("Zone 3", "sensor-3")
+ with pytest.raises(TypeCheckError):
+ zones[3].tags = "Invalid Tags"
+
+    # ADTPulseZones does not skip complete zone data when updating zone attributes
+    def test_does_not_skip_complete_zone_data(self):
+        """
+        Test that ADTPulseZones adds (does not skip) a zone when the zone data is complete
+        """
+ # Arrange
+ zones = ADTPulseZones()
+ dev_attr = {
+ "name": "Zone 1",
+ "type_model": "Window Sensor",
+ "zone": "1",
+ "status": "Online",
+ }
+
+ # Act
+ zones.update_zone_attributes(dev_attr)
+
+ # Assert
+ assert len(zones) == 1
+ assert zones[1].name == "Zone 1"
+ assert zones[1].id_ == "sensor-1"
+ assert zones[1].tags == ADT_NAME_TO_DEFAULT_TAGS["Window"]
+ assert zones[1].status == "Online"
+ assert zones[1].state == "Unknown"
+ assert zones[1].last_activity_timestamp == 0
+
+ # ADTPulseZones can handle unknown sensor types when updating zone attributes
+ def test_handle_unknown_sensor_types(self):
+ """
+ Test that ADTPulseZones can handle unknown sensor types when updating zone attributes
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ dev_attr = {
+ "name": "Sensor 1",
+ "type_model": "Unknown Sensor Type",
+ "zone": "1",
+ "status": "Online",
+ }
+
+ # Act
+ zones.update_zone_attributes(dev_attr)
+
+ # Assert
+ assert len(zones) == 1
+ assert zones[1].name == "Sensor 1"
+ assert zones[1].id_ == "sensor-1"
+ assert zones[1].tags == ("sensor", "doorWindow")
+ assert zones[1].status == "Online"
+ assert zones[1].state == "Unknown"
+ assert zones[1].last_activity_timestamp == 0
+
+    # ADTPulseZones can handle a missing ("Unknown") status when updating zone attributes
+    def test_missing_status_handling_fixed(self):
+        """
+        Test that ADTPulseZones skips a zone whose status is "Unknown" (the stand-in for a missing status)
+        """
+ # Arrange
+ zones = ADTPulseZones()
+ dev_attr = {
+ "name": "Zone 1",
+ "type_model": "Window Sensor",
+ "zone": "1",
+ "status": "Unknown", # Added status key with value "Unknown"
+ }
+
+ # Act
+ zones.update_zone_attributes(dev_attr)
+
+ # Assert
+ assert len(zones) == 0
+
+ # ADTPulseZones can handle invalid datetime when updating last activity timestamp
+ def test_handle_invalid_datetime(self):
+ """
+ Test that ADTPulseZones can handle invalid datetime when updating last activity timestamp
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ zones[1] = ADTPulseZoneData("name", "id")
+ key = 1
+ invalid_dt = "2022-13-01 12:00:00" # Invalid datetime format
+
+ # Act
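+        # strptime raises before update_last_activity_timestamp is reached, so the timestamp stays at its default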
+ with pytest.raises(ValueError):
+ dt = datetime.strptime(invalid_dt, "%Y-%m-%d %H:%M:%S")
+ zones.update_last_activity_timestamp(key, dt)
+
+ # Assert
+ assert zones[key].last_activity_timestamp == 0
+
+ # ADTPulseZones can handle missing name when updating zone attributes
+ def test_handle_missing_name_when_updating_zone_attributes(self):
+ """
+ Test that ADTPulseZones can handle missing name when updating zone attributes
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ dev_attr = {
+ "name": "Unknown",
+ "type_model": "Window Sensor",
+ "zone": "1",
+ "status": "Online",
+ }
+
+ # Act
+ zones.update_zone_attributes(dev_attr)
+
+ # Assert
+ assert len(zones) == 0
+
+    # ADTPulseZones adds a zone when the zone number and all other attributes are present
+    def test_handle_complete_zone_attributes(self):
+        """
+        Test that ADTPulseZones adds a zone when the zone number and all other attributes are present
+        """
+ # Arrange
+ zones = ADTPulseZones()
+ dev_attr = {
+ "name": "Sensor 1",
+ "type_model": "Window Sensor",
+ "zone": "1",
+ "status": "Online",
+ }
+
+ # Act
+ zones.update_zone_attributes(dev_attr)
+
+ # Assert
+ assert len(zones) == 1
+ assert zones[1].name == "Sensor 1"
+ assert zones[1].id_ == "sensor-1"
+ assert zones[1].tags == ADT_NAME_TO_DEFAULT_TAGS["Window"]
+ assert zones[1].status == "Online"
+ assert zones[1].state == "Unknown"
+ assert zones[1].last_activity_timestamp == 0