diff --git a/pyproject.toml b/pyproject.toml
index 658cae8f2d..7a9cfca47d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -15,6 +15,5 @@ exclude = '''
| benchmarks
| hazelcast/protocol/codec
- | tests
)/
'''
diff --git a/tests/__init__.py b/tests/__init__.py
index 3fdc96c28c..1170e06650 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -2,12 +2,15 @@
import subprocess
try:
- output = subprocess.check_output(["git", "show", "-s", "--format=\"%h\""]).decode()
- commit_id = output.strip().replace("\"", "").replace("'", "")
+ output = subprocess.check_output(["git", "show", "-s", '--format="%h"']).decode()
+ commit_id = output.strip().replace('"', "").replace("'", "")
except:
commit_id = ""
logging.basicConfig(
- format='%(asctime)s%(msecs)03d [' + commit_id + '][%(threadName)s][%(name)s] %(levelname)s: %(message)s',
- datefmt="%H:%M:%S,")
+ format="%(asctime)s%(msecs)03d ["
+ + commit_id
+ + "][%(threadName)s][%(name)s] %(levelname)s: %(message)s",
+ datefmt="%H:%M:%S,",
+)
logging.getLogger().setLevel(logging.INFO)
diff --git a/tests/address_test.py b/tests/address_test.py
index 32fdbdc07f..7eae98f368 100644
--- a/tests/address_test.py
+++ b/tests/address_test.py
@@ -18,7 +18,9 @@ def test_v4_address_without_port(self):
self._validate_without_port(self.v4_address, self.v4_address)
def test_v6_address_with_port(self):
- self._validate_with_port("[" + self.v6_address + "]:" + str(self.port), self.v6_address, self.port)
+ self._validate_with_port(
+ "[" + self.v6_address + "]:" + str(self.port), self.v6_address, self.port
+ )
def test_v6_address_without_port(self):
self._validate_without_port(self.v6_address, self.v6_address)
diff --git a/tests/base.py b/tests/base.py
index 5d71c84533..eb9497e4e4 100644
--- a/tests/base.py
+++ b/tests/base.py
@@ -39,7 +39,7 @@ def __init__(self, methodName):
@staticmethod
def create_rc():
- return HzRemoteController('127.0.0.1', 9701)
+ return HzRemoteController("127.0.0.1", 9701)
@classmethod
def create_cluster(cls, rc, config=None):
@@ -79,8 +79,16 @@ def assertSetEventually(self, event, timeout=5):
is_set = event.wait(timeout)
self.assertTrue(is_set, "Event was not set within %d seconds" % timeout)
- def assertEntryEvent(self, event, event_type, key=None, value=None, old_value=None, merging_value=None,
- number_of_affected_entries=1):
+ def assertEntryEvent(
+ self,
+ event,
+ event_type,
+ key=None,
+ value=None,
+ old_value=None,
+ merging_value=None,
+ number_of_affected_entries=1,
+ ):
self.assertEqual(event.key, key)
self.assertEqual(event.event_type, event_type)
@@ -107,6 +115,7 @@ class SingleMemberTestCase(HazelcastTestCase):
"""
Test cases where a single member - client combination is needed
"""
+
rc = None
client = None
diff --git a/tests/client_message_test.py b/tests/client_message_test.py
index bbb795ccd1..4e6ec72749 100644
--- a/tests/client_message_test.py
+++ b/tests/client_message_test.py
@@ -6,9 +6,21 @@
from hazelcast.connection import _Reader
from hazelcast.errors import _ErrorsCodec
from hazelcast.protocol import ErrorHolder
-from hazelcast.protocol.builtin import CodecUtil, FixSizedTypesCodec, ByteArrayCodec, DataCodec, EntryListCodec, \
- StringCodec, EntryListUUIDListIntegerCodec, EntryListUUIDLongCodec, ListMultiFrameCodec, ListIntegerCodec, \
- ListLongCodec, ListUUIDCodec, MapCodec
+from hazelcast.protocol.builtin import (
+ CodecUtil,
+ FixSizedTypesCodec,
+ ByteArrayCodec,
+ DataCodec,
+ EntryListCodec,
+ StringCodec,
+ EntryListUUIDListIntegerCodec,
+ EntryListUUIDLongCodec,
+ ListMultiFrameCodec,
+ ListIntegerCodec,
+ ListLongCodec,
+ ListUUIDCodec,
+ MapCodec,
+)
from hazelcast.protocol.client_message import *
from hazelcast.protocol.codec import client_authentication_codec
from hazelcast.protocol.codec.custom.error_holder_codec import ErrorHolderCodec
@@ -144,9 +156,15 @@ def test_entry_list(self):
EntryListCodec.encode_nullable(self.buf, None, StringCodec.encode, StringCodec.encode, True)
message = self.write_and_decode()
message.next_frame() # initial frame
- self.assertEqual(entries, EntryListCodec.decode(message, StringCodec.decode, StringCodec.decode))
- self.assertEqual(entries, EntryListCodec.decode_nullable(message, StringCodec.decode, StringCodec.decode))
- self.assertIsNone(EntryListCodec.decode_nullable(message, StringCodec.decode, StringCodec.decode))
+ self.assertEqual(
+ entries, EntryListCodec.decode(message, StringCodec.decode, StringCodec.decode)
+ )
+ self.assertEqual(
+ entries, EntryListCodec.decode_nullable(message, StringCodec.decode, StringCodec.decode)
+ )
+ self.assertIsNone(
+ EntryListCodec.decode_nullable(message, StringCodec.decode, StringCodec.decode)
+ )
def test_uuid_integer_list_entry_list(self):
self.mark_initial_frame_as_non_final()
@@ -200,8 +218,12 @@ def test_list(self):
self.assertEqual(l, ListMultiFrameCodec.decode(message, StringCodec.decode))
self.assertEqual(l, ListMultiFrameCodec.decode_nullable(message, StringCodec.decode))
self.assertIsNone(ListMultiFrameCodec.decode_nullable(message, StringCodec.decode))
- self.assertEqual(l, ListMultiFrameCodec.decode_contains_nullable(message, StringCodec.decode))
- self.assertEqual([None], ListMultiFrameCodec.decode_contains_nullable(message, StringCodec.decode))
+ self.assertEqual(
+ l, ListMultiFrameCodec.decode_contains_nullable(message, StringCodec.decode)
+ )
+ self.assertEqual(
+ [None], ListMultiFrameCodec.decode_contains_nullable(message, StringCodec.decode)
+ )
def test_uuid_list(self):
self.mark_initial_frame_as_non_final()
@@ -223,7 +245,9 @@ def test_map(self):
message = self.write_and_decode()
message.next_frame() # initial frame
self.assertEqual(m, MapCodec.decode(message, StringCodec.decode, StringCodec.decode))
- self.assertEqual(m, MapCodec.decode_nullable(message, StringCodec.decode, StringCodec.decode))
+ self.assertEqual(
+ m, MapCodec.decode_nullable(message, StringCodec.decode, StringCodec.decode)
+ )
self.assertIsNone(MapCodec.decode_nullable(message, StringCodec.decode, StringCodec.decode))
def test_string(self):
@@ -262,8 +286,9 @@ def setUp(self):
self.builder = ClientMessageBuilder(lambda m: self.counter.increment())
def test_unfragmented_message(self):
- request = client_authentication_codec.encode_request("dev", "user", "pass", uuid.uuid4(),
- "PYH", 1, "4.0", "python", [])
+ request = client_authentication_codec.encode_request(
+ "dev", "user", "pass", uuid.uuid4(), "PYH", 1, "4.0", "python", []
+ )
self.reader.read(request.buf)
message = self.reader._read_message()
self.builder.on_message(message)
diff --git a/tests/client_test.py b/tests/client_test.py
index 09db994e1f..9a763ace49 100644
--- a/tests/client_test.py
+++ b/tests/client_test.py
@@ -11,14 +11,18 @@ def test_client_only_listens(self):
rc = self.create_rc()
client_heartbeat_seconds = 8
-        cluster_config = """
+        cluster_config = (
%s
- """ % client_heartbeat_seconds
+ """
+ % client_heartbeat_seconds
+ )
cluster = self.create_cluster(rc, cluster_config)
cluster.start_member()
@@ -45,7 +49,7 @@ def event_collector(e):
def message_listener(_):
pass
-
+
topic.add_listener(message_listener)
topic2 = client2.get_topic(key)
@@ -77,29 +81,32 @@ def tearDown(self):
self.shutdown_all_clients()
def test_default_config(self):
- client = self.create_client({
- "cluster_name": self.cluster.id
- })
+ client = self.create_client({"cluster_name": self.cluster.id})
self.assertIsNone(self.get_labels_from_member(client._connection_manager.client_uuid))
def test_provided_labels_are_received(self):
- client = self.create_client({
- "cluster_name": self.cluster.id,
- "labels": [
- "test-label",
- ]
- })
- self.assertEqual(b"test-label", self.get_labels_from_member(client._connection_manager.client_uuid))
+ client = self.create_client(
+ {
+ "cluster_name": self.cluster.id,
+ "labels": [
+ "test-label",
+ ],
+ }
+ )
+ self.assertEqual(
+ b"test-label", self.get_labels_from_member(client._connection_manager.client_uuid)
+ )
def get_labels_from_member(self, client_uuid):
- script = """var clients = instance_0.getClientService().getConnectedClients().toArray();
+ script = """
+ var clients = instance_0.getClientService().getConnectedClients().toArray();
for (i=0; i < clients.length; i++) {
var client = clients[i];
if ("%s".equals(client.getUuid().toString())) {
result = client.getLabels().iterator().next();
break;
}
- }
- """ % str(client_uuid)
+ }""" % str(
+ client_uuid
+ )
return self.rc.executeOnController(self.cluster.id, script, Lang.JAVASCRIPT).result
-
diff --git a/tests/cluster_test.py b/tests/cluster_test.py
index d8dee4ae7e..76a1b58d78 100644
--- a/tests/cluster_test.py
+++ b/tests/cluster_test.py
@@ -32,9 +32,7 @@ def member_added(m):
events.append(m)
config = self.create_config()
- config["membership_listeners"] = [
- (member_added, None)
- ]
+ config["membership_listeners"] = [(member_added, None)]
member = self.cluster.start_member()
@@ -109,9 +107,7 @@ def listener(_):
raise RuntimeError("error")
config = self.create_config()
- config["membership_listeners"] = [
- (listener, listener)
- ]
+ config["membership_listeners"] = [(listener, listener)]
self.cluster.start_member()
self.create_client(config)
@@ -127,7 +123,9 @@ def test_cluster_service_get_members_with_selector(self):
config = self.create_config()
client = self.create_client(config)
- self.assertEqual(0, len(client.cluster_service.get_members(lambda m: member.address != m.address)))
+ self.assertEqual(
+ 0, len(client.cluster_service.get_members(lambda m: member.address != m.address))
+ )
class _MockClusterService(object):
@@ -237,7 +235,9 @@ def test_when_member_started_with_another_port_and_the_same_uuid(self):
added_listener = event_collector()
removed_listener = event_collector()
- self.client.cluster_service.add_listener(member_added=added_listener, member_removed=removed_listener)
+ self.client.cluster_service.add_listener(
+ member_added=added_listener, member_removed=removed_listener
+ )
self.rc.shutdownCluster(self.cluster.id)
# now stop cluster, restart it with the same name and then start member with port 5702
@@ -267,4 +267,7 @@ def _get_config(self, port):
%s
- """ % (port, self.tmp_dir)
+ """ % (
+ port,
+ self.tmp_dir,
+ )
diff --git a/tests/config_test.py b/tests/config_test.py
index 3467f2a5f9..6b62afda00 100644
--- a/tests/config_test.py
+++ b/tests/config_test.py
@@ -1,7 +1,18 @@
import unittest
-from hazelcast.config import _Config, SSLProtocol, ReconnectMode, IntType, InMemoryFormat, EvictionPolicy, \
- IndexConfig, IndexType, UniqueKeyTransformation, QueryConstants, BitmapIndexOptions
+from hazelcast.config import (
+ _Config,
+ SSLProtocol,
+ ReconnectMode,
+ IntType,
+ InMemoryFormat,
+ EvictionPolicy,
+ IndexConfig,
+ IndexType,
+ UniqueKeyTransformation,
+ QueryConstants,
+ BitmapIndexOptions,
+)
from hazelcast.errors import InvalidConfigurationError
from hazelcast.serialization.api import IdentifiedDataSerializable, Portable, StreamSerializer
from hazelcast.serialization.portable.classdef import ClassDefinition
@@ -336,9 +347,7 @@ def test_data_serializable_factories(self):
with self.assertRaises(TypeError):
config.data_serializable_factories = invalid_config
- factories = {1: {
- 2: IdentifiedDataSerializable
- }}
+ factories = {1: {2: IdentifiedDataSerializable}}
config.data_serializable_factories = factories
self.assertEqual(factories, config.data_serializable_factories)
@@ -360,9 +369,7 @@ def test_data_portable_factories(self):
with self.assertRaises(TypeError):
config.portable_factories = invalid_config
- factories = {1: {
- 2: Portable
- }}
+ factories = {1: {2: Portable}}
config.portable_factories = factories
self.assertEqual(factories, config.portable_factories)
@@ -445,9 +452,7 @@ def test_custom_serializers(self):
with self.assertRaises(TypeError):
config.custom_serializers = invalid_config
- serializers = {
- int: StreamSerializer
- }
+ serializers = {int: StreamSerializer}
config.custom_serializers = serializers
self.assertEqual(serializers, config.custom_serializers)
@@ -507,16 +512,18 @@ def test_near_caches_with_a_few_changes(self):
def test_near_caches(self):
config = self.config
- config.near_caches = {"a": {
- "invalidate_on_change": False,
- "in_memory_format": "OBJECT",
- "time_to_live": 100,
- "max_idle": 200,
- "eviction_policy": "RANDOM",
- "eviction_max_size": 1000,
- "eviction_sampling_count": 20,
- "eviction_sampling_pool_size": 15,
- }}
+ config.near_caches = {
+ "a": {
+ "invalidate_on_change": False,
+ "in_memory_format": "OBJECT",
+ "time_to_live": 100,
+ "max_idle": 200,
+ "eviction_policy": "RANDOM",
+ "eviction_max_size": 1000,
+ "eviction_sampling_count": 20,
+ "eviction_sampling_pool_size": 15,
+ }
+ }
nc_config = config.near_caches["a"]
self.assertFalse(nc_config.invalidate_on_change)
self.assertEqual(InMemoryFormat.OBJECT, nc_config.in_memory_format)
@@ -619,10 +626,12 @@ def test_flake_id_generators_with_a_few_changes(self):
def test_flake_id_generators(self):
config = self.config
- config.flake_id_generators = {"a": {
- "prefetch_count": 20,
- "prefetch_validity": 30,
- }}
+ config.flake_id_generators = {
+ "a": {
+ "prefetch_count": 20,
+ "prefetch_validity": 30,
+ }
+ }
fig_config = config.flake_id_generators["a"]
self.assertEqual(20, fig_config.prefetch_count)
self.assertEqual(30, fig_config.prefetch_validity)
@@ -767,13 +776,13 @@ def test_defaults(self):
self.assertEqual(IndexType.SORTED, config.type)
self.assertEqual([], config.attributes)
self.assertEqual(QueryConstants.KEY_ATTRIBUTE_NAME, config.bitmap_index_options.unique_key)
- self.assertEqual(UniqueKeyTransformation.OBJECT, config.bitmap_index_options.unique_key_transformation)
+ self.assertEqual(
+ UniqueKeyTransformation.OBJECT, config.bitmap_index_options.unique_key_transformation
+ )
def test_from_dict(self):
with self.assertRaises(InvalidConfigurationError):
- IndexConfig.from_dict({
- "unknown_key": 1
- })
+ IndexConfig.from_dict({"unknown_key": 1})
def test_from_dict_defaults(self):
config = IndexConfig.from_dict({})
@@ -781,17 +790,23 @@ def test_from_dict_defaults(self):
self.assertEqual(IndexType.SORTED, config.type)
self.assertEqual([], config.attributes)
self.assertEqual(QueryConstants.KEY_ATTRIBUTE_NAME, config.bitmap_index_options.unique_key)
- self.assertEqual(UniqueKeyTransformation.OBJECT, config.bitmap_index_options.unique_key_transformation)
+ self.assertEqual(
+ UniqueKeyTransformation.OBJECT, config.bitmap_index_options.unique_key_transformation
+ )
def test_from_dict_with_changes(self):
- config = IndexConfig.from_dict({
- "name": "test",
- })
+ config = IndexConfig.from_dict(
+ {
+ "name": "test",
+ }
+ )
self.assertEqual("test", config.name)
self.assertEqual(IndexType.SORTED, config.type)
self.assertEqual([], config.attributes)
self.assertEqual(QueryConstants.KEY_ATTRIBUTE_NAME, config.bitmap_index_options.unique_key)
- self.assertEqual(UniqueKeyTransformation.OBJECT, config.bitmap_index_options.unique_key_transformation)
+ self.assertEqual(
+ UniqueKeyTransformation.OBJECT, config.bitmap_index_options.unique_key_transformation
+ )
def test_add_attributes(self):
config = IndexConfig()
@@ -800,7 +815,7 @@ def test_add_attributes(self):
(None, AssertionError),
(" ", ValueError),
("x.", ValueError),
- (" x.x.", ValueError)
+ (" x.x.", ValueError),
]
for attr, error in invalid_attributes:
@@ -817,7 +832,7 @@ def test_with_changes(self):
attributes = ["attr", "attr.nested"]
bio = {
"unique_key": QueryConstants.THIS_ATTRIBUTE_NAME,
- "unique_key_transformation": UniqueKeyTransformation.RAW
+ "unique_key_transformation": UniqueKeyTransformation.RAW,
}
config = IndexConfig(name, idx_type, attributes, bio)
@@ -825,16 +840,18 @@ def test_with_changes(self):
self.assertEqual(idx_type, config.type)
self.assertEqual(attributes, attributes)
self.assertEqual(bio["unique_key"], config.bitmap_index_options.unique_key)
- self.assertEqual(bio["unique_key_transformation"], config.bitmap_index_options.unique_key_transformation)
+ self.assertEqual(
+ bio["unique_key_transformation"], config.bitmap_index_options.unique_key_transformation
+ )
def test_bitmap_index_options(self):
config = IndexConfig()
- config.bitmap_index_options = {
- "unique_key": QueryConstants.THIS_ATTRIBUTE_NAME
- }
+ config.bitmap_index_options = {"unique_key": QueryConstants.THIS_ATTRIBUTE_NAME}
self.assertEqual(QueryConstants.THIS_ATTRIBUTE_NAME, config.bitmap_index_options.unique_key)
- self.assertEqual(UniqueKeyTransformation.OBJECT, config.bitmap_index_options.unique_key_transformation)
+ self.assertEqual(
+ UniqueKeyTransformation.OBJECT, config.bitmap_index_options.unique_key_transformation
+ )
invalid_options = [
({"unique_key": None}, TypeError),
@@ -889,9 +906,7 @@ def test_defaults(self):
def test_from_dict(self):
with self.assertRaises(InvalidConfigurationError):
- BitmapIndexOptions.from_dict({
- "unknown_key": 1
- })
+ BitmapIndexOptions.from_dict({"unknown_key": 1})
def test_from_dict_defaults(self):
options = BitmapIndexOptions.from_dict({})
@@ -899,9 +914,11 @@ def test_from_dict_defaults(self):
self.assertEqual(UniqueKeyTransformation.OBJECT, options.unique_key_transformation)
def test_from_dict_with_changes(self):
- options = BitmapIndexOptions.from_dict({
- "unique_key": QueryConstants.THIS_ATTRIBUTE_NAME,
- })
+ options = BitmapIndexOptions.from_dict(
+ {
+ "unique_key": QueryConstants.THIS_ATTRIBUTE_NAME,
+ }
+ )
self.assertEqual(QueryConstants.THIS_ATTRIBUTE_NAME, options.unique_key)
self.assertEqual(UniqueKeyTransformation.OBJECT, options.unique_key_transformation)
diff --git a/tests/connection_strategy_test.py b/tests/connection_strategy_test.py
index 7a248a5750..65660b7ea6 100644
--- a/tests/connection_strategy_test.py
+++ b/tests/connection_strategy_test.py
@@ -54,12 +54,15 @@ def on_state_change(event):
on_state_change.events = events
return on_state_change
+
event_collector = collector()
- self.client = HazelcastClient(cluster_name=self.cluster.id,
- cluster_members=["localhost:5701"],
- async_start=True,
- lifecycle_listeners=[event_collector])
+ self.client = HazelcastClient(
+ cluster_name=self.cluster.id,
+ cluster_members=["localhost:5701"],
+ async_start=True,
+ lifecycle_listeners=[event_collector],
+ )
self.assertTrueEventually(lambda: self.assertEqual(1, len(event_collector.events)))
self.client.get_map(random_string())
@@ -77,13 +80,16 @@ def on_state_change(event):
on_state_change.events = events
return on_state_change
+
event_collector = collector()
- self.client = HazelcastClient(cluster_members=["localhost:5701"],
- cluster_name=self.cluster.id,
- reconnect_mode=ReconnectMode.OFF,
- cluster_connect_timeout=six.MAXSIZE,
- lifecycle_listeners=[event_collector])
+ self.client = HazelcastClient(
+ cluster_members=["localhost:5701"],
+ cluster_name=self.cluster.id,
+ reconnect_mode=ReconnectMode.OFF,
+ cluster_connect_timeout=six.MAXSIZE,
+ lifecycle_listeners=[event_collector],
+ )
m = self.client.get_map(random_string()).blocking()
# no exception at this point
m.put(1, 1)
@@ -106,13 +112,16 @@ def on_state_change(event):
on_state_change.events = events
return on_state_change
+
disconnected_collector = collector(LifecycleState.DISCONNECTED)
- self.client = HazelcastClient(cluster_members=["localhost:5701"],
- cluster_name=self.cluster.id,
- reconnect_mode=ReconnectMode.ASYNC,
- cluster_connect_timeout=six.MAXSIZE,
- lifecycle_listeners=[disconnected_collector])
+ self.client = HazelcastClient(
+ cluster_members=["localhost:5701"],
+ cluster_name=self.cluster.id,
+ reconnect_mode=ReconnectMode.ASYNC,
+ cluster_connect_timeout=six.MAXSIZE,
+ lifecycle_listeners=[disconnected_collector],
+ )
m = self.client.get_map(random_string()).blocking()
# no exception at this point
m.put(1, 1)
@@ -135,4 +144,3 @@ def test_async_start_with_partition_specific_proxies(self):
with self.assertRaises(ClientOfflineError):
self.client.get_list(random_string())
-
diff --git a/tests/cp_test.py b/tests/cp_test.py
index f0cf8992a0..c0cfadcad8 100644
--- a/tests/cp_test.py
+++ b/tests/cp_test.py
@@ -66,7 +66,9 @@ def test_acquire_session_after_shutdown(self):
def test_acquire_session_with_unknown_group_id(self):
m = self.mock_request_new_session()
- self.assertEqual(self.session_id, self.manager.acquire_session(self.raft_group_id, 3).result())
+ self.assertEqual(
+ self.session_id, self.manager.acquire_session(self.raft_group_id, 3).result()
+ )
self.assertEqual(3, self.get_acquire_count())
m.assert_called_once_with(self.raft_group_id)
@@ -75,7 +77,9 @@ def test_acquire_session_with_existing_invalid_session(self):
state = MagicMock(is_valid=lambda: False)
self.set_session(state)
- self.assertEqual(self.session_id, self.manager.acquire_session(self.raft_group_id, 1).result())
+ self.assertEqual(
+ self.session_id, self.manager.acquire_session(self.raft_group_id, 1).result()
+ )
m.assert_called_once_with(self.raft_group_id)
self.assertEqual(1, self.get_acquire_count())
@@ -83,7 +87,9 @@ def test_acquire_session_for_valid_session(self):
m = self.mock_request_new_session()
self.set_session(self.prepare_state())
- self.assertEqual(self.session_id, self.manager.acquire_session(self.raft_group_id, 10).result())
+ self.assertEqual(
+ self.session_id, self.manager.acquire_session(self.raft_group_id, 10).result()
+ )
m.assert_not_called()
self.assertEqual(10, self.get_acquire_count())
@@ -119,14 +125,18 @@ def test_create_thread_id_after_shutdown(self):
def test_create_thread_id(self):
m = self.mock_request_generate_thread_id(5)
- self.assertEqual(5, self.manager.get_or_create_unique_thread_id(self.raft_group_id).result())
+ self.assertEqual(
+ 5, self.manager.get_or_create_unique_thread_id(self.raft_group_id).result()
+ )
m.assert_called_once_with(self.raft_group_id)
self.assertEqual(5, self.manager._thread_ids.get((self.raft_group_id, thread_id())))
def test_create_thread_id_with_known_group_id(self):
m = self.mock_request_generate_thread_id(12)
self.set_thread_id(13)
- self.assertEqual(13, self.manager.get_or_create_unique_thread_id(self.raft_group_id).result())
+ self.assertEqual(
+ 13, self.manager.get_or_create_unique_thread_id(self.raft_group_id).result()
+ )
m.assert_not_called()
self.assertEqual(13, self.manager._thread_ids.get((self.raft_group_id, thread_id())))
@@ -153,7 +163,8 @@ def test_heartbeat(self):
time.sleep(2)
self.manager.shutdown()
reactor.shutdown()
- self.assertGreater(self.context.reactor.add_timer.call_count, 1) # assert that the heartbeat task is executed
+ # assert that the heartbeat task is executed
+ self.assertGreater(self.context.reactor.add_timer.call_count, 1)
r.assert_called()
r.assert_called_with(self.raft_group_id, self.session_id)
self.assertEqual(1, len(self.manager._sessions))
@@ -165,18 +176,22 @@ def test_heartbeat_when_session_is_released(self):
r = MagicMock(return_value=ImmediateFuture(None))
self.manager._request_heartbeat = r
self.manager.acquire_session(self.raft_group_id, 1).add_done_callback(
- lambda _: self.manager.release_session(self.raft_group_id, self.session_id, 1))
+ lambda _: self.manager.release_session(self.raft_group_id, self.session_id, 1)
+ )
time.sleep(2)
self.manager.shutdown()
reactor.shutdown()
- self.assertGreater(self.context.reactor.add_timer.call_count, 1) # assert that the heartbeat task is executed
+ # assert that the heartbeat task is executed
+ self.assertGreater(self.context.reactor.add_timer.call_count, 1)
r.assert_not_called()
self.assertEqual(1, len(self.manager._sessions))
def test_heartbeat_on_failure(self):
reactor = self.mock_reactor()
self.mock_request_new_session()
- self.manager._request_heartbeat = MagicMock(return_value=ImmediateExceptionFuture(SessionExpiredError()))
+ self.manager._request_heartbeat = MagicMock(
+ return_value=ImmediateExceptionFuture(SessionExpiredError())
+ )
m = MagicMock(side_effect=self.manager.invalidate_session)
self.manager.invalidate_session = m
@@ -185,7 +200,8 @@ def test_heartbeat_on_failure(self):
time.sleep(2)
self.manager.shutdown()
reactor.shutdown()
- self.assertGreater(self.context.reactor.add_timer.call_count, 1) # assert that the heartbeat task is executed
+ # assert that the heartbeat task is executed
+ self.assertGreater(self.context.reactor.add_timer.call_count, 1)
m.assert_called_once_with(self.raft_group_id, self.session_id)
self.assertEqual(0, len(self.manager._sessions))
@@ -197,7 +213,9 @@ def mock(*_, **__):
self.manager._request_generate_thread_id = m
return m
- def mock_request_new_session(self, ):
+ def mock_request_new_session(
+ self,
+ ):
def mock(*_, **__):
d = {
"session_id": self.session_id,
diff --git a/tests/data_test.py b/tests/data_test.py
index e5e8153f5e..12b1ac2c9e 100644
--- a/tests/data_test.py
+++ b/tests/data_test.py
@@ -24,4 +24,4 @@ def test_data(self):
self.assertEqual(0x12345678, self._data.get_partition_hash())
def test_data_len(self):
- self.assertEqual(10, len(Data("1"* 10)))
+ self.assertEqual(10, len(Data("1" * 10)))
diff --git a/tests/discovery/default_address_provider_test.py b/tests/discovery/default_address_provider_test.py
index 2581031b6c..8808dd094d 100644
--- a/tests/discovery/default_address_provider_test.py
+++ b/tests/discovery/default_address_provider_test.py
@@ -16,9 +16,15 @@ def test_load_addresses_with_multiple_addresses(self):
initial_list = ["192.168.0.1:5701", "192.168.0.1:5702", "192.168.0.2:5701"]
provider = DefaultAddressProvider(initial_list)
primaries, secondaries = provider.load_addresses()
- six.assertCountEqual(self, primaries, [Address("192.168.0.1", 5701),
- Address("192.168.0.1", 5702),
- Address("192.168.0.2", 5701)])
+ six.assertCountEqual(
+ self,
+ primaries,
+ [
+ Address("192.168.0.1", 5701),
+ Address("192.168.0.1", 5702),
+ Address("192.168.0.2", 5701),
+ ],
+ )
six.assertCountEqual(self, secondaries, [])
# we deal with duplicate addresses in the ConnectionManager#_get_possible_addresses
@@ -26,8 +32,9 @@ def test_load_addresses_with_duplicate_addresses(self):
initial_list = ["192.168.0.1:5701", "192.168.0.1:5701"]
provider = DefaultAddressProvider(initial_list)
primaries, secondaries = provider.load_addresses()
- six.assertCountEqual(self, primaries, [Address("192.168.0.1", 5701),
- Address("192.168.0.1", 5701)])
+ six.assertCountEqual(
+ self, primaries, [Address("192.168.0.1", 5701), Address("192.168.0.1", 5701)]
+ )
six.assertCountEqual(self, secondaries, [])
def test_load_addresses_with_empty_addresses(self):
@@ -35,14 +42,18 @@ def test_load_addresses_with_empty_addresses(self):
provider = DefaultAddressProvider(initial_list)
primaries, secondaries = provider.load_addresses()
six.assertCountEqual(self, primaries, [Address("127.0.0.1", 5701)])
- six.assertCountEqual(self, secondaries, [Address("127.0.0.1", 5702), Address("127.0.0.1", 5703)])
+ six.assertCountEqual(
+ self, secondaries, [Address("127.0.0.1", 5702), Address("127.0.0.1", 5703)]
+ )
def test_load_addresses_without_port(self):
initial_list = ["192.168.0.1"]
provider = DefaultAddressProvider(initial_list)
primaries, secondaries = provider.load_addresses()
six.assertCountEqual(self, primaries, [Address("192.168.0.1", 5701)])
- six.assertCountEqual(self, secondaries, [Address("192.168.0.1", 5702), Address("192.168.0.1", 5703)])
+ six.assertCountEqual(
+ self, secondaries, [Address("192.168.0.1", 5702), Address("192.168.0.1", 5703)]
+ )
def test_translate(self):
provider = DefaultAddressProvider([])
diff --git a/tests/discovery/hazelcast_cloud_discovery_test.py b/tests/discovery/hazelcast_cloud_discovery_test.py
index e36333daa2..ac69cb5f46 100644
--- a/tests/discovery/hazelcast_cloud_discovery_test.py
+++ b/tests/discovery/hazelcast_cloud_discovery_test.py
@@ -29,28 +29,35 @@
HOST = "localhost"
-ADDRESSES = {Address("10.47.0.8", 32298): Address("54.213.63.142", 32298),
- Address("10.47.0.9", 32298): Address("54.245.77.185", 32298),
- Address("10.47.0.10", 32298): Address("54.186.232.37", 32298)}
+ADDRESSES = {
+ Address("10.47.0.8", 32298): Address("54.213.63.142", 32298),
+ Address("10.47.0.9", 32298): Address("54.245.77.185", 32298),
+ Address("10.47.0.10", 32298): Address("54.186.232.37", 32298),
+}
-PRIVATE_LINK_ADDRESSES = {Address("100.96.5.1", 5701): Address("10.113.44.139", 31115),
- Address("100.96.4.2", 5701): Address("10.113.44.130", 31115)}
+PRIVATE_LINK_ADDRESSES = {
+ Address("100.96.5.1", 5701): Address("10.113.44.139", 31115),
+ Address("100.96.4.2", 5701): Address("10.113.44.130", 31115),
+}
class CloudHTTPHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(self):
idx = self.path.find("=")
if idx > 0:
- if self.path[:idx + 1] == CLOUD_URL:
+ if self.path[: idx + 1] == CLOUD_URL:
# Found a cluster with the given token
- token = self.path[idx + 1:]
+ token = self.path[idx + 1 :]
if token == TOKEN:
self._set_response(200, RESPONSE)
elif token == PRIVATE_LINK_TOKEN:
self._set_response(200, PRIVATE_LINK_RESPONSE)
# Can not find a cluster with the given token
else:
- self._set_response(404, '{"message":"Cluster with token: ' + self.path[idx + 1:] + ' not found."}')
+ self._set_response(
+ 404,
+ '{"message":"Cluster with token: ' + self.path[idx + 1 :] + ' not found."}',
+ )
else:
# Wrong URL
self._set_response(404, "default backend - 404")
@@ -67,9 +74,12 @@ class Server(object):
def __init__(self):
self.server = BaseHTTPServer.HTTPServer((HOST, 0), CloudHTTPHandler)
- self.server.socket = ssl.wrap_socket(self.server.socket, get_abs_path(self.cur_dir, "key.pem"),
- get_abs_path(self.cur_dir, "cert.pem"),
- server_side=True)
+ self.server.socket = ssl.wrap_socket(
+ self.server.socket,
+ get_abs_path(self.cur_dir, "key.pem"),
+ get_abs_path(self.cur_dir, "cert.pem"),
+ server_side=True,
+ )
self.port = self.server.socket.getsockname()[1]
def start_server(self):
diff --git a/tests/future_test.py b/tests/future_test.py
index 4850f78178..e42f1a1f38 100644
--- a/tests/future_test.py
+++ b/tests/future_test.py
@@ -3,7 +3,13 @@
import unittest
from threading import Thread, Event
-from hazelcast.future import Future, ImmediateFuture, combine_futures, make_blocking, ImmediateExceptionFuture
+from hazelcast.future import (
+ Future,
+ ImmediateFuture,
+ combine_futures,
+ make_blocking,
+ ImmediateExceptionFuture,
+)
from hazelcast import six
from hazelcast.six.moves import range
diff --git a/tests/hazelcast_json_value_test.py b/tests/hazelcast_json_value_test.py
index a802c30bfb..0c53187a51 100644
--- a/tests/hazelcast_json_value_test.py
+++ b/tests/hazelcast_json_value_test.py
@@ -27,7 +27,7 @@ def test_hazelcast_json_value_construction_with_json_serializable_object(self):
def test_hazelcast_json_value_construction_with_non_json_serializable_object(self):
class A(object):
def __init__(self):
- self.b = 'c'
+ self.b = "c"
with self.assertRaises(TypeError):
HazelcastJsonValue(A())
diff --git a/tests/heartbeat_test.py b/tests/heartbeat_test.py
index 9dbdf549ef..6551cfafce 100644
--- a/tests/heartbeat_test.py
+++ b/tests/heartbeat_test.py
@@ -16,9 +16,9 @@ def tearDownClass(cls):
def setUp(self):
self.cluster = self.create_cluster(self.rc)
self.member = self.rc.startMember(self.cluster.id)
- self.client = HazelcastClient(cluster_name=self.cluster.id,
- heartbeat_interval=0.5,
- heartbeat_timeout=2)
+ self.client = HazelcastClient(
+ cluster_name=self.cluster.id, heartbeat_interval=0.5, heartbeat_timeout=2
+ )
def tearDown(self):
self.client.shutdown()
@@ -42,7 +42,9 @@ def collector(c, *_):
connection_added_collector = connection_collector()
connection_removed_collector = connection_collector()
- self.client._connection_manager.add_listener(connection_added_collector, connection_removed_collector)
+ self.client._connection_manager.add_listener(
+ connection_added_collector, connection_removed_collector
+ )
self.simulate_heartbeat_lost(self.client, addr, 2)
@@ -51,8 +53,12 @@ def assert_heartbeat_stopped_and_restored():
self.assertEqual(1, len(connection_removed_collector.connections))
stopped_connection = connection_added_collector.connections[0]
restored_connection = connection_removed_collector.connections[0]
- self.assertEqual(stopped_connection.connected_address, Address(member2.host, member2.port))
- self.assertEqual(restored_connection.connected_address, Address(member2.host, member2.port))
+ self.assertEqual(
+ stopped_connection.connected_address, Address(member2.host, member2.port)
+ )
+ self.assertEqual(
+ restored_connection.connected_address, Address(member2.host, member2.port)
+ )
self.assertTrueEventually(assert_heartbeat_stopped_and_restored)
diff --git a/tests/hzrc/RemoteController.py b/tests/hzrc/RemoteController.py
index 39a82a1d08..d1d21af41f 100644
--- a/tests/hzrc/RemoteController.py
+++ b/tests/hzrc/RemoteController.py
@@ -1,3 +1,4 @@
+# fmt: off
#
# Autogenerated by Thrift Compiler (0.13.0)
#
@@ -2967,3 +2968,4 @@ def __ne__(self, other):
)
fix_spec(all_structs)
del all_structs
+# fmt: on
diff --git a/tests/hzrc/__init__.py b/tests/hzrc/__init__.py
index f8773b31a6..95b2221815 100644
--- a/tests/hzrc/__init__.py
+++ b/tests/hzrc/__init__.py
@@ -1 +1 @@
-__all__ = ['ttypes', 'constants', 'RemoteController']
+__all__ = ["ttypes", "constants", "RemoteController"]
diff --git a/tests/hzrc/client.py b/tests/hzrc/client.py
index 959a682a1d..11fd58c1bb 100644
--- a/tests/hzrc/client.py
+++ b/tests/hzrc/client.py
@@ -22,7 +22,7 @@ def __init__(self, host, port):
# Connect!
transport.open()
except Thrift.TException:
- self.logger.exception('Something went wrong while connecting to remote controller.')
+ self.logger.exception("Something went wrong while connecting to remote controller.")
def ping(self):
return self.remote_controller.ping()
diff --git a/tests/hzrc/constants.py b/tests/hzrc/constants.py
index b941bb8a2b..0f5c1d92f2 100644
--- a/tests/hzrc/constants.py
+++ b/tests/hzrc/constants.py
@@ -1,3 +1,4 @@
+# fmt: off
#
# Autogenerated by Thrift Compiler (0.13.0)
#
@@ -12,3 +13,4 @@
import sys
from .ttypes import *
+# fmt: on
diff --git a/tests/hzrc/ttypes.py b/tests/hzrc/ttypes.py
index de7a0463d1..8e1d01de72 100644
--- a/tests/hzrc/ttypes.py
+++ b/tests/hzrc/ttypes.py
@@ -1,3 +1,4 @@
+# fmt: off
#
# Autogenerated by Thrift Compiler (0.13.0)
#
@@ -336,3 +337,4 @@ def __ne__(self, other):
)
fix_spec(all_structs)
del all_structs
+# fmt: on
diff --git a/tests/invocation_test.py b/tests/invocation_test.py
index cdbd2bcb13..2fbe191705 100644
--- a/tests/invocation_test.py
+++ b/tests/invocation_test.py
@@ -95,7 +95,9 @@ def test_notify_backup_complete_with_pending_acks(self):
def test_notify_backup_complete_when_all_acks_are_received(self):
_, service = self._start_service()
message = "x"
- invocation = MagicMock(backup_acks_received=1, backup_acks_expected=2, pending_response=message)
+ invocation = MagicMock(
+ backup_acks_received=1, backup_acks_expected=2, pending_response=message
+ )
service._notify_backup_complete(invocation)
invocation.set_response.assert_called_once_with(message)
self.assertEqual(2, invocation.backup_acks_received)
@@ -108,34 +110,52 @@ def test_backup_handler_when_all_acks_are_received(self):
def test_backup_handler_when_all_acks_are_not_received_and_not_reached_timeout(self):
_, service = self._start_service()
- invocation = MagicMock(backup_acks_received=1, backup_acks_expected=2, pending_response="x",
- pending_response_received_time=40)
+ invocation = MagicMock(
+ backup_acks_received=1,
+ backup_acks_expected=2,
+ pending_response="x",
+ pending_response_received_time=40,
+ )
service._detect_and_handle_backup_timeout(invocation, 1) # expiration_time = 40 + 5 > 1
invocation.set_response.assert_not_called()
def test_backup_handler_when_all_acks_are_not_received_and_reached_timeout(self):
_, service = self._start_service()
message = "x"
- invocation = MagicMock(backup_acks_received=1, backup_acks_expected=2, pending_response=message,
- pending_response_received_time=40)
+ invocation = MagicMock(
+ backup_acks_received=1,
+ backup_acks_expected=2,
+ pending_response=message,
+ pending_response_received_time=40,
+ )
service._detect_and_handle_backup_timeout(invocation, 46) # expiration_time = 40 + 5 < 46
invocation.set_response.assert_called_once_with(message)
- def test_backup_handler_when_all_acks_are_not_received_and_reached_timeout_with_fail_on_indeterminate_state(self):
+ def test_backup_handler_when_all_acks_are_not_received_and_reached_timeout_with_fail_on_indeterminate_state(
+ self,
+ ):
_, service = self._start_service()
service._fail_on_indeterminate_state = True
- invocation = MagicMock(backup_acks_received=1, backup_acks_expected=2, pending_response="x",
- pending_response_received_time=40)
+ invocation = MagicMock(
+ backup_acks_received=1,
+ backup_acks_expected=2,
+ pending_response="x",
+ pending_response_received_time=40,
+ )
service._detect_and_handle_backup_timeout(invocation, 46) # expiration_time = 40 + 5 < 46
invocation.set_response.assert_not_called()
invocation.set_exception.assert_called_once()
- self.assertIsInstance(invocation.set_exception.call_args[0][0], IndeterminateOperationStateError)
+ self.assertIsInstance(
+ invocation.set_exception.call_args[0][0], IndeterminateOperationStateError
+ )
def _start_service(self, config=_Config()):
c = MagicMock(config=config)
invocation_service = InvocationService(c, c._reactor)
self.service = invocation_service
- invocation_service.init(c._internal_partition_service, c._connection_manager, c._listener_service)
+ invocation_service.init(
+ c._internal_partition_service, c._connection_manager, c._listener_service
+ )
invocation_service.start()
return c, invocation_service
diff --git a/tests/lifecycle_test.py b/tests/lifecycle_test.py
index 746bfb6510..3eef40facd 100644
--- a/tests/lifecycle_test.py
+++ b/tests/lifecycle_test.py
@@ -17,38 +17,54 @@ def tearDown(self):
def test_lifecycle_listener_receives_events_in_order(self):
collector = event_collector()
self.cluster.start_member()
- client = self.create_client({
- "cluster_name": self.cluster.id,
- "lifecycle_listeners": [
- collector,
- ]
- })
+ client = self.create_client(
+ {
+ "cluster_name": self.cluster.id,
+ "lifecycle_listeners": [
+ collector,
+ ],
+ }
+ )
client.shutdown()
- self.assertEqual(collector.events,
- [LifecycleState.STARTING, LifecycleState.STARTED, LifecycleState.CONNECTED,
- LifecycleState.SHUTTING_DOWN, LifecycleState.DISCONNECTED, LifecycleState.SHUTDOWN])
+ self.assertEqual(
+ collector.events,
+ [
+ LifecycleState.STARTING,
+ LifecycleState.STARTED,
+ LifecycleState.CONNECTED,
+ LifecycleState.SHUTTING_DOWN,
+ LifecycleState.DISCONNECTED,
+ LifecycleState.SHUTDOWN,
+ ],
+ )
def test_lifecycle_listener_receives_events_in_order_after_startup(self):
self.cluster.start_member()
collector = event_collector()
- client = self.create_client({
- "cluster_name": self.cluster.id,
- })
+ client = self.create_client(
+ {
+ "cluster_name": self.cluster.id,
+ }
+ )
client.lifecycle_service.add_listener(collector)
client.shutdown()
- self.assertEqual(collector.events,
- [LifecycleState.SHUTTING_DOWN, LifecycleState.DISCONNECTED, LifecycleState.SHUTDOWN])
+ self.assertEqual(
+ collector.events,
+ [LifecycleState.SHUTTING_DOWN, LifecycleState.DISCONNECTED, LifecycleState.SHUTDOWN],
+ )
def test_lifecycle_listener_receives_disconnected_event(self):
member = self.cluster.start_member()
collector = event_collector()
- client = self.create_client({
- "cluster_name": self.cluster.id,
- })
+ client = self.create_client(
+ {
+ "cluster_name": self.cluster.id,
+ }
+ )
client.lifecycle_service.add_listener(collector)
member.shutdown()
self.assertEqual(collector.events, [LifecycleState.DISCONNECTED])
@@ -58,9 +74,11 @@ def test_remove_lifecycle_listener(self):
collector = event_collector()
self.cluster.start_member()
- client = self.create_client({
- "cluster_name": self.cluster.id,
- })
+ client = self.create_client(
+ {
+ "cluster_name": self.cluster.id,
+ }
+ )
registration_id = client.lifecycle_service.add_listener(collector)
client.lifecycle_service.remove_listener(registration_id)
client.shutdown()
@@ -70,10 +88,13 @@ def test_remove_lifecycle_listener(self):
def test_exception_in_listener(self):
def listener(_):
raise RuntimeError("error")
+
self.cluster.start_member()
- self.create_client({
- "cluster_name": self.cluster.id,
- "lifecycle_listeners": [
- listener,
- ],
- })
+ self.create_client(
+ {
+ "cluster_name": self.cluster.id,
+ "lifecycle_listeners": [
+ listener,
+ ],
+ }
+ )
diff --git a/tests/listener_test.py b/tests/listener_test.py
index 0b0638909e..8be0569fac 100644
--- a/tests/listener_test.py
+++ b/tests/listener_test.py
@@ -1,5 +1,10 @@
from tests.base import HazelcastTestCase
-from tests.util import random_string, event_collector, generate_key_owned_by_instance, wait_for_partition_table
+from tests.util import (
+ random_string,
+ event_collector,
+ generate_key_owned_by_instance,
+ wait_for_partition_table,
+)
class ListenerTest(HazelcastTestCase):
@@ -24,13 +29,14 @@ def test_smart_listener_remove_member(self):
wait_for_partition_table(client)
key_m1 = generate_key_owned_by_instance(client, self.m1.uuid)
map = client.get_map(random_string()).blocking()
- map.put(key_m1, 'value1')
+ map.put(key_m1, "value1")
map.add_entry_listener(updated_func=self.collector)
self.m1.shutdown()
- map.put(key_m1, 'value2')
+ map.put(key_m1, "value2")
def assert_event():
self.assertEqual(1, len(self.collector.events))
+
self.assertTrueEventually(assert_event)
def test_non_smart_listener_remove_member(self):
@@ -42,10 +48,11 @@ def test_non_smart_listener_remove_member(self):
wait_for_partition_table(client)
generated_key = generate_key_owned_by_instance(client, self.m1.uuid)
- map.put(generated_key, 'value')
+ map.put(generated_key, "value")
def assert_event():
self.assertEqual(1, len(self.collector.events))
+
self.assertTrueEventually(assert_event)
# -------------------------- test_add_member ----------------------- #
@@ -57,10 +64,11 @@ def test_smart_listener_add_member(self):
m3 = self.cluster.start_member()
wait_for_partition_table(client)
key_m3 = generate_key_owned_by_instance(client, m3.uuid)
- map.put(key_m3, 'value')
+ map.put(key_m3, "value")
def assert_event():
self.assertEqual(1, len(self.collector.events))
+
self.assertTrueEventually(assert_event)
def test_non_smart_listener_add_member(self):
@@ -71,8 +79,9 @@ def test_non_smart_listener_add_member(self):
m3 = self.cluster.start_member()
wait_for_partition_table(client)
key_m3 = generate_key_owned_by_instance(client, m3.uuid)
- map.put(key_m3, 'value')
+ map.put(key_m3, "value")
def assert_event():
self.assertEqual(1, len(self.collector.events))
+
self.assertTrueEventually(assert_event)
diff --git a/tests/logger_test.py b/tests/logger_test.py
index b9149645ce..db5c9fbaea 100644
--- a/tests/logger_test.py
+++ b/tests/logger_test.py
@@ -63,4 +63,3 @@ def test_logging_when_handlers_are_added_to_root_logger(self):
finally:
logger.setLevel(original_level)
logger.removeHandler(handler)
-
diff --git a/tests/near_cache_test.py b/tests/near_cache_test.py
index 3a6462eced..b56f1d1668 100644
--- a/tests/near_cache_test.py
+++ b/tests/near_cache_test.py
@@ -27,13 +27,17 @@ def test_DataRecord_max_idle_seconds(self):
self.assertTrue(data_rec.is_expired(max_idle_seconds=1))
def test_put_get_data(self):
- near_cache = self.create_near_cache(self.service, InMemoryFormat.BINARY, 1000, 1000, EvictionPolicy.LRU, 1000)
+ near_cache = self.create_near_cache(
+ self.service, InMemoryFormat.BINARY, 1000, 1000, EvictionPolicy.LRU, 1000
+ )
key_data = self.service.to_data("key")
near_cache[key_data] = "value"
self.assertEqual("value", near_cache[key_data])
def test_put_get(self):
- near_cache = self.create_near_cache(self.service, InMemoryFormat.OBJECT, 1000, 1000, EvictionPolicy.LRU, 1000)
+ near_cache = self.create_near_cache(
+ self.service, InMemoryFormat.OBJECT, 1000, 1000, EvictionPolicy.LRU, 1000
+ )
for i in range(0, 10000):
key = "key-{}".format(i)
value = "value-{}".format(i)
@@ -43,7 +47,9 @@ def test_put_get(self):
self.assertGreaterEqual(near_cache.eviction_max_size * 1.1, near_cache.__len__())
def test_expiry_time(self):
- near_cache = self.create_near_cache(self.service, InMemoryFormat.OBJECT, 1, 1000, EvictionPolicy.LRU, 1000)
+ near_cache = self.create_near_cache(
+ self.service, InMemoryFormat.OBJECT, 1, 1000, EvictionPolicy.LRU, 1000
+ )
for i in range(0, 1000):
key = "key-{}".format(i)
value = "value-{}".format(i)
@@ -59,7 +65,9 @@ def test_expiry_time(self):
self.assertGreater(expire, 8)
def test_max_idle_time(self):
- near_cache = self.create_near_cache(self.service, InMemoryFormat.OBJECT, 1000, 2, EvictionPolicy.LRU, 1000)
+ near_cache = self.create_near_cache(
+ self.service, InMemoryFormat.OBJECT, 1000, 2, EvictionPolicy.LRU, 1000
+ )
for i in range(0, 1000):
key = "key-{}".format(i)
value = "value-{}".format(i)
@@ -72,7 +80,9 @@ def test_max_idle_time(self):
self.assertEqual(expire, near_cache.eviction_sampling_count)
def test_LRU_time(self):
- near_cache = self.create_near_cache(self.service, InMemoryFormat.OBJECT, 1000, 1000, EvictionPolicy.LRU, 10000, 16, 16)
+ near_cache = self.create_near_cache(
+ self.service, InMemoryFormat.OBJECT, 1000, 1000, EvictionPolicy.LRU, 10000, 16, 16
+ )
for i in range(0, 10000):
key = "key-{}".format(i)
value = "value-{}".format(i)
@@ -88,7 +98,9 @@ def test_LRU_time(self):
self.assertLess(evict, 10000)
def test_LRU_time_with_update(self):
- near_cache = self.create_near_cache(self.service, InMemoryFormat.OBJECT, 1000, 1000, EvictionPolicy.LRU, 10, 10, 10)
+ near_cache = self.create_near_cache(
+ self.service, InMemoryFormat.OBJECT, 1000, 1000, EvictionPolicy.LRU, 10, 10, 10
+ )
for i in range(0, 10):
key = "key-{}".format(i)
value = "value-{}".format(i)
@@ -104,7 +116,9 @@ def test_LRU_time_with_update(self):
val = near_cache["key-9"]
def test_LFU_time(self):
- near_cache = self.create_near_cache(self.service, InMemoryFormat.BINARY, 1000, 1000, EvictionPolicy.LFU, 1000)
+ near_cache = self.create_near_cache(
+ self.service, InMemoryFormat.BINARY, 1000, 1000, EvictionPolicy.LFU, 1000
+ )
for i in range(0, 1000):
key = "key-{}".format(i)
value = "value-{}".format(i)
@@ -121,7 +135,9 @@ def test_LFU_time(self):
self.assertLess(evict, 1000)
def test_RANDOM_time(self):
- near_cache = self.create_near_cache(self.service, InMemoryFormat.BINARY, 1000, 1000, EvictionPolicy.LFU, 1000)
+ near_cache = self.create_near_cache(
+ self.service, InMemoryFormat.BINARY, 1000, 1000, EvictionPolicy.LFU, 1000
+ )
for i in range(0, 2000):
key = "key-{}".format(i)
value = "value-{}".format(i)
@@ -131,7 +147,26 @@ def test_RANDOM_time(self):
self.assertEqual(expire, 0)
self.assertGreaterEqual(evict, 1000)
- def create_near_cache(self, service, im_format, ttl, max_idle, policy, max_size, eviction_sampling_count=None,
- eviction_sampling_pool_size=None):
- return NearCache("default", service, im_format, ttl, max_idle, True, policy, max_size, eviction_sampling_count,
- eviction_sampling_pool_size)
+ def create_near_cache(
+ self,
+ service,
+ im_format,
+ ttl,
+ max_idle,
+ policy,
+ max_size,
+ eviction_sampling_count=None,
+ eviction_sampling_pool_size=None,
+ ):
+ return NearCache(
+ "default",
+ service,
+ im_format,
+ ttl,
+ max_idle,
+ True,
+ policy,
+ max_size,
+ eviction_sampling_count,
+ eviction_sampling_pool_size,
+ )
diff --git a/tests/predicate_test.py b/tests/predicate_test.py
index 799ad9940d..e2ab3032d7 100644
--- a/tests/predicate_test.py
+++ b/tests/predicate_test.py
@@ -1,9 +1,27 @@
import os
from unittest import TestCase
-from hazelcast.predicate import equal, and_, between, less, \
- less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, \
- ilike, regex, sql, true, false, in_, instance_of, paging
+from hazelcast.predicate import (
+ equal,
+ and_,
+ between,
+ less,
+ less_or_equal,
+ greater,
+ greater_or_equal,
+ or_,
+ not_equal,
+ not_,
+ like,
+ ilike,
+ regex,
+ sql,
+ true,
+ false,
+ in_,
+ instance_of,
+ paging,
+)
from hazelcast.serialization.api import Portable, IdentifiedDataSerializable
from hazelcast.util import IterationType
from tests.base import SingleMemberTestCase, HazelcastTestCase
@@ -20,8 +38,11 @@ def test_sql(self):
def test_and(self):
predicate = and_(equal("this", "value-1"), equal("this", "value-2"))
- self.assertEqual(str(predicate), "AndPredicate(EqualPredicate(attribute='this', value=value-1),"
- " EqualPredicate(attribute='this', value=value-2))")
+ self.assertEqual(
+ str(predicate),
+ "AndPredicate(EqualPredicate(attribute='this', value=value-1),"
+ " EqualPredicate(attribute='this', value=value-2))",
+ )
def test_between(self):
predicate = between("this", 1, 20)
@@ -33,8 +54,10 @@ def test_equal_str(self):
def test_greater_less(self):
predicate = less_or_equal("this", 10)
- self.assertEqual(str(predicate),
- "GreaterLessPredicate(attribute='this', value=10, is_equal=True, is_less=True)")
+ self.assertEqual(
+ str(predicate),
+ "GreaterLessPredicate(attribute='this', value=10, is_equal=True, is_less=True)",
+ )
def test_like(self):
predicate = like("this", "a%")
@@ -58,12 +81,18 @@ def test_not_equal(self):
def test_not(self):
predicate = not_(equal("this", "value-1"))
- self.assertEqual(str(predicate), "NotPredicate(predicate=EqualPredicate(attribute='this', value=value-1))")
+ self.assertEqual(
+ str(predicate),
+ "NotPredicate(predicate=EqualPredicate(attribute='this', value=value-1))",
+ )
def test_or(self):
predicate = or_(equal("this", "value-1"), equal("this", "value-2"))
- self.assertEqual(str(predicate), "OrPredicate(EqualPredicate(attribute='this', value=value-1),"
- " EqualPredicate(attribute='this', value=value-2))")
+ self.assertEqual(
+ str(predicate),
+ "OrPredicate(EqualPredicate(attribute='this', value=value-1),"
+ " EqualPredicate(attribute='this', value=value-2))",
+ )
def test_regex(self):
predicate = regex("this", "c[ar].*")
@@ -79,7 +108,10 @@ def test_false(self):
def test_paging(self):
predicate = paging(true(), 5)
- self.assertEqual(str(predicate), "PagingPredicate(predicate=TruePredicate(), page_size=5, comparator=None)")
+ self.assertEqual(
+ str(predicate),
+ "PagingPredicate(predicate=TruePredicate(), page_size=5, comparator=None)",
+ )
class PredicateTest(SingleMemberTestCase):
@@ -234,11 +266,7 @@ class PredicatePortableTest(SingleMemberTestCase):
@classmethod
def configure_client(cls, config):
config["cluster_name"] = cls.cluster.id
- config["portable_factories"] = {
- FACTORY_ID: {
- InnerPortable.CLASS_ID: InnerPortable
- }
- }
+ config["portable_factories"] = {FACTORY_ID: {InnerPortable.CLASS_ID: InnerPortable}}
return config
def setUp(self):
@@ -248,7 +276,10 @@ def tearDown(self):
self.map.destroy()
def _fill_map(self, count=1000):
- m = {InnerPortable("key-%d" % x, x): InnerPortable("value-%d" % x, x) for x in range(0, count)}
+ m = {
+ InnerPortable("key-%d" % x, x): InnerPortable("value-%d" % x, x)
+ for x in range(0, count)
+ }
self.map.put_all(m)
return m
@@ -288,7 +319,10 @@ def read_portable(self, reader):
self.limb = reader.read_portable("limb")
def __eq__(self, other):
- return isinstance(other, self.__class__) and (self.name, self.limb) == (other.name, other.limb)
+ return isinstance(other, self.__class__) and (self.name, self.limb) == (
+ other.name,
+ other.limb,
+ )
class Limb(Portable):
def __init__(self, name=None):
@@ -325,8 +359,12 @@ def configure_client(cls, config):
def setUp(self):
self.map = self.client.get_map(random_string()).blocking()
- self.map.put(1, NestedPredicatePortableTest.Body("body1", NestedPredicatePortableTest.Limb("hand")))
- self.map.put(2, NestedPredicatePortableTest.Body("body2", NestedPredicatePortableTest.Limb("leg")))
+ self.map.put(
+ 1, NestedPredicatePortableTest.Body("body1", NestedPredicatePortableTest.Limb("hand"))
+ )
+ self.map.put(
+ 2, NestedPredicatePortableTest.Body("body2", NestedPredicatePortableTest.Limb("leg"))
+ )
def tearDown(self):
self.map.destroy()
@@ -376,7 +414,9 @@ def tearDownClass(cls):
@staticmethod
def configure_cluster():
current_directory = os.path.dirname(__file__)
- with open(get_abs_path(os.path.join(current_directory, "proxy"), "hazelcast.xml"), "r") as f:
+ with open(
+ get_abs_path(os.path.join(current_directory, "proxy"), "hazelcast.xml"), "r"
+ ) as f:
return f.read()
def test_with_inner_paging_predicate(self):
@@ -399,19 +439,19 @@ def test_previous_page_when_index_is_zero(self):
def test_entry_set_with_paging_predicate(self):
self.fill_map(3)
- entry_set = self.map.entry_set(paging(greater_or_equal('this', 2), 1))
+ entry_set = self.map.entry_set(paging(greater_or_equal("this", 2), 1))
self.assertEqual(len(entry_set), 1)
- self.assertEqual(entry_set[0], ('key-2', 2))
+ self.assertEqual(entry_set[0], ("key-2", 2))
def test_key_set_with_paging_predicate(self):
self.fill_map(3)
- key_set = self.map.key_set(paging(greater_or_equal('this', 2), 1))
+ key_set = self.map.key_set(paging(greater_or_equal("this", 2), 1))
self.assertEqual(len(key_set), 1)
- self.assertEqual(key_set[0], 'key-2')
+ self.assertEqual(key_set[0], "key-2")
def test_values_with_paging_predicate(self):
self.fill_map(3)
- values = self.map.values(paging(greater_or_equal('this', 2), 1))
+ values = self.map.values(paging(greater_or_equal("this", 2), 1))
self.assertEqual(len(values), 1)
self.assertEqual(values[0], 2)
@@ -422,40 +462,40 @@ def test_with_none_inner_predicate(self):
def test_first_page(self):
self.fill_map()
- predicate = paging(greater_or_equal('this', 40), 2)
+ predicate = paging(greater_or_equal("this", 40), 2)
self.assertEqual(self.map.values(predicate), [40, 41])
def test_next_page(self):
self.fill_map()
- predicate = paging(greater_or_equal('this', 40), 2)
+ predicate = paging(greater_or_equal("this", 40), 2)
predicate.next_page()
self.assertEqual(self.map.values(predicate), [42, 43])
def test_set_page(self):
self.fill_map()
- predicate = paging(greater_or_equal('this', 40), 2)
+ predicate = paging(greater_or_equal("this", 40), 2)
predicate.page = 4
self.assertEqual(self.map.values(predicate), [48, 49])
def test_get_page(self):
- predicate = paging(greater_or_equal('this', 40), 2)
+ predicate = paging(greater_or_equal("this", 40), 2)
predicate.page = 4
self.assertEqual(predicate.page, 4)
def test_page_size(self):
- predicate = paging(greater_or_equal('this', 40), 2)
+ predicate = paging(greater_or_equal("this", 40), 2)
self.assertEqual(predicate.page_size, 2)
def test_previous_page(self):
self.fill_map()
- predicate = paging(greater_or_equal('this', 40), 2)
+ predicate = paging(greater_or_equal("this", 40), 2)
predicate.page = 4
predicate.previous_page()
self.assertEqual(self.map.values(predicate), [46, 47])
def test_get_4th_then_previous_page(self):
self.fill_map()
- predicate = paging(greater_or_equal('this', 40), 2)
+ predicate = paging(greater_or_equal("this", 40), 2)
predicate.page = 4
self.map.values(predicate)
predicate.previous_page()
@@ -463,7 +503,7 @@ def test_get_4th_then_previous_page(self):
def test_get_3rd_then_next_page(self):
self.fill_map()
- predicate = paging(greater_or_equal('this', 40), 2)
+ predicate = paging(greater_or_equal("this", 40), 2)
predicate.page = 3
self.map.values(predicate)
predicate.next_page()
@@ -472,21 +512,21 @@ def test_get_3rd_then_next_page(self):
def test_set_nonexistent_page(self):
# Trying to get page 10, which is out of range, should return empty list.
self.fill_map()
- predicate = paging(greater_or_equal('this', 40), 2)
+ predicate = paging(greater_or_equal("this", 40), 2)
predicate.page = 10
self.assertEqual(self.map.values(predicate), [])
def test_nonexistent_previous_page(self):
# Trying to get previous page while already at first page should return first page.
self.fill_map()
- predicate = paging(greater_or_equal('this', 40), 2)
+ predicate = paging(greater_or_equal("this", 40), 2)
predicate.previous_page()
self.assertEqual(self.map.values(predicate), [40, 41])
def test_nonexistent_next_page(self):
# Trying to get next page while already at last page should return empty list.
self.fill_map()
- predicate = paging(greater_or_equal('this', 40), 2)
+ predicate = paging(greater_or_equal("this", 40), 2)
predicate.page = 4
predicate.next_page()
self.assertEqual(self.map.values(predicate), [])
@@ -494,13 +534,13 @@ def test_nonexistent_next_page(self):
def test_get_half_full_last_page(self):
# Page size set to 2, but last page only has 1 element.
self.fill_map()
- predicate = paging(greater_or_equal('this', 41), 2)
+ predicate = paging(greater_or_equal("this", 41), 2)
predicate.page = 4
self.assertEqual(self.map.values(predicate), [49])
def test_reset(self):
self.fill_map()
- predicate = paging(greater_or_equal('this', 40), 2)
+ predicate = paging(greater_or_equal("this", 40), 2)
self.assertEqual(self.map.values(predicate), [40, 41])
predicate.next_page()
self.assertEqual(self.map.values(predicate), [42, 43])
@@ -509,7 +549,7 @@ def test_reset(self):
def test_empty_map(self):
# Empty map should return empty list.
- predicate = paging(greater_or_equal('this', 30), 2)
+ predicate = paging(greater_or_equal("this", 30), 2)
self.assertEqual(self.map.values(predicate), [])
def test_equal_values_paging(self):
@@ -518,7 +558,7 @@ def test_equal_values_paging(self):
m = {"key-%d" % i: i - 50 for i in range(50, 100)}
self.map.put_all(m)
- predicate = paging(less_or_equal('this', 8), 5)
+ predicate = paging(less_or_equal("this", 8), 5)
self.assertEqual(self.map.values(predicate), [0, 0, 1, 1, 2])
predicate.next_page()
@@ -535,11 +575,10 @@ def test_entry_set_with_custom_comparator(self):
def entries(start, end):
return list(
sorted(
- map(lambda k: (k, m[k]),
- filter(lambda k: start <= m[k] < end, m)
- ),
+ map(lambda k: (k, m[k]), filter(lambda k: start <= m[k] < end, m)),
key=lambda e: e[1],
- reverse=True)
+ reverse=True,
+ )
)
self.assertEqual(entries(5, 10), self.map.entry_set(predicate))
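The paging-predicate hunks above are formatting-only, but together they walk through the whole cursor API: a paging predicate wraps an inner predicate plus a page size, and the current page is moved via the page attribute, next_page(), and previous_page(). A minimal usage sketch under the same assumptions as these tests (the hazelcast.predicate import path and the default-connection HazelcastClient() call are assumptions, not part of this patch):

    # Illustrative sketch only; not part of the patch.
    from hazelcast import HazelcastClient
    from hazelcast.predicate import greater_or_equal, paging  # import path assumed

    client = HazelcastClient()  # assumes a locally reachable member
    squares = client.get_map("squares").blocking()
    for i in range(50):
        squares.put("key-%d" % i, i)

    predicate = paging(greater_or_equal("this", 40), 2)  # inner predicate + page size
    print(squares.values(predicate))  # first page -> [40, 41]
    predicate.next_page()
    print(squares.values(predicate))  # second page -> [42, 43]
    predicate.page = 4  # jump straight to the last page
    print(squares.values(predicate))  # [48, 49]
    client.shutdown()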
diff --git a/tests/proxy/cp/atomic_long_test.py b/tests/proxy/cp/atomic_long_test.py
index 3811a7548d..0c42d21011 100644
--- a/tests/proxy/cp/atomic_long_test.py
+++ b/tests/proxy/cp/atomic_long_test.py
@@ -82,10 +82,10 @@ def test_get(self):
self.assertEqual(0, self.atomic_long.get())
self.atomic_long.set(11)
self.assertEqual(11, self.atomic_long.get())
- long_max = 2**63 - 1
+ long_max = 2 ** 63 - 1
self.atomic_long.set(long_max)
self.assertEqual(long_max, self.atomic_long.get())
- long_min = -2**63
+ long_min = -(2 ** 63)
self.atomic_long.set(long_min)
self.assertEqual(long_min, self.atomic_long.get())
diff --git a/tests/proxy/cp/count_down_latch_test.py b/tests/proxy/cp/count_down_latch_test.py
index b05da4a549..adb2770acf 100644
--- a/tests/proxy/cp/count_down_latch_test.py
+++ b/tests/proxy/cp/count_down_latch_test.py
@@ -17,7 +17,9 @@
class CountDownLatchTest(CPTestCase):
def test_latch_in_another_group(self):
latch = self._get_latch()
- another_latch = self.client.cp_subsystem.get_count_down_latch(latch._proxy_name + "@another").blocking()
+ another_latch = self.client.cp_subsystem.get_count_down_latch(
+ latch._proxy_name + "@another"
+ ).blocking()
another_latch.try_set_count(42)
self.assertEqual(42, another_latch.get_count())
@@ -116,7 +118,8 @@ def mock(expected_round, invocation_uuid):
latch._wrapped._request_count_down = mock
latch.count_down()
- self.assertEqual(3, called_count.get()) # Will resolve on it's third call. First 2 throws timeout error
+        # Will resolve on its third call. The first 2 calls throw a timeout error
+ self.assertEqual(3, called_count.get())
self.assertEqual(0, latch.get_count())
def test_get_count(self):
diff --git a/tests/proxy/cp/fenced_lock_test.py b/tests/proxy/cp/fenced_lock_test.py
index 448643e8c7..ea2c9c5e33 100644
--- a/tests/proxy/cp/fenced_lock_test.py
+++ b/tests/proxy/cp/fenced_lock_test.py
@@ -5,9 +5,15 @@
from hazelcast import HazelcastClient
from hazelcast.cp import LOCK_SERVICE
-from hazelcast.errors import DistributedObjectDestroyedError, IllegalMonitorStateError, \
- LockOwnershipLostError, LockAcquireLimitReachedError, SessionExpiredError, WaitKeyCancelledError, \
- HazelcastRuntimeError
+from hazelcast.errors import (
+ DistributedObjectDestroyedError,
+ IllegalMonitorStateError,
+ LockOwnershipLostError,
+ LockAcquireLimitReachedError,
+ SessionExpiredError,
+ WaitKeyCancelledError,
+ HazelcastRuntimeError,
+)
from hazelcast.future import ImmediateFuture, ImmediateExceptionFuture
from hazelcast.protocol import RaftGroupId
from hazelcast.proxy.cp.fenced_lock import FencedLock
@@ -25,7 +31,9 @@ def tearDown(self):
self.lock.destroy()
def test_lock_in_another_group(self):
- another_lock = self.client.cp_subsystem.get_lock(self.lock._proxy_name + "@another").blocking()
+ another_lock = self.client.cp_subsystem.get_lock(
+ self.lock._proxy_name + "@another"
+ ).blocking()
self.assert_valid_fence(another_lock.lock())
try:
self.assertTrue(another_lock.is_locked())
@@ -249,11 +257,16 @@ def setUp(self):
self.acquire_session = MagicMock()
self.release_session = MagicMock()
self.invalidate_session = MagicMock()
- self.session_manager = MagicMock(acquire_session=self.acquire_session, release_session=self.release_session,
- invalidate_session=self.invalidate_session)
+ self.session_manager = MagicMock(
+ acquire_session=self.acquire_session,
+ release_session=self.release_session,
+ invalidate_session=self.invalidate_session,
+ )
context = MagicMock(proxy_session_manager=self.session_manager)
group_id = RaftGroupId("test", 0, 42)
- self.proxy = FencedLock(context, group_id, LOCK_SERVICE, "mylock@mygroup", "mylock").blocking()
+ self.proxy = FencedLock(
+ context, group_id, LOCK_SERVICE, "mylock@mygroup", "mylock"
+ ).blocking()
def test_lock(self):
# Everything succeeds
@@ -606,7 +619,9 @@ def test_is_locked_by_current_thread_when_server_closes_old_session(self):
self.assert_call_counts(0, 0, 0)
self.assert_no_lock_session_id()
- def test_is_locked_by_current_thread_when_server_returns_a_different_thread_id_for_lock_holder(self):
+ def test_is_locked_by_current_thread_when_server_returns_a_different_thread_id_for_lock_holder(
+ self,
+ ):
# Client thinks that it holds the lock, but server
# says it's not.
self.prepare_get_session(1)
diff --git a/tests/proxy/cp/semaphore_test.py b/tests/proxy/cp/semaphore_test.py
index 9e4a04510a..b270c05932 100644
--- a/tests/proxy/cp/semaphore_test.py
+++ b/tests/proxy/cp/semaphore_test.py
@@ -7,8 +7,13 @@
from hazelcast import HazelcastClient
from hazelcast.cp import SEMAPHORE_SERVICE
-from hazelcast.errors import DistributedObjectDestroyedError, IllegalStateError, HazelcastRuntimeError, \
- SessionExpiredError, WaitKeyCancelledError
+from hazelcast.errors import (
+ DistributedObjectDestroyedError,
+ IllegalStateError,
+ HazelcastRuntimeError,
+ SessionExpiredError,
+ WaitKeyCancelledError,
+)
from hazelcast.future import ImmediateExceptionFuture, ImmediateFuture
from hazelcast.protocol import RaftGroupId
from hazelcast.proxy.cp.semaphore import SessionlessSemaphore, SessionAwareSemaphore
@@ -33,7 +38,9 @@ def tearDown(self):
@parameterized.expand(SEMAPHORE_TYPES)
def test_semaphore_in_another_group(self, semaphore_type):
semaphore = self.get_semaphore(semaphore_type, 1)
- another_semaphore = self.client.cp_subsystem.get_semaphore(semaphore._proxy_name + "@another").blocking()
+ another_semaphore = self.client.cp_subsystem.get_semaphore(
+ semaphore._proxy_name + "@another"
+ ).blocking()
self.assertEqual(1, semaphore.available_permits())
self.assertEqual(0, another_semaphore.available_permits())
@@ -59,7 +66,9 @@ def test_use_after_destroy(self, semaphore_type):
def test_session_aware_semaphore_after_client_shutdown(self):
semaphore = self.get_semaphore("sessionaware", 1)
another_client = HazelcastClient(cluster_name=self.cluster.id)
- another_semaphore = another_client.cp_subsystem.get_semaphore(semaphore._proxy_name).blocking()
+ another_semaphore = another_client.cp_subsystem.get_semaphore(
+ semaphore._proxy_name
+ ).blocking()
another_semaphore.acquire(1)
self.assertEqual(0, another_semaphore.available_permits())
self.assertEqual(0, semaphore.available_permits())
@@ -229,7 +238,9 @@ def test_release(self, semaphore_type):
def test_release_when_acquired_by_another_client_sessionless(self):
semaphore = self.get_semaphore("sessionless")
another_client = HazelcastClient(cluster_name=self.cluster.id)
- another_semaphore = another_client.cp_subsystem.get_semaphore(semaphore._proxy_name).blocking()
+ another_semaphore = another_client.cp_subsystem.get_semaphore(
+ semaphore._proxy_name
+ ).blocking()
self.assertTrue(another_semaphore.init(1))
another_semaphore.acquire()
@@ -279,7 +290,9 @@ def test_try_acquire_when_not_enough_permits_with_timeout(self, semaphore_type):
self.assertEqual(1, semaphore.available_permits())
def get_semaphore(self, semaphore_type, initialize_with=None):
- semaphore = self.client.cp_subsystem.get_semaphore(semaphore_type + random_string()).blocking()
+ semaphore = self.client.cp_subsystem.get_semaphore(
+ semaphore_type + random_string()
+ ).blocking()
if initialize_with is not None:
semaphore.init(initialize_with)
self.semaphore = semaphore
@@ -359,9 +372,13 @@ def get_semaphore(self, semaphore_type):
proxy_name = "semaphore@mygroup"
object_name = "semaphore"
if semaphore_type == "sessionless":
- return SessionlessSemaphore(context, self.group_id, SEMAPHORE_SERVICE, proxy_name, object_name)
+ return SessionlessSemaphore(
+ context, self.group_id, SEMAPHORE_SERVICE, proxy_name, object_name
+ )
elif semaphore_type == "sessionaware":
- return SessionAwareSemaphore(context, self.group_id, SEMAPHORE_SERVICE, proxy_name, object_name)
+ return SessionAwareSemaphore(
+ context, self.group_id, SEMAPHORE_SERVICE, proxy_name, object_name
+ )
else:
self.fail("Unknown semaphore type")
@@ -374,12 +391,16 @@ def setUp(self):
self.acquire_session = MagicMock()
self.release_session = MagicMock()
self.invalidate_session = MagicMock()
- self.session_manager = MagicMock(acquire_session=self.acquire_session, release_session=self.release_session,
- invalidate_session=self.invalidate_session)
+ self.session_manager = MagicMock(
+ acquire_session=self.acquire_session,
+ release_session=self.release_session,
+ invalidate_session=self.invalidate_session,
+ )
context = MagicMock(proxy_session_manager=self.session_manager)
self.group_id = RaftGroupId("test", 0, 42)
- self.semaphore = SessionAwareSemaphore(context, self.group_id, SEMAPHORE_SERVICE, "semaphore@mygroup",
- "semaphore").blocking()
+ self.semaphore = SessionAwareSemaphore(
+ context, self.group_id, SEMAPHORE_SERVICE, "semaphore@mygroup", "semaphore"
+ ).blocking()
def test_acquire(self):
# Everything works
@@ -705,8 +726,13 @@ class SessionlessSemaphoreProxy(unittest.TestCase):
def setUp(self):
self.session_manager = MagicMock()
self.context = MagicMock(proxy_session_manager=self.session_manager)
- self.semaphore = SessionlessSemaphore(self.context, RaftGroupId("name", 0, 42), SEMAPHORE_SERVICE,
- "semaphore@mygroup", "semaphore").blocking()
+ self.semaphore = SessionlessSemaphore(
+ self.context,
+ RaftGroupId("name", 0, 42),
+ SEMAPHORE_SERVICE,
+ "semaphore@mygroup",
+ "semaphore",
+ ).blocking()
def test_acquire(self):
# Everything works
diff --git a/tests/proxy/distributed_objects_test.py b/tests/proxy/distributed_objects_test.py
index 15fec68dbb..e7f8bd3a62 100644
--- a/tests/proxy/distributed_objects_test.py
+++ b/tests/proxy/distributed_objects_test.py
@@ -15,9 +15,7 @@ class DistributedObjectsTest(SingleMemberTestCase):
def setUpClass(cls):
cls.rc = cls.create_rc()
cls.cluster = cls.create_cluster(cls.rc, cls.configure_cluster())
- cls.config = {
- "cluster_name": cls.cluster.id
- }
+ cls.config = {"cluster_name": cls.cluster.id}
@classmethod
def tearDownClass(cls):
@@ -61,7 +59,9 @@ def test_add_distributed_object_listener_object_created(self):
def assert_event():
self.assertEqual(1, len(collector.events))
event = collector.events[0]
- self.assertDistributedObjectEvent(event, "test-map", MAP_SERVICE, DistributedObjectEventType.CREATED)
+ self.assertDistributedObjectEvent(
+ event, "test-map", MAP_SERVICE, DistributedObjectEventType.CREATED
+ )
self.assertTrueEventually(assert_event)
@@ -76,7 +76,9 @@ def test_add_distributed_object_listener_object_destroyed(self):
def assert_event():
self.assertEqual(1, len(collector.events))
event = collector.events[0]
- self.assertDistributedObjectEvent(event, "test-map", MAP_SERVICE, DistributedObjectEventType.DESTROYED)
+ self.assertDistributedObjectEvent(
+ event, "test-map", MAP_SERVICE, DistributedObjectEventType.DESTROYED
+ )
self.assertTrueEventually(assert_event)
@@ -91,10 +93,12 @@ def assert_event():
self.assertEqual(2, len(collector.events))
created_event = collector.events[0]
destroyed_event = collector.events[1]
- self.assertDistributedObjectEvent(created_event, "test-map", MAP_SERVICE,
- DistributedObjectEventType.CREATED)
- self.assertDistributedObjectEvent(destroyed_event, "test-map", MAP_SERVICE,
- DistributedObjectEventType.DESTROYED)
+ self.assertDistributedObjectEvent(
+ created_event, "test-map", MAP_SERVICE, DistributedObjectEventType.CREATED
+ )
+ self.assertDistributedObjectEvent(
+ destroyed_event, "test-map", MAP_SERVICE, DistributedObjectEventType.DESTROYED
+ )
self.assertTrueEventually(assert_event)
@@ -106,7 +110,9 @@ def test_remove_distributed_object_listener(self):
def assert_event():
self.assertEqual(1, len(collector.events))
event = collector.events[0]
- self.assertDistributedObjectEvent(event, "test-map", MAP_SERVICE, DistributedObjectEventType.CREATED)
+ self.assertDistributedObjectEvent(
+ event, "test-map", MAP_SERVICE, DistributedObjectEventType.CREATED
+ )
self.assertTrueEventually(assert_event)
diff --git a/tests/proxy/executor_test.py b/tests/proxy/executor_test.py
index a4bb0f2400..d8bcdd9c4e 100644
--- a/tests/proxy/executor_test.py
+++ b/tests/proxy/executor_test.py
@@ -7,6 +7,7 @@
class _AppendTask(IdentifiedDataSerializable):
"""Client side version of com.hazelcast.client.test.executor.tasks.AppendCallable"""
+
def __init__(self, message):
self.message = message
@@ -56,7 +57,7 @@ def test_execute_on_member(self):
member = self.client.cluster_service.get_members()[0]
result = self.executor.execute_on_member(member, self.task)
self.assertEqual(self.message + _APPENDAGE, result)
-
+
def test_execute_on_members(self):
members = self.client.cluster_service.get_members()
result = self.executor.execute_on_members(members, self.task)
diff --git a/tests/proxy/flake_id_generator_test.py b/tests/proxy/flake_id_generator_test.py
index 91bc2dcf43..b5cde1df97 100644
--- a/tests/proxy/flake_id_generator_test.py
+++ b/tests/proxy/flake_id_generator_test.py
@@ -234,9 +234,11 @@ def test_new_id_fails_when_all_members_are_out_of_node_id_range(self):
client.shutdown()
def _assign_out_of_range_node_id(self, cluster_id, instance_id):
- script = "def assign_out_of_range_node_id():\n" \
- "\tinstance_{}.getCluster().getLocalMember().setMemberListJoinVersion(100000)\n" \
- "\treturn instance_{}.getCluster().getLocalMember().getMemberListJoinVersion()\n" \
+ script = (
+ "def assign_out_of_range_node_id():\n"
+ "\tinstance_{}.getCluster().getLocalMember().setMemberListJoinVersion(100000)\n"
+ "\treturn instance_{}.getCluster().getLocalMember().getMemberListJoinVersion()\n"
"result = str(assign_out_of_range_node_id())\n".format(instance_id, instance_id)
+ )
return self.rc.executeOnController(cluster_id, script, Lang.PYTHON)
diff --git a/tests/proxy/list_test.py b/tests/proxy/list_test.py
index b56e03de5b..d57983df01 100644
--- a/tests/proxy/list_test.py
+++ b/tests/proxy/list_test.py
@@ -16,7 +16,7 @@ def configure_client(cls, config):
def test_add_entry_listener_item_added(self):
collector = event_collector()
self.list.add_listener(include_value=False, item_added_func=collector)
- self.list.add('item-value')
+ self.list.add("item-value")
def assert_event():
self.assertEqual(len(collector.events), 1)
@@ -29,12 +29,12 @@ def assert_event():
def test_add_entry_listener_item_added_include_value(self):
collector = event_collector()
self.list.add_listener(include_value=True, item_added_func=collector)
- self.list.add('item-value')
+ self.list.add("item-value")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEqual(event.item, 'item-value')
+ self.assertEqual(event.item, "item-value")
self.assertEqual(event.event_type, ItemEventType.ADDED)
self.assertTrueEventually(assert_event, 5)
@@ -42,8 +42,8 @@ def assert_event():
def test_add_entry_listener_item_removed(self):
collector = event_collector()
self.list.add_listener(include_value=False, item_removed_func=collector)
- self.list.add('item-value')
- self.list.remove('item-value')
+ self.list.add("item-value")
+ self.list.remove("item-value")
def assert_event():
self.assertEqual(len(collector.events), 1)
@@ -56,13 +56,13 @@ def assert_event():
def test_add_entry_listener_item_removed_include_value(self):
collector = event_collector()
self.list.add_listener(include_value=True, item_removed_func=collector)
- self.list.add('item-value')
- self.list.remove('item-value')
+ self.list.add("item-value")
+ self.list.remove("item-value")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEqual(event.item, 'item-value')
+ self.assertEqual(event.item, "item-value")
self.assertEqual(event.event_type, ItemEventType.REMOVED)
self.assertTrueEventually(assert_event, 5)
@@ -71,7 +71,7 @@ def test_remove_entry_listener_item_added(self):
collector = event_collector()
reg_id = self.list.add_listener(include_value=False, item_added_func=collector)
self.list.remove_listener(reg_id)
- self.list.add('item-value')
+ self.list.add("item-value")
def assert_event():
self.assertEqual(len(collector.events), 0)
diff --git a/tests/proxy/map_nearcache_test.py b/tests/proxy/map_nearcache_test.py
index b230d67127..3f619b5ed5 100644
--- a/tests/proxy/map_nearcache_test.py
+++ b/tests/proxy/map_nearcache_test.py
@@ -19,9 +19,7 @@ def configure_cluster(cls):
@classmethod
def configure_client(cls, config):
config["cluster_name"] = cls.cluster.id
- config["near_caches"] = {
- random_string(): {}
- }
+ config["near_caches"] = {random_string(): {}}
return config
def setUp(self):
@@ -71,9 +69,16 @@ def assertion():
def test_invalidate_nonexist_key(self):
self._fill_map_and_near_cache(10)
initial_cache_size = len(self.map._near_cache)
- script = """map = instance_0.getMap("{}");map.put("key-99","x");map.put("key-NonExist","x");map.remove("key-NonExist")"""\
- .format(self.map.name)
- response = self.rc.executeOnController(self.cluster.id, script, Lang.PYTHON)
+ script = (
+ """
+ var map = instance_0.getMap("%s");
+ map.put("key-99","x");
+ map.put("key-NonExist","x");
+ map.remove("key-NonExist");"""
+ % self.map.name
+ )
+
+ response = self.rc.executeOnController(self.cluster.id, script, Lang.JAVASCRIPT)
self.assertTrue(response.success)
self.assertEqual(initial_cache_size, 10)
diff --git a/tests/proxy/map_test.py b/tests/proxy/map_test.py
index 4ff05ea49b..1dd20c7a80 100644
--- a/tests/proxy/map_test.py
+++ b/tests/proxy/map_test.py
@@ -44,9 +44,7 @@ def configure_cluster(cls):
def configure_client(cls, config):
config["cluster_name"] = cls.cluster.id
config["data_serializable_factories"] = {
- EntryProcessor.FACTORY_ID: {
- EntryProcessor.CLASS_ID: EntryProcessor
- }
+ EntryProcessor.FACTORY_ID: {EntryProcessor.CLASS_ID: EntryProcessor}
}
return config
@@ -59,102 +57,124 @@ def tearDown(self):
def test_add_entry_listener_item_added(self):
collector = event_collector()
self.map.add_entry_listener(include_value=True, added_func=collector)
- self.map.put('key', 'value')
+ self.map.put("key", "value")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEntryEvent(event, key='key', event_type=EntryEventType.ADDED, value='value')
+ self.assertEntryEvent(event, key="key", event_type=EntryEventType.ADDED, value="value")
self.assertTrueEventually(assert_event, 5)
def test_add_entry_listener_item_removed(self):
collector = event_collector()
self.map.add_entry_listener(include_value=True, removed_func=collector)
- self.map.put('key', 'value')
- self.map.remove('key')
+ self.map.put("key", "value")
+ self.map.remove("key")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEntryEvent(event, key='key', event_type=EntryEventType.REMOVED, old_value='value')
+ self.assertEntryEvent(
+ event, key="key", event_type=EntryEventType.REMOVED, old_value="value"
+ )
self.assertTrueEventually(assert_event, 5)
def test_add_entry_listener_item_updated(self):
collector = event_collector()
self.map.add_entry_listener(include_value=True, updated_func=collector)
- self.map.put('key', 'value')
- self.map.put('key', 'new_value')
+ self.map.put("key", "value")
+ self.map.put("key", "new_value")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEntryEvent(event, key='key', event_type=EntryEventType.UPDATED, old_value='value',
- value='new_value')
+ self.assertEntryEvent(
+ event,
+ key="key",
+ event_type=EntryEventType.UPDATED,
+ old_value="value",
+ value="new_value",
+ )
self.assertTrueEventually(assert_event, 5)
def test_add_entry_listener_item_expired(self):
collector = event_collector()
self.map.add_entry_listener(include_value=True, expired_func=collector)
- self.map.put('key', 'value', ttl=0.1)
+ self.map.put("key", "value", ttl=0.1)
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEntryEvent(event, key='key', event_type=EntryEventType.EXPIRED, old_value='value')
+ self.assertEntryEvent(
+ event, key="key", event_type=EntryEventType.EXPIRED, old_value="value"
+ )
self.assertTrueEventually(assert_event, 10)
def test_add_entry_listener_with_key(self):
collector = event_collector()
- self.map.add_entry_listener(key='key1', include_value=True, added_func=collector)
- self.map.put('key2', 'value2')
- self.map.put('key1', 'value1')
+ self.map.add_entry_listener(key="key1", include_value=True, added_func=collector)
+ self.map.put("key2", "value2")
+ self.map.put("key1", "value1")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEntryEvent(event, key='key1', event_type=EntryEventType.ADDED, value='value1')
+ self.assertEntryEvent(
+ event, key="key1", event_type=EntryEventType.ADDED, value="value1"
+ )
self.assertTrueEventually(assert_event, 5)
def test_add_entry_listener_with_predicate(self):
collector = event_collector()
- self.map.add_entry_listener(predicate=sql("this == value1"), include_value=True, added_func=collector)
- self.map.put('key2', 'value2')
- self.map.put('key1', 'value1')
+ self.map.add_entry_listener(
+ predicate=sql("this == value1"), include_value=True, added_func=collector
+ )
+ self.map.put("key2", "value2")
+ self.map.put("key1", "value1")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEntryEvent(event, key='key1', event_type=EntryEventType.ADDED, value='value1')
+ self.assertEntryEvent(
+ event, key="key1", event_type=EntryEventType.ADDED, value="value1"
+ )
self.assertTrueEventually(assert_event, 5)
def test_add_entry_listener_with_key_and_predicate(self):
collector = event_collector()
- self.map.add_entry_listener(key='key1', predicate=sql("this == value3"),
- include_value=True, added_func=collector)
- self.map.put('key2', 'value2')
- self.map.put('key1', 'value1')
- self.map.remove('key1')
- self.map.put('key1', 'value3')
+ self.map.add_entry_listener(
+ key="key1", predicate=sql("this == value3"), include_value=True, added_func=collector
+ )
+ self.map.put("key2", "value2")
+ self.map.put("key1", "value1")
+ self.map.remove("key1")
+ self.map.put("key1", "value3")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEntryEvent(event, key='key1', event_type=EntryEventType.ADDED, value='value3')
+ self.assertEntryEvent(
+ event, key="key1", event_type=EntryEventType.ADDED, value="value3"
+ )
self.assertTrueEventually(assert_event, 5)
def test_add_index(self):
self.map.add_index(attributes=["this"])
self.map.add_index(attributes=["this"], index_type=IndexType.HASH)
- self.map.add_index(attributes=["this"], index_type=IndexType.BITMAP, bitmap_index_options={
- "unique_key": "this",
- })
+ self.map.add_index(
+ attributes=["this"],
+ index_type=IndexType.BITMAP,
+ bitmap_index_options={
+ "unique_key": "this",
+ },
+ )
def test_add_index_duplicate_fields(self):
with self.assertRaises(ValueError):
@@ -369,7 +389,7 @@ def test_put_get(self):
self.assertEqual(self.map.get("key"), "value")
def test_put_get2(self):
- val = "x"*5000
+ val = "x" * 5000
self.assertIsNone(self.map.put("key-x", val))
self.assertEqual(self.map.get("key-x"), val)
@@ -408,10 +428,10 @@ def test_remove_entry_listener(self):
collector = event_collector()
reg_id = self.map.add_entry_listener(added_func=collector)
- self.map.put('key', 'value')
+ self.map.put("key", "value")
self.assertTrueEventually(lambda: self.assertEqual(len(collector.events), 1))
self.map.remove_entry_listener(reg_id)
- self.map.put('key2', 'value')
+ self.map.put("key2", "value")
time.sleep(1)
self.assertEqual(len(collector.events), 1)
@@ -546,32 +566,32 @@ def test_load_all_with_no_args_loads_all_keys(self):
def test_load_all_with_key_set_loads_given_keys(self):
self.map.evict_all()
- self.map.load_all(['key0', 'key1'])
- entry_set = self.map.get_all(['key0', 'key1'])
- six.assertCountEqual(self, entry_set, {'key0': 'val0', 'key1': 'val1'})
+ self.map.load_all(["key0", "key1"])
+ entry_set = self.map.get_all(["key0", "key1"])
+ six.assertCountEqual(self, entry_set, {"key0": "val0", "key1": "val1"})
def test_load_all_overrides_entries_in_memory_by_default(self):
self.map.evict_all()
- self.map.put_transient('key0', 'new0')
- self.map.put_transient('key1', 'new1')
- self.map.load_all(['key0', 'key1'])
- entry_set = self.map.get_all(['key0', 'key1'])
- six.assertCountEqual(self, entry_set, {'key0': 'val0', 'key1': 'val1'})
+ self.map.put_transient("key0", "new0")
+ self.map.put_transient("key1", "new1")
+ self.map.load_all(["key0", "key1"])
+ entry_set = self.map.get_all(["key0", "key1"])
+ six.assertCountEqual(self, entry_set, {"key0": "val0", "key1": "val1"})
def test_load_all_with_replace_existing_false_does_not_override(self):
self.map.evict_all()
- self.map.put_transient('key0', 'new0')
- self.map.put_transient('key1', 'new1')
- self.map.load_all(['key0', 'key1'], replace_existing_values=False)
- entry_set = self.map.get_all(['key0', 'key1'])
- six.assertCountEqual(self, entry_set, {'key0': 'new0', 'key1': 'new1'})
+ self.map.put_transient("key0", "new0")
+ self.map.put_transient("key1", "new1")
+ self.map.load_all(["key0", "key1"], replace_existing_values=False)
+ entry_set = self.map.get_all(["key0", "key1"])
+ six.assertCountEqual(self, entry_set, {"key0": "new0", "key1": "new1"})
def test_evict(self):
- self.map.evict('key0')
+ self.map.evict("key0")
self.assertEqual(self.map.size(), 9)
def test_evict_non_existing_key(self):
- self.map.evict('non_existing_key')
+ self.map.evict("non_existing_key")
self.assertEqual(self.map.size(), 10)
def test_evict_all(self):
@@ -581,14 +601,14 @@ def test_evict_all(self):
def test_add_entry_listener_item_loaded(self):
collector = event_collector()
self.map.add_entry_listener(include_value=True, loaded_func=collector)
- self.map.put('key', 'value', ttl=0.1)
+ self.map.put("key", "value", ttl=0.1)
time.sleep(2)
- self.map.get('key')
+ self.map.get("key")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEntryEvent(event, key='key', value='value', event_type=EntryEventType.LOADED)
+ self.assertEntryEvent(event, key="key", value="value", event_type=EntryEventType.LOADED)
self.assertTrueEventually(assert_event, 10)
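The entry-listener hunks above exercise the keyword-based listener API (added_func, removed_func, updated_func, optional key and predicate filters). A short sketch of the same pattern outside the test harness, assuming the hazelcast.predicate import path and a default HazelcastClient() connection (both assumptions, not shown in this diff):

    # Illustrative sketch only; not part of the patch.
    from hazelcast import HazelcastClient
    from hazelcast.predicate import sql  # import path assumed

    client = HazelcastClient()
    demo_map = client.get_map("listener-demo").blocking()

    def on_added(event):
        # EntryEvent carries key, value, old_value and event_type, as asserted in the tests
        print("added:", event.key, event.value)

    # Only fire for key1, and only when the stored value matches the SQL predicate.
    registration_id = demo_map.add_entry_listener(
        key="key1", predicate=sql("this == value1"), include_value=True, added_func=on_added
    )
    demo_map.put("key1", "value1")
    demo_map.remove_entry_listener(registration_id)
    client.shutdown()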
diff --git a/tests/proxy/multi_map_test.py b/tests/proxy/multi_map_test.py
index da3475468f..3e50298e38 100644
--- a/tests/proxy/multi_map_test.py
+++ b/tests/proxy/multi_map_test.py
@@ -25,51 +25,57 @@ def tearDown(self):
def test_add_entry_listener_item_added(self):
collector = event_collector()
self.multi_map.add_entry_listener(include_value=True, added_func=collector)
- self.multi_map.put('key', 'value')
+ self.multi_map.put("key", "value")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEntryEvent(event, key='key', event_type=EntryEventType.ADDED, value='value')
+ self.assertEntryEvent(event, key="key", event_type=EntryEventType.ADDED, value="value")
self.assertTrueEventually(assert_event, 5)
def test_add_entry_listener_item_removed(self):
collector = event_collector()
self.multi_map.add_entry_listener(include_value=True, removed_func=collector)
- self.multi_map.put('key', 'value')
- self.multi_map.remove('key', 'value')
+ self.multi_map.put("key", "value")
+ self.multi_map.remove("key", "value")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEntryEvent(event, key='key', event_type=EntryEventType.REMOVED, old_value='value')
+ self.assertEntryEvent(
+ event, key="key", event_type=EntryEventType.REMOVED, old_value="value"
+ )
self.assertTrueEventually(assert_event, 5)
def test_add_entry_listener_clear_all(self):
collector = event_collector()
self.multi_map.add_entry_listener(include_value=True, clear_all_func=collector)
- self.multi_map.put('key', 'value')
+ self.multi_map.put("key", "value")
self.multi_map.clear()
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEntryEvent(event, event_type=EntryEventType.CLEAR_ALL, number_of_affected_entries=1)
+ self.assertEntryEvent(
+ event, event_type=EntryEventType.CLEAR_ALL, number_of_affected_entries=1
+ )
self.assertTrueEventually(assert_event, 5)
def test_add_entry_listener_with_key(self):
collector = event_collector()
- id = self.multi_map.add_entry_listener(key='key1', include_value=True, added_func=collector)
- self.multi_map.put('key2', 'value2')
- self.multi_map.put('key1', 'value1')
+ id = self.multi_map.add_entry_listener(key="key1", include_value=True, added_func=collector)
+ self.multi_map.put("key2", "value2")
+ self.multi_map.put("key1", "value1")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEntryEvent(event, key='key1', event_type=EntryEventType.ADDED, value='value1')
+ self.assertEntryEvent(
+ event, key="key1", event_type=EntryEventType.ADDED, value="value1"
+ )
self.assertTrueEventually(assert_event, 5)
@@ -169,10 +175,10 @@ def test_remove_entry_listener(self):
collector = event_collector()
id = self.multi_map.add_entry_listener(added_func=collector)
- self.multi_map.put('key', 'value')
+ self.multi_map.put("key", "value")
self.assertTrueEventually(lambda: self.assertEqual(len(collector.events), 1))
self.multi_map.remove_entry_listener(id)
- self.multi_map.put('key2', 'value')
+ self.multi_map.put("key2", "value")
time.sleep(1)
self.assertEqual(len(collector.events), 1)
@@ -214,9 +220,12 @@ def test_str(self):
self.assertTrue(str(self.multi_map).startswith("MultiMap"))
def _fill_map(self, key_count=5, value_count=5):
- map = {"key-%d" % x: ["value-%d-%d" % (x, y) for y in range(0, value_count)] for x in range(0, key_count)}
+ map = {
+ "key-%d" % x: ["value-%d-%d" % (x, y) for y in range(0, value_count)]
+ for x in range(0, key_count)
+ }
for k, l in six.iteritems(map):
for v in l:
self.multi_map.put(k, v)
- return map
\ No newline at end of file
+ return map
diff --git a/tests/proxy/pn_counter_test.py b/tests/proxy/pn_counter_test.py
index 21bc772a20..562fbabda1 100644
--- a/tests/proxy/pn_counter_test.py
+++ b/tests/proxy/pn_counter_test.py
@@ -97,7 +97,9 @@ def test_counter_can_continue_session_by_calling_reset(self):
def _configure_cluster(self):
current_directory = os.path.dirname(__file__)
- with open(get_abs_path(current_directory, "hazelcast_crdtreplication_delayed.xml"), "r") as f:
+ with open(
+ get_abs_path(current_directory, "hazelcast_crdtreplication_delayed.xml"), "r"
+ ) as f:
return f.read()
diff --git a/tests/proxy/queue_test.py b/tests/proxy/queue_test.py
index e33bb4e63a..f381efb28f 100644
--- a/tests/proxy/queue_test.py
+++ b/tests/proxy/queue_test.py
@@ -26,7 +26,7 @@ def setUp(self):
def test_add_entry_listener_item_added(self):
collector = event_collector()
self.queue.add_listener(include_value=False, item_added_func=collector)
- self.queue.add('item-value')
+ self.queue.add("item-value")
def assert_event():
self.assertEqual(len(collector.events), 1)
@@ -39,12 +39,12 @@ def assert_event():
def test_add_entry_listener_item_added_include_value(self):
collector = event_collector()
self.queue.add_listener(include_value=True, item_added_func=collector)
- self.queue.add('item-value')
+ self.queue.add("item-value")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEqual(event.item, 'item-value')
+ self.assertEqual(event.item, "item-value")
self.assertEqual(event.event_type, ItemEventType.ADDED)
self.assertTrueEventually(assert_event, 5)
@@ -52,8 +52,8 @@ def assert_event():
def test_add_entry_listener_item_removed(self):
collector = event_collector()
self.queue.add_listener(include_value=False, item_removed_func=collector)
- self.queue.add('item-value')
- self.queue.remove('item-value')
+ self.queue.add("item-value")
+ self.queue.remove("item-value")
def assert_event():
self.assertEqual(len(collector.events), 1)
@@ -66,13 +66,13 @@ def assert_event():
def test_add_entry_listener_item_removed_include_value(self):
collector = event_collector()
self.queue.add_listener(include_value=True, item_removed_func=collector)
- self.queue.add('item-value')
- self.queue.remove('item-value')
+ self.queue.add("item-value")
+ self.queue.remove("item-value")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEqual(event.item, 'item-value')
+ self.assertEqual(event.item, "item-value")
self.assertEqual(event.event_type, ItemEventType.REMOVED)
self.assertTrueEventually(assert_event, 5)
@@ -81,7 +81,7 @@ def test_remove_entry_listener_item_added(self):
collector = event_collector()
reg_id = self.queue.add_listener(include_value=False, item_added_func=collector)
self.queue.remove_listener(reg_id)
- self.queue.add('item-value')
+ self.queue.add("item-value")
def assert_event():
self.assertEqual(len(collector.events), 0)
diff --git a/tests/proxy/replicated_map_test.py b/tests/proxy/replicated_map_test.py
index 7206728e8e..d4fc7e5a12 100644
--- a/tests/proxy/replicated_map_test.py
+++ b/tests/proxy/replicated_map_test.py
@@ -23,105 +23,126 @@ def tearDown(self):
def test_add_entry_listener_item_added(self):
collector = event_collector()
self.replicated_map.add_entry_listener(added_func=collector)
- self.replicated_map.put('key', 'value')
+ self.replicated_map.put("key", "value")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEntryEvent(event, key='key', event_type=EntryEventType.ADDED, value='value')
+ self.assertEntryEvent(event, key="key", event_type=EntryEventType.ADDED, value="value")
self.assertTrueEventually(assert_event, 5)
def test_add_entry_listener_item_removed(self):
collector = event_collector()
self.replicated_map.add_entry_listener(removed_func=collector)
- self.replicated_map.put('key', 'value')
- self.replicated_map.remove('key')
+ self.replicated_map.put("key", "value")
+ self.replicated_map.remove("key")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEntryEvent(event, key='key', event_type=EntryEventType.REMOVED, old_value='value')
+ self.assertEntryEvent(
+ event, key="key", event_type=EntryEventType.REMOVED, old_value="value"
+ )
self.assertTrueEventually(assert_event, 5)
def test_add_entry_listener_item_updated(self):
collector = event_collector()
self.replicated_map.add_entry_listener(updated_func=collector)
- self.replicated_map.put('key', 'value')
- self.replicated_map.put('key', 'new_value')
+ self.replicated_map.put("key", "value")
+ self.replicated_map.put("key", "new_value")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEntryEvent(event, key='key', event_type=EntryEventType.UPDATED, old_value='value',
- value='new_value')
+ self.assertEntryEvent(
+ event,
+ key="key",
+ event_type=EntryEventType.UPDATED,
+ old_value="value",
+ value="new_value",
+ )
self.assertTrueEventually(assert_event, 5)
def test_add_entry_listener_item_evicted(self):
collector = event_collector()
self.replicated_map.add_entry_listener(evicted_func=collector)
- self.replicated_map.put('key', 'value', ttl=1)
+ self.replicated_map.put("key", "value", ttl=1)
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEntryEvent(event, key='key', event_type=EntryEventType.EVICTED, old_value='value')
+ self.assertEntryEvent(
+ event, key="key", event_type=EntryEventType.EVICTED, old_value="value"
+ )
self.assertTrueEventually(assert_event, 10)
def test_add_entry_listener_with_key(self):
collector = event_collector()
- id = self.replicated_map.add_entry_listener(key='key1', added_func=collector)
- self.replicated_map.put('key2', 'value2')
- self.replicated_map.put('key1', 'value1')
+ id = self.replicated_map.add_entry_listener(key="key1", added_func=collector)
+ self.replicated_map.put("key2", "value2")
+ self.replicated_map.put("key1", "value1")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEntryEvent(event, key='key1', event_type=EntryEventType.ADDED, value='value1')
+ self.assertEntryEvent(
+ event, key="key1", event_type=EntryEventType.ADDED, value="value1"
+ )
self.assertTrueEventually(assert_event, 5)
def test_add_entry_listener_with_predicate(self):
collector = event_collector()
- self.replicated_map.add_entry_listener(predicate=sql("this == value1"), added_func=collector)
- self.replicated_map.put('key2', 'value2')
- self.replicated_map.put('key1', 'value1')
+ self.replicated_map.add_entry_listener(
+ predicate=sql("this == value1"), added_func=collector
+ )
+ self.replicated_map.put("key2", "value2")
+ self.replicated_map.put("key1", "value1")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEntryEvent(event, key='key1', event_type=EntryEventType.ADDED, value='value1')
+ self.assertEntryEvent(
+ event, key="key1", event_type=EntryEventType.ADDED, value="value1"
+ )
self.assertTrueEventually(assert_event, 5)
def test_add_entry_listener_with_key_and_predicate(self):
collector = event_collector()
- self.replicated_map.add_entry_listener(key='key1', predicate=sql("this == value3"), added_func=collector)
- self.replicated_map.put('key2', 'value2')
- self.replicated_map.put('key1', 'value1')
- self.replicated_map.remove('key1')
- self.replicated_map.put('key1', 'value3')
+ self.replicated_map.add_entry_listener(
+ key="key1", predicate=sql("this == value3"), added_func=collector
+ )
+ self.replicated_map.put("key2", "value2")
+ self.replicated_map.put("key1", "value1")
+ self.replicated_map.remove("key1")
+ self.replicated_map.put("key1", "value3")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEntryEvent(event, key='key1', event_type=EntryEventType.ADDED, value='value3')
+ self.assertEntryEvent(
+ event, key="key1", event_type=EntryEventType.ADDED, value="value3"
+ )
self.assertTrueEventually(assert_event, 5)
def test_add_entry_listener_clear_all(self):
collector = event_collector()
self.replicated_map.add_entry_listener(clear_all_func=collector)
- self.replicated_map.put('key', 'value')
+ self.replicated_map.put("key", "value")
self.replicated_map.clear()
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEntryEvent(event, event_type=EntryEventType.CLEAR_ALL, number_of_affected_entries=1)
+ self.assertEntryEvent(
+ event, event_type=EntryEventType.CLEAR_ALL, number_of_affected_entries=1
+ )
self.assertTrueEventually(assert_event, 5)
@@ -152,7 +173,8 @@ def test_entry_set(self):
map = self._fill_map()
self.assertTrueEventually(
- lambda: six.assertCountEqual(self, six.iteritems(map), self.replicated_map.entry_set()))
+ lambda: six.assertCountEqual(self, six.iteritems(map), self.replicated_map.entry_set())
+ )
def test_is_empty(self):
self.replicated_map.put("key", " value")
@@ -165,7 +187,9 @@ def test_is_empty_when_empty(self):
def test_key_set(self):
map = self._fill_map()
- self.assertTrueEventually(lambda: six.assertCountEqual(self, list(map.keys()), self.replicated_map.key_set()))
+ self.assertTrueEventually(
+ lambda: six.assertCountEqual(self, list(map.keys()), self.replicated_map.key_set())
+ )
def test_put_get(self):
self.assertIsNone(self.replicated_map.put("key", "value"))
@@ -178,7 +202,9 @@ def test_put_all(self):
self.replicated_map.put_all(map)
- self.assertTrueEventually(lambda: six.assertCountEqual(self, six.iteritems(map), self.replicated_map.entry_set()))
+ self.assertTrueEventually(
+ lambda: six.assertCountEqual(self, six.iteritems(map), self.replicated_map.entry_set())
+ )
def test_remove(self):
self.replicated_map.put("key", "value")
@@ -189,10 +215,10 @@ def test_remove_entry_listener(self):
collector = event_collector()
id = self.replicated_map.add_entry_listener(added_func=collector)
- self.replicated_map.put('key', 'value')
+ self.replicated_map.put("key", "value")
self.assertTrueEventually(lambda: self.assertEqual(len(collector.events), 1))
self.replicated_map.remove_entry_listener(id)
- self.replicated_map.put('key2', 'value')
+ self.replicated_map.put("key2", "value")
time.sleep(1)
self.assertEqual(len(collector.events), 1)
@@ -204,7 +230,11 @@ def test_size(self):
def test_values(self):
map = self._fill_map()
- self.assertTrueEventually(lambda: six.assertCountEqual(self, list(map.values()), list(self.replicated_map.values())))
+ self.assertTrueEventually(
+ lambda: six.assertCountEqual(
+ self, list(map.values()), list(self.replicated_map.values())
+ )
+ )
def test_str(self):
self.assertTrue(str(self.replicated_map).startswith("ReplicatedMap"))
diff --git a/tests/proxy/ringbuffer_test.py b/tests/proxy/ringbuffer_test.py
index 1ea5bee296..9b8eca90c2 100644
--- a/tests/proxy/ringbuffer_test.py
+++ b/tests/proxy/ringbuffer_test.py
@@ -22,7 +22,9 @@ def configure_cluster(cls):
return f.read()
def setUp(self):
- self.ringbuffer = self.client.get_ringbuffer("ClientRingbufferTestWithTTL-" + random_string()).blocking()
+ self.ringbuffer = self.client.get_ringbuffer(
+ "ClientRingbufferTestWithTTL-" + random_string()
+ ).blocking()
def tearDown(self):
self.ringbuffer.destroy()
@@ -46,7 +48,9 @@ def test_add_all(self):
self.assertEqual(CAPACITY - 1, self.ringbuffer.add_all(list(range(0, CAPACITY))))
def test_add_all_when_full(self):
- self.assertEqual(-1, self.ringbuffer.add_all(list(range(0, CAPACITY * 2)), OVERFLOW_POLICY_FAIL))
+ self.assertEqual(
+ -1, self.ringbuffer.add_all(list(range(0, CAPACITY * 2)), OVERFLOW_POLICY_FAIL)
+ )
def test_add_all_when_empty_list(self):
with self.assertRaises(AssertionError):
@@ -98,11 +102,11 @@ def test_read_many_when_min_count_greater_than_max_count(self):
def test_read_many_when_min_count_greater_than_capacity(self):
with self.assertRaises(AssertionError):
- self.ringbuffer.read_many(0, CAPACITY+1, CAPACITY+1)
+ self.ringbuffer.read_many(0, CAPACITY + 1, CAPACITY + 1)
def test_read_many_when_max_count_greater_than_batch_size(self):
with self.assertRaises(AssertionError):
- self.ringbuffer.read_many(0, 0, MAX_BATCH_SIZE+1)
+ self.ringbuffer.read_many(0, 0, MAX_BATCH_SIZE + 1)
def _fill_ringbuffer(self, n=CAPACITY):
for x in range(0, n):
diff --git a/tests/proxy/set_test.py b/tests/proxy/set_test.py
index 41b8ef5ca8..6d939a396c 100644
--- a/tests/proxy/set_test.py
+++ b/tests/proxy/set_test.py
@@ -16,7 +16,7 @@ def configure_client(cls, config):
def test_add_entry_listener_item_added(self):
collector = event_collector()
self.set.add_listener(include_value=False, item_added_func=collector)
- self.set.add('item-value')
+ self.set.add("item-value")
def assert_event():
self.assertEqual(len(collector.events), 1)
@@ -29,12 +29,12 @@ def assert_event():
def test_add_entry_listener_item_added_include_value(self):
collector = event_collector()
self.set.add_listener(include_value=True, item_added_func=collector)
- self.set.add('item-value')
+ self.set.add("item-value")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEqual(event.item, 'item-value')
+ self.assertEqual(event.item, "item-value")
self.assertEqual(event.event_type, ItemEventType.ADDED)
self.assertTrueEventually(assert_event, 5)
@@ -42,8 +42,8 @@ def assert_event():
def test_add_entry_listener_item_removed(self):
collector = event_collector()
self.set.add_listener(include_value=False, item_removed_func=collector)
- self.set.add('item-value')
- self.set.remove('item-value')
+ self.set.add("item-value")
+ self.set.remove("item-value")
def assert_event():
self.assertEqual(len(collector.events), 1)
@@ -56,13 +56,13 @@ def assert_event():
def test_add_entry_listener_item_removed_include_value(self):
collector = event_collector()
self.set.add_listener(include_value=True, item_removed_func=collector)
- self.set.add('item-value')
- self.set.remove('item-value')
+ self.set.add("item-value")
+ self.set.remove("item-value")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEqual(event.item, 'item-value')
+ self.assertEqual(event.item, "item-value")
self.assertEqual(event.event_type, ItemEventType.REMOVED)
self.assertTrueEventually(assert_event, 5)
@@ -71,7 +71,7 @@ def test_remove_entry_listener_item_added(self):
collector = event_collector()
reg_id = self.set.add_listener(include_value=False, item_added_func=collector)
self.set.remove_listener(reg_id)
- self.set.add('item-value')
+ self.set.add("item-value")
def assert_event():
self.assertEqual(len(collector.events), 0)
diff --git a/tests/proxy/topic_test.py b/tests/proxy/topic_test.py
index 7edbd9d86b..eae452432e 100644
--- a/tests/proxy/topic_test.py
+++ b/tests/proxy/topic_test.py
@@ -17,12 +17,12 @@ def tearDown(self):
def test_add_listener(self):
collector = event_collector()
self.topic.add_listener(on_message=collector)
- self.topic.publish('item-value')
+ self.topic.publish("item-value")
def assert_event():
self.assertEqual(len(collector.events), 1)
event = collector.events[0]
- self.assertEqual(event.message, 'item-value')
+ self.assertEqual(event.message, "item-value")
self.assertGreater(event.publish_time, 0)
self.assertTrueEventually(assert_event, 5)
@@ -31,14 +31,15 @@ def test_remove_listener(self):
collector = event_collector()
reg_id = self.topic.add_listener(on_message=collector)
self.topic.remove_listener(reg_id)
- self.topic.publish('item-value')
+ self.topic.publish("item-value")
def assert_event():
self.assertEqual(len(collector.events), 0)
if len(collector.events) > 0:
event = collector.events[0]
- self.assertEqual(event.message, 'item-value')
+ self.assertEqual(event.message, "item-value")
self.assertGreater(event.publish_time, 0)
+
self.assertTrueEventually(assert_event, 5)
def test_str(self):
diff --git a/tests/proxy/transactional_list_test.py b/tests/proxy/transactional_list_test.py
index 18173eba87..c27cc4f18c 100644
--- a/tests/proxy/transactional_list_test.py
+++ b/tests/proxy/transactional_list_test.py
@@ -36,4 +36,4 @@ def test_size(self):
def test_str(self):
with self.client.new_transaction() as tx:
tx_list = tx.get_list(self.list.name)
- self.assertTrue(str(tx_list).startswith("TransactionalList"))
\ No newline at end of file
+ self.assertTrue(str(tx_list).startswith("TransactionalList"))
diff --git a/tests/proxy/transactional_set_test.py b/tests/proxy/transactional_set_test.py
index c4dc205924..44f9910f12 100644
--- a/tests/proxy/transactional_set_test.py
+++ b/tests/proxy/transactional_set_test.py
@@ -36,4 +36,4 @@ def test_size(self):
def test_str(self):
with self.client.new_transaction() as tx:
tx_set = tx.get_set(self.set.name)
- self.assertTrue(str(tx_set).startswith("TransactionalSet"))
\ No newline at end of file
+ self.assertTrue(str(tx_set).startswith("TransactionalSet"))
diff --git a/tests/reactor_test.py b/tests/reactor_test.py
index 869b4348e0..43e2bedae1 100644
--- a/tests/reactor_test.py
+++ b/tests/reactor_test.py
@@ -10,8 +10,14 @@
from hazelcast import six
from hazelcast.config import _Config
from hazelcast.core import Address
-from hazelcast.reactor import AsyncoreReactor, _WakeableLoop, _SocketedWaker, _PipedWaker, _BasicLoop, \
- AsyncoreConnection
+from hazelcast.reactor import (
+ AsyncoreReactor,
+ _WakeableLoop,
+ _SocketedWaker,
+ _PipedWaker,
+ _BasicLoop,
+ AsyncoreConnection,
+)
from hazelcast.util import AtomicInteger
from tests.base import HazelcastTestCase
@@ -33,8 +39,14 @@ def test_reactor_lifetime(self):
LOOP_CLASSES = [
- ("wakeable", _WakeableLoop,),
- ("basic", _BasicLoop,),
+ (
+ "wakeable",
+ _WakeableLoop,
+ ),
+ (
+ "basic",
+ _BasicLoop,
+ ),
]
@@ -106,7 +118,7 @@ def callback():
loop = cls({})
loop.start()
- loop.add_timer(float('inf'), callback) # never expired, must be cleaned up
+        loop.add_timer(float("inf"), callback)  # never expires, must be cleaned up
time.sleep(1)
try:
self.assertEqual(0, call_count.get())
@@ -130,7 +142,7 @@ def callback():
loop.add_timer(0, callback)
call_count.add(1)
- loop.add_timer(float('inf'), callback)
+ loop.add_timer(float("inf"), callback)
loop.shutdown()
@@ -187,8 +199,10 @@ def test_handle_read(self):
waker.handle_read()
self.assertFalse(waker.awake)
- with self.assertRaises((IOError, socket.error)): # BlockingIOError on Py3, socket.error on Py2
- waker._reader.recv(1) # handle_read should consume the socket, there should be nothing
+ # BlockingIOError on Py3, socket.error on Py2
+ with self.assertRaises((IOError, socket.error)):
+ # handle_read should consume the socket, there should be nothing
+ waker._reader.recv(1)
def test_close(self):
waker = self.waker
@@ -243,10 +257,13 @@ def test_handle_read(self):
self.assertFalse(waker.awake)
if os.name == "nt":
- return # pipes are not non-blocking on Windows, assertion below blocks forever on Windows
+            # pipes are blocking on Windows; the assertion below would block forever
+ return
- with self.assertRaises((IOError, OSError)): # BlockingIOError on Py3, OSError on Py2
- os.read(waker._read_fd, 1) # handle_read should consume the pipe, there should be nothing
+ # BlockingIOError on Py3, OSError on Py2
+ with self.assertRaises((IOError, OSError)):
+ # handle_read should consume the pipe, there should be nothing
+ os.read(waker._read_fd, 1)
def test_close(self):
waker = self.waker
@@ -278,10 +295,10 @@ def tearDownClass(cls):
def test_socket_options(self):
config = _Config()
- config.socket_options = [
- (socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
- ]
- conn = AsyncoreConnection(MagicMock(map=dict()), None, None, self.member.address, config, None)
+ config.socket_options = [(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)]
+ conn = AsyncoreConnection(
+ MagicMock(map=dict()), None, None, self.member.address, config, None
+ )
try:
# By default this is set to 0
@@ -294,10 +311,10 @@ def test_receive_buffer_size(self):
# to use that value while trying to read something.
config = _Config()
size = 64 * 1024
- config.socket_options = [
- (socket.SOL_SOCKET, socket.SO_RCVBUF, size)
- ]
- conn = AsyncoreConnection(MagicMock(map=dict()), None, None, self.member.address, config, None)
+ config.socket_options = [(socket.SOL_SOCKET, socket.SO_RCVBUF, size)]
+ conn = AsyncoreConnection(
+ MagicMock(map=dict()), None, None, self.member.address, config, None
+ )
try:
# By default this is set to 128000
@@ -310,10 +327,10 @@ def test_send_buffer_size(self):
# to use that value while trying to write something.
config = _Config()
size = 64 * 1024
- config.socket_options = [
- (socket.SOL_SOCKET, socket.SO_SNDBUF, size)
- ]
- conn = AsyncoreConnection(MagicMock(map=dict()), None, None, self.member.address, config, None)
+ config.socket_options = [(socket.SOL_SOCKET, socket.SO_SNDBUF, size)]
+ conn = AsyncoreConnection(
+ MagicMock(map=dict()), None, None, self.member.address, config, None
+ )
try:
# By default this is set to 128000
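The reactor hunks above only reflow the socket-option tests, but they show the expected shape of the option list: (level, option, value) tuples applied to the client connection's socket. A short sketch of the same idea through the public client constructor, assuming the socket_options keyword mirrors the internal _Config.socket_options attribute exercised here (that keyword is an assumption; the diff only shows the internal path):

    # Illustrative sketch only; socket_options keyword assumed.
    import socket

    from hazelcast import HazelcastClient

    client = HazelcastClient(
        socket_options=[
            (socket.SOL_SOCKET, socket.SO_REUSEADDR, 1),       # allow quick re-bind
            (socket.SOL_SOCKET, socket.SO_RCVBUF, 64 * 1024),  # receive buffer, as in the test
            (socket.SOL_SOCKET, socket.SO_SNDBUF, 64 * 1024),  # send buffer, as in the test
        ]
    )
    client.shutdown()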
diff --git a/tests/reconnect_test.py b/tests/reconnect_test.py
index 40667d9f6f..fa23a07eca 100644
--- a/tests/reconnect_test.py
+++ b/tests/reconnect_test.py
@@ -21,14 +21,16 @@ def tearDown(self):
def test_start_client_with_no_member(self):
with self.assertRaises(HazelcastError):
- self.create_client({
- "cluster_members": [
- "127.0.0.1:5701",
- "127.0.0.1:5702",
- "127.0.0.1:5703",
- ],
- "cluster_connect_timeout": 2,
- })
+ self.create_client(
+ {
+ "cluster_members": [
+ "127.0.0.1:5701",
+ "127.0.0.1:5702",
+ "127.0.0.1:5703",
+ ],
+ "cluster_connect_timeout": 2,
+ }
+ )
def test_start_client_before_member(self):
def run():
@@ -37,18 +39,22 @@ def run():
t = Thread(target=run)
t.start()
- self.create_client({
- "cluster_name": self.cluster.id,
- "cluster_connect_timeout": 5.0,
- })
+ self.create_client(
+ {
+ "cluster_name": self.cluster.id,
+ "cluster_connect_timeout": 5.0,
+ }
+ )
t.join()
def test_restart_member(self):
member = self.cluster.start_member()
- client = self.create_client({
- "cluster_name": self.cluster.id,
- "cluster_connect_timeout": 5.0,
- })
+ client = self.create_client(
+ {
+ "cluster_name": self.cluster.id,
+ "cluster_connect_timeout": 5.0,
+ }
+ )
state = [None]
@@ -64,10 +70,12 @@ def listener(s):
def test_listener_re_register(self):
member = self.cluster.start_member()
- client = self.create_client({
- "cluster_name": self.cluster.id,
- "cluster_connect_timeout": 5.0,
- })
+ client = self.create_client(
+ {
+ "cluster_name": self.cluster.id,
+ "cluster_connect_timeout": 5.0,
+ }
+ )
map = client.get_map("map").blocking()
@@ -95,10 +103,12 @@ def assert_events():
def test_member_list_after_reconnect(self):
old_member = self.cluster.start_member()
- client = self.create_client({
- "cluster_name": self.cluster.id,
- "cluster_connect_timeout": 5.0,
- })
+ client = self.create_client(
+ {
+ "cluster_name": self.cluster.id,
+ "cluster_connect_timeout": 5.0,
+ }
+ )
old_member.shutdown()
new_member = self.cluster.start_member()
@@ -112,14 +122,16 @@ def assert_member_list():
def test_reconnect_toNewNode_ViaLastMemberList(self):
old_member = self.cluster.start_member()
- client = self.create_client({
- "cluster_name": self.cluster.id,
- "cluster_members": [
- "127.0.0.1:5701",
- ],
- "smart_routing": False,
- "cluster_connect_timeout": 10.0,
- })
+ client = self.create_client(
+ {
+ "cluster_name": self.cluster.id,
+ "cluster_members": [
+ "127.0.0.1:5701",
+ ],
+ "smart_routing": False,
+ "cluster_connect_timeout": 10.0,
+ }
+ )
new_member = self.cluster.start_member()
old_member.shutdown()
diff --git a/tests/serialization/api_test.py b/tests/serialization/api_test.py
index 2d222f56c3..12c3a1a725 100644
--- a/tests/serialization/api_test.py
+++ b/tests/serialization/api_test.py
@@ -1,8 +1,15 @@
import unittest
from types import FunctionType
-from hazelcast.serialization.api import ObjectDataOutput, ObjectDataInput, Portable, PortableReader, PortableWriter, \
- StreamSerializer, IdentifiedDataSerializable
+from hazelcast.serialization.api import (
+ ObjectDataOutput,
+ ObjectDataInput,
+ Portable,
+ PortableReader,
+ PortableWriter,
+ StreamSerializer,
+ IdentifiedDataSerializable,
+)
from hazelcast import six
from hazelcast.six.moves import range
diff --git a/tests/serialization/binary_compatibility/binary_compatibility_test.py b/tests/serialization/binary_compatibility/binary_compatibility_test.py
index 0897038cd8..61ccba74b9 100644
--- a/tests/serialization/binary_compatibility/binary_compatibility_test.py
+++ b/tests/serialization/binary_compatibility/binary_compatibility_test.py
@@ -39,20 +39,30 @@ def setUpClass(cls):
cls.data_map = data_map
- @parameterized.expand(map(lambda x: ("%s_is_big_endian=%s" % (x[0], x[1]), x[0], x[1]),
- itertools.product(REFERENCE_OBJECTS.keys(), IS_BIG_ENDIAN)))
+ @parameterized.expand(
+ map(
+ lambda x: ("%s_is_big_endian=%s" % (x[0], x[1]), x[0], x[1]),
+ itertools.product(REFERENCE_OBJECTS.keys(), IS_BIG_ENDIAN),
+ )
+ )
def test_serialize(self, _, name, is_big_endian):
if skip_on_serialize(name):
return
- ss = self._create_serialization_service(is_big_endian, OBJECT_KEY_TO_INT_TYPE.get(name, IntType.INT))
+ ss = self._create_serialization_service(
+ is_big_endian, OBJECT_KEY_TO_INT_TYPE.get(name, IntType.INT)
+ )
object_key = self._create_object_key(name, is_big_endian)
from_binary = self.data_map[object_key]
serialized = ss.to_data(REFERENCE_OBJECTS[name])
self.assertEqual(from_binary, serialized)
- @parameterized.expand(map(lambda x: ("%s_is_big_endian=%s" % (x[0], x[1]), x[0], x[1]),
- itertools.product(REFERENCE_OBJECTS.keys(), IS_BIG_ENDIAN)))
+ @parameterized.expand(
+ map(
+ lambda x: ("%s_is_big_endian=%s" % (x[0], x[1]), x[0], x[1]),
+ itertools.product(REFERENCE_OBJECTS.keys(), IS_BIG_ENDIAN),
+ )
+ )
def test_deserialize(self, _, name, is_big_endian):
if skip_on_deserialize(name):
return
@@ -63,13 +73,19 @@ def test_deserialize(self, _, name, is_big_endian):
deserialized = ss.to_object(from_binary)
self.assertTrue(is_equal(REFERENCE_OBJECTS[name], deserialized))
- @parameterized.expand(map(lambda x: ("%s_is_big_endian=%s" % (x[0], x[1]), x[0], x[1]),
- itertools.product(REFERENCE_OBJECTS.keys(), IS_BIG_ENDIAN)))
+ @parameterized.expand(
+ map(
+ lambda x: ("%s_is_big_endian=%s" % (x[0], x[1]), x[0], x[1]),
+ itertools.product(REFERENCE_OBJECTS.keys(), IS_BIG_ENDIAN),
+ )
+ )
def test_serialize_deserialize(self, _, name, is_big_endian):
if skip_on_deserialize(name) or skip_on_serialize(name):
return
- ss = self._create_serialization_service(is_big_endian, OBJECT_KEY_TO_INT_TYPE.get(name, IntType.INT))
+ ss = self._create_serialization_service(
+ is_big_endian, OBJECT_KEY_TO_INT_TYPE.get(name, IntType.INT)
+ )
obj = REFERENCE_OBJECTS[name]
data = ss.to_data(obj)
deserialized = ss.to_object(data)
@@ -90,7 +106,7 @@ def _create_serialization_service(is_big_endian, int_type):
config = _Config()
config.custom_serializers = {
CustomStreamSerializable: CustomStreamSerializer,
- CustomByteArraySerializable: CustomByteArraySerializer
+ CustomByteArraySerializable: CustomByteArraySerializer,
}
config.is_big_endian = is_big_endian
cdb = ClassDefinitionBuilder(PORTABLE_FACTORY_ID, INNER_PORTABLE_CLASS_ID)
@@ -101,7 +117,7 @@ def _create_serialization_service(is_big_endian, int_type):
config.portable_factories = {
PORTABLE_FACTORY_ID: {
PORTABLE_CLASS_ID: APortable,
- INNER_PORTABLE_CLASS_ID: AnInnerPortable
+ INNER_PORTABLE_CLASS_ID: AnInnerPortable,
}
}
config.data_serializable_factories = {
@@ -137,7 +153,9 @@ def write(self, out, obj):
def read(self, inp):
buf = inp.read_byte_array()
- return CustomByteArraySerializable(BE_INT.unpack_from(buf, 0)[0], BE_FLOAT.unpack_from(buf, 4)[0])
+ return CustomByteArraySerializable(
+ BE_INT.unpack_from(buf, 0)[0], BE_FLOAT.unpack_from(buf, 4)[0]
+ )
def get_type_id(self):
return CUSTOM_BYTE_ARRAY_SERIALIZABLE_ID
diff --git a/tests/serialization/binary_compatibility/reference_objects.py b/tests/serialization/binary_compatibility/reference_objects.py
index a96558d660..7fa637f7d8 100644
--- a/tests/serialization/binary_compatibility/reference_objects.py
+++ b/tests/serialization/binary_compatibility/reference_objects.py
@@ -79,7 +79,9 @@ def get_class_id(self):
return INNER_PORTABLE_CLASS_ID
def __eq__(self, other):
- return isinstance(other, AnInnerPortable) and self.i == other.i and is_equal(self.f, other.f)
+ return (
+ isinstance(other, AnInnerPortable) and self.i == other.i and is_equal(self.f, other.f)
+ )
def __ne__(self, other):
return not self.__eq__(other)
@@ -91,7 +93,11 @@ def __init__(self, i=None, f=None):
self.f = f
def __eq__(self, other):
- return isinstance(other, CustomStreamSerializable) and self.i == other.i and is_equal(self.f, other.f)
+ return (
+ isinstance(other, CustomStreamSerializable)
+ and self.i == other.i
+ and is_equal(self.f, other.f)
+ )
def __ne__(self, other):
return not self.__eq__(other)
@@ -103,7 +109,11 @@ def __init__(self, i=None, f=None):
self.f = f
def __eq__(self, other):
- return isinstance(other, CustomByteArraySerializable) and self.i == other.i and is_equal(self.f, other.f)
+ return (
+ isinstance(other, CustomByteArraySerializable)
+ and self.i == other.i
+ and is_equal(self.f, other.f)
+ )
def __ne__(self, other):
return not self.__eq__(other)
@@ -120,10 +130,32 @@ def _read_data_from_inp(inp):
class AnIdentifiedDataSerializable(IdentifiedDataSerializable):
- def __init__(self, boolean=None, b=None, c=None, d=None, s=None, f=None, i=None, l=None, string=None,
- booleans=None, bytes_=None, chars=None, doubles=None, shorts=None, floats=None,
- ints=None, longs=None, strings=None, portable=None, identified=None,
- custom_serializable=None, custom_byte_array_serializable=None, data=None):
+ def __init__(
+ self,
+ boolean=None,
+ b=None,
+ c=None,
+ d=None,
+ s=None,
+ f=None,
+ i=None,
+ l=None,
+ string=None,
+ booleans=None,
+ bytes_=None,
+ chars=None,
+ doubles=None,
+ shorts=None,
+ floats=None,
+ ints=None,
+ longs=None,
+ strings=None,
+ portable=None,
+ identified=None,
+ custom_serializable=None,
+ custom_byte_array_serializable=None,
+ data=None,
+ ):
self.boolean = boolean
self.b = b
self.c = c
@@ -276,54 +308,78 @@ def get_class_id(self):
return DATA_SERIALIZABLE_CLASS_ID
def __eq__(self, other):
- return isinstance(other, AnIdentifiedDataSerializable) \
- and self.boolean == other.boolean \
- and self.b == other.b \
- and self.c == other.c \
- and self.d == other.d \
- and self.s == other.s \
- and is_equal(self.f, other.f) \
- and self.i == other.i \
- and self.l == other.l \
- and self.bytes_size == other.bytes_size \
- and self.unsigned_byte == other.unsigned_byte \
- and self.unsigned_short == other.unsigned_short \
- and self.string == other.string \
- and self.booleans == other.booleans \
- and self.bytes_ == other.bytes_ \
- and self.chars == other.chars \
- and self.doubles == other.doubles \
- and self.shorts == other.shorts \
- and is_equal(self.floats, other.floats) \
- and self.ints == other.ints \
- and self.longs == other.longs \
- and self.strings == other.strings \
- and self.booleans_none == other.booleans_none \
- and self.bytes_none == other.bytes_none \
- and self.chars_none == other.chars_none \
- and self.doubles_none == other.doubles_none \
- and self.shorts_none == other.shorts_none \
- and self.floats_none == other.floats_none \
- and self.ints_none == other.ints_none \
- and self.longs_none == other.longs_none \
- and self.strings_none == other.strings_none \
- and self.bytes_fully == other.bytes_fully \
- and self.bytes_offset == other.bytes_offset \
- and self.str_chars == other.str_chars \
- and self.str_bytes == other.str_bytes \
- and self.portable == other.portable \
- and self.identified == other.identified \
- and self.custom_serializable == other.custom_serializable \
- and self.custom_byte_array_serializable == other.custom_byte_array_serializable \
- and self.data == other.data
+ return (
+ isinstance(other, AnIdentifiedDataSerializable)
+ and self.boolean == other.boolean
+ and self.b == other.b
+ and self.c == other.c
+ and self.d == other.d
+ and self.s == other.s
+ and is_equal(self.f, other.f)
+ and self.i == other.i
+ and self.l == other.l
+ and self.bytes_size == other.bytes_size
+ and self.unsigned_byte == other.unsigned_byte
+ and self.unsigned_short == other.unsigned_short
+ and self.string == other.string
+ and self.booleans == other.booleans
+ and self.bytes_ == other.bytes_
+ and self.chars == other.chars
+ and self.doubles == other.doubles
+ and self.shorts == other.shorts
+ and is_equal(self.floats, other.floats)
+ and self.ints == other.ints
+ and self.longs == other.longs
+ and self.strings == other.strings
+ and self.booleans_none == other.booleans_none
+ and self.bytes_none == other.bytes_none
+ and self.chars_none == other.chars_none
+ and self.doubles_none == other.doubles_none
+ and self.shorts_none == other.shorts_none
+ and self.floats_none == other.floats_none
+ and self.ints_none == other.ints_none
+ and self.longs_none == other.longs_none
+ and self.strings_none == other.strings_none
+ and self.bytes_fully == other.bytes_fully
+ and self.bytes_offset == other.bytes_offset
+ and self.str_chars == other.str_chars
+ and self.str_bytes == other.str_bytes
+ and self.portable == other.portable
+ and self.identified == other.identified
+ and self.custom_serializable == other.custom_serializable
+ and self.custom_byte_array_serializable == other.custom_byte_array_serializable
+ and self.data == other.data
+ )
class APortable(Portable):
- def __init__(self, boolean=None, b=None, c=None, d=None, s=None, f=None, i=None, l=None, string=None,
- p=None,
- booleans=None, bytes_=None, chars=None, doubles=None, shorts=None, floats=None,
- ints=None, longs=None, strings=None, portables=None, identified=None,
- custom_serializable=None, custom_byte_array_serializable=None, data=None):
+ def __init__(
+ self,
+ boolean=None,
+ b=None,
+ c=None,
+ d=None,
+ s=None,
+ f=None,
+ i=None,
+ l=None,
+ string=None,
+ p=None,
+ booleans=None,
+ bytes_=None,
+ chars=None,
+ doubles=None,
+ shorts=None,
+ floats=None,
+ ints=None,
+ longs=None,
+ strings=None,
+ portables=None,
+ identified=None,
+ custom_serializable=None,
+ custom_byte_array_serializable=None,
+ data=None,
+ ):
self.boolean = boolean
self.b = b
self.c = c
@@ -548,47 +604,49 @@ def get_class_id(self):
return PORTABLE_CLASS_ID
def __eq__(self, other):
- return isinstance(other, APortable) \
- and self.boolean == other.boolean \
- and self.b == other.b \
- and self.c == other.c \
- and self.d == other.d \
- and self.s == other.s \
- and is_equal(self.f, other.f) \
- and self.i == other.i \
- and self.l == other.l \
- and self.bytes_size == other.bytes_size \
- and self.unsigned_byte == other.unsigned_byte \
- and self.unsigned_short == other.unsigned_short \
- and self.string == other.string \
- and self.p == other.p \
- and self.booleans == other.booleans \
- and self.bytes_ == other.bytes_ \
- and self.chars == other.chars \
- and self.doubles == other.doubles \
- and self.shorts == other.shorts \
- and is_equal(self.floats, other.floats) \
- and self.ints == other.ints \
- and self.longs == other.longs \
- and self.strings == other.strings \
- and self.portables == other.portables \
- and self.booleans_none == other.booleans_none \
- and self.bytes_none == other.bytes_none \
- and self.chars_none == other.chars_none \
- and self.doubles_none == other.doubles_none \
- and self.shorts_none == other.shorts_none \
- and self.floats_none == other.floats_none \
- and self.ints_none == other.ints_none \
- and self.longs_none == other.longs_none \
- and self.strings_none == other.strings_none \
- and self.bytes_fully == other.bytes_fully \
- and self.bytes_offset == other.bytes_offset \
- and self.str_chars == other.str_chars \
- and self.str_bytes == other.str_bytes \
- and self.identified == other.identified \
- and self.custom_serializable == other.custom_serializable \
- and self.custom_byte_array_serializable == other.custom_byte_array_serializable \
- and self.data == other.data
+ return (
+ isinstance(other, APortable)
+ and self.boolean == other.boolean
+ and self.b == other.b
+ and self.c == other.c
+ and self.d == other.d
+ and self.s == other.s
+ and is_equal(self.f, other.f)
+ and self.i == other.i
+ and self.l == other.l
+ and self.bytes_size == other.bytes_size
+ and self.unsigned_byte == other.unsigned_byte
+ and self.unsigned_short == other.unsigned_short
+ and self.string == other.string
+ and self.p == other.p
+ and self.booleans == other.booleans
+ and self.bytes_ == other.bytes_
+ and self.chars == other.chars
+ and self.doubles == other.doubles
+ and self.shorts == other.shorts
+ and is_equal(self.floats, other.floats)
+ and self.ints == other.ints
+ and self.longs == other.longs
+ and self.strings == other.strings
+ and self.portables == other.portables
+ and self.booleans_none == other.booleans_none
+ and self.bytes_none == other.bytes_none
+ and self.chars_none == other.chars_none
+ and self.doubles_none == other.doubles_none
+ and self.shorts_none == other.shorts_none
+ and self.floats_none == other.floats_none
+ and self.ints_none == other.ints_none
+ and self.longs_none == other.longs_none
+ and self.strings_none == other.strings_none
+ and self.bytes_fully == other.bytes_fully
+ and self.bytes_offset == other.bytes_offset
+ and self.str_chars == other.str_chars
+ and self.str_bytes == other.str_bytes
+ and self.identified == other.identified
+ and self.custom_serializable == other.custom_serializable
+ and self.custom_byte_array_serializable == other.custom_byte_array_serializable
+ and self.data == other.data
+ )
_sql_string = _to_unicode("this > 5 AND this < 100")
@@ -613,11 +671,16 @@ def __eq__(self, other):
"float[]": [900.5678, 1.0, 2.1, 3.4],
"int[]": [56789, 2, 3],
"long[]": [-50992225, 1231232141, 2, 3],
- "String[]": list(map(_to_unicode, [
- "PijamalΔ± hasta, yaΔΔ±z ΕofΓΆre Γ§abucak gΓΌvendi.",
- "γ€γγγγγγ γγͺγγ«γ² γ―γ«γ¨γΏγ¬γ½ γγγγ©γ ",
- "The quick brown fox jumps over the lazy dog"
- ])),
+ "String[]": list(
+ map(
+ _to_unicode,
+ [
+ "PijamalΔ± hasta, yaΔΔ±z ΕofΓΆre Γ§abucak gΓΌvendi.",
+ "γ€γγγγγγ γγͺγγ«γ² γ―γ«γ¨γΏγ¬γ½ γγγγ©γ ",
+ "The quick brown fox jumps over the lazy dog",
+ ],
+ )
+ ),
"Date": datetime.datetime.fromtimestamp(633830400),
"BigInteger": 1314432323232411,
"Class": _to_unicode("java.math.BigDecimal"),
@@ -627,76 +690,126 @@ def __eq__(self, other):
_inner_portable = AnInnerPortable(REFERENCE_OBJECTS["Integer"], REFERENCE_OBJECTS["Float"])
-_custom_serializable = CustomStreamSerializable(REFERENCE_OBJECTS["Integer"], REFERENCE_OBJECTS["Float"])
-
-_custom_byte_array_serializable = CustomByteArraySerializable(REFERENCE_OBJECTS["Integer"], REFERENCE_OBJECTS["Float"])
-
-_identified = AnIdentifiedDataSerializable(REFERENCE_OBJECTS["Boolean"], REFERENCE_OBJECTS["Byte"],
- REFERENCE_OBJECTS["Character"], REFERENCE_OBJECTS["Double"],
- REFERENCE_OBJECTS["Short"], REFERENCE_OBJECTS["Float"],
- REFERENCE_OBJECTS["Integer"], REFERENCE_OBJECTS["Long"],
- _sql_string, REFERENCE_OBJECTS["boolean[]"],
- REFERENCE_OBJECTS["byte[]"], REFERENCE_OBJECTS["char[]"],
- REFERENCE_OBJECTS["double[]"], REFERENCE_OBJECTS["short[]"],
- REFERENCE_OBJECTS["float[]"], REFERENCE_OBJECTS["int[]"],
- REFERENCE_OBJECTS["long[]"], REFERENCE_OBJECTS["String[]"],
- _inner_portable, None,
- _custom_serializable, _custom_byte_array_serializable, _data)
+_custom_serializable = CustomStreamSerializable(
+ REFERENCE_OBJECTS["Integer"], REFERENCE_OBJECTS["Float"]
+)
+
+_custom_byte_array_serializable = CustomByteArraySerializable(
+ REFERENCE_OBJECTS["Integer"], REFERENCE_OBJECTS["Float"]
+)
+
+_identified = AnIdentifiedDataSerializable(
+ REFERENCE_OBJECTS["Boolean"],
+ REFERENCE_OBJECTS["Byte"],
+ REFERENCE_OBJECTS["Character"],
+ REFERENCE_OBJECTS["Double"],
+ REFERENCE_OBJECTS["Short"],
+ REFERENCE_OBJECTS["Float"],
+ REFERENCE_OBJECTS["Integer"],
+ REFERENCE_OBJECTS["Long"],
+ _sql_string,
+ REFERENCE_OBJECTS["boolean[]"],
+ REFERENCE_OBJECTS["byte[]"],
+ REFERENCE_OBJECTS["char[]"],
+ REFERENCE_OBJECTS["double[]"],
+ REFERENCE_OBJECTS["short[]"],
+ REFERENCE_OBJECTS["float[]"],
+ REFERENCE_OBJECTS["int[]"],
+ REFERENCE_OBJECTS["long[]"],
+ REFERENCE_OBJECTS["String[]"],
+ _inner_portable,
+ None,
+ _custom_serializable,
+ _custom_byte_array_serializable,
+ _data,
+)
_portables = [_inner_portable, _inner_portable, _inner_portable]
-_portable = APortable(REFERENCE_OBJECTS["Boolean"], REFERENCE_OBJECTS["Byte"],
- REFERENCE_OBJECTS["Character"], REFERENCE_OBJECTS["Double"],
- REFERENCE_OBJECTS["Short"], REFERENCE_OBJECTS["Float"],
- REFERENCE_OBJECTS["Integer"], REFERENCE_OBJECTS["Long"],
- _sql_string, _inner_portable,
- REFERENCE_OBJECTS["boolean[]"], REFERENCE_OBJECTS["byte[]"],
- REFERENCE_OBJECTS["char[]"], REFERENCE_OBJECTS["double[]"],
- REFERENCE_OBJECTS["short[]"], REFERENCE_OBJECTS["float[]"],
- REFERENCE_OBJECTS["int[]"], REFERENCE_OBJECTS["long[]"],
- REFERENCE_OBJECTS["String[]"], _portables,
- _identified, _custom_serializable,
- _custom_byte_array_serializable, _data)
-
-_non_null_list = [REFERENCE_OBJECTS["Boolean"],
- REFERENCE_OBJECTS["Double"],
- REFERENCE_OBJECTS["Integer"], _sql_string,
- _inner_portable, REFERENCE_OBJECTS["byte[]"],
- _custom_serializable, _custom_byte_array_serializable,
- _identified, _portable, REFERENCE_OBJECTS["Date"]]
-
-REFERENCE_OBJECTS.update({
- "AnInnerPortable": _inner_portable,
- "CustomStreamSerializable": _custom_serializable,
- "CustomByteArraySerializable": _custom_byte_array_serializable,
- "AnIdentifiedDataSerializable": _identified,
- "APortable": _portable,
- "ArrayList": [None, _non_null_list],
- "LinkedList": [None, _non_null_list],
- "TruePredicate": predicate.true(),
- "FalsePredicate": predicate.false(),
- "SqlPredicate": predicate.sql(_sql_string),
- "EqualPredicate": predicate.equal(_sql_string, REFERENCE_OBJECTS["Integer"]),
- "NotEqualPredicate": predicate.not_equal(_sql_string, REFERENCE_OBJECTS["Integer"]),
- "GreaterLessPredicate": predicate.greater(_sql_string, REFERENCE_OBJECTS["Integer"]),
- "BetweenPredicate": predicate.between(_sql_string, REFERENCE_OBJECTS["Integer"], REFERENCE_OBJECTS["Integer"]),
- "LikePredicate": predicate.like(_sql_string, _sql_string),
- "ILikePredicate": predicate.ilike(_sql_string, _sql_string),
- "InPredicate": predicate.in_(_sql_string, REFERENCE_OBJECTS["Integer"], REFERENCE_OBJECTS["Integer"]),
- "RegexPredicate": predicate.regex(_sql_string, _sql_string),
- "AndPredicate": predicate.and_(predicate.sql(_sql_string),
- predicate.equal(_sql_string, REFERENCE_OBJECTS["Integer"]),
- predicate.not_equal(_sql_string, REFERENCE_OBJECTS["Integer"]),
- predicate.greater(_sql_string, REFERENCE_OBJECTS["Integer"]),
- predicate.greater_or_equal(_sql_string, REFERENCE_OBJECTS["Integer"])),
- "OrPredicate": predicate.or_(predicate.sql(_sql_string),
- predicate.equal(_sql_string, REFERENCE_OBJECTS["Integer"]),
- predicate.not_equal(_sql_string, REFERENCE_OBJECTS["Integer"]),
- predicate.greater(_sql_string, REFERENCE_OBJECTS["Integer"]),
- predicate.greater_or_equal(_sql_string, REFERENCE_OBJECTS["Integer"])),
- "InstanceOfPredicate": predicate.instance_of(
- "com.hazelcast.nio.serialization.compatibility.CustomStreamSerializable")
-})
+_portable = APortable(
+ REFERENCE_OBJECTS["Boolean"],
+ REFERENCE_OBJECTS["Byte"],
+ REFERENCE_OBJECTS["Character"],
+ REFERENCE_OBJECTS["Double"],
+ REFERENCE_OBJECTS["Short"],
+ REFERENCE_OBJECTS["Float"],
+ REFERENCE_OBJECTS["Integer"],
+ REFERENCE_OBJECTS["Long"],
+ _sql_string,
+ _inner_portable,
+ REFERENCE_OBJECTS["boolean[]"],
+ REFERENCE_OBJECTS["byte[]"],
+ REFERENCE_OBJECTS["char[]"],
+ REFERENCE_OBJECTS["double[]"],
+ REFERENCE_OBJECTS["short[]"],
+ REFERENCE_OBJECTS["float[]"],
+ REFERENCE_OBJECTS["int[]"],
+ REFERENCE_OBJECTS["long[]"],
+ REFERENCE_OBJECTS["String[]"],
+ _portables,
+ _identified,
+ _custom_serializable,
+ _custom_byte_array_serializable,
+ _data,
+)
+
+_non_null_list = [
+ REFERENCE_OBJECTS["Boolean"],
+ REFERENCE_OBJECTS["Double"],
+ REFERENCE_OBJECTS["Integer"],
+ _sql_string,
+ _inner_portable,
+ REFERENCE_OBJECTS["byte[]"],
+ _custom_serializable,
+ _custom_byte_array_serializable,
+ _identified,
+ _portable,
+ REFERENCE_OBJECTS["Date"],
+]
+
+REFERENCE_OBJECTS.update(
+ {
+ "AnInnerPortable": _inner_portable,
+ "CustomStreamSerializable": _custom_serializable,
+ "CustomByteArraySerializable": _custom_byte_array_serializable,
+ "AnIdentifiedDataSerializable": _identified,
+ "APortable": _portable,
+ "ArrayList": [None, _non_null_list],
+ "LinkedList": [None, _non_null_list],
+ "TruePredicate": predicate.true(),
+ "FalsePredicate": predicate.false(),
+ "SqlPredicate": predicate.sql(_sql_string),
+ "EqualPredicate": predicate.equal(_sql_string, REFERENCE_OBJECTS["Integer"]),
+ "NotEqualPredicate": predicate.not_equal(_sql_string, REFERENCE_OBJECTS["Integer"]),
+ "GreaterLessPredicate": predicate.greater(_sql_string, REFERENCE_OBJECTS["Integer"]),
+ "BetweenPredicate": predicate.between(
+ _sql_string, REFERENCE_OBJECTS["Integer"], REFERENCE_OBJECTS["Integer"]
+ ),
+ "LikePredicate": predicate.like(_sql_string, _sql_string),
+ "ILikePredicate": predicate.ilike(_sql_string, _sql_string),
+ "InPredicate": predicate.in_(
+ _sql_string, REFERENCE_OBJECTS["Integer"], REFERENCE_OBJECTS["Integer"]
+ ),
+ "RegexPredicate": predicate.regex(_sql_string, _sql_string),
+ "AndPredicate": predicate.and_(
+ predicate.sql(_sql_string),
+ predicate.equal(_sql_string, REFERENCE_OBJECTS["Integer"]),
+ predicate.not_equal(_sql_string, REFERENCE_OBJECTS["Integer"]),
+ predicate.greater(_sql_string, REFERENCE_OBJECTS["Integer"]),
+ predicate.greater_or_equal(_sql_string, REFERENCE_OBJECTS["Integer"]),
+ ),
+ "OrPredicate": predicate.or_(
+ predicate.sql(_sql_string),
+ predicate.equal(_sql_string, REFERENCE_OBJECTS["Integer"]),
+ predicate.not_equal(_sql_string, REFERENCE_OBJECTS["Integer"]),
+ predicate.greater(_sql_string, REFERENCE_OBJECTS["Integer"]),
+ predicate.greater_or_equal(_sql_string, REFERENCE_OBJECTS["Integer"]),
+ ),
+ "InstanceOfPredicate": predicate.instance_of(
+ "com.hazelcast.nio.serialization.compatibility.CustomStreamSerializable"
+ ),
+ }
+)
_SKIP_ON_SERIALIZE = {
"Character",
diff --git a/tests/serialization/custom_global_serialization_test.py b/tests/serialization/custom_global_serialization_test.py
index 33674bf946..faf5f119af 100644
--- a/tests/serialization/custom_global_serialization_test.py
+++ b/tests/serialization/custom_global_serialization_test.py
@@ -52,10 +52,7 @@ def write(self, out, obj):
raise ValueError("Can only serialize CustomClass")
def read(self, inp):
- return CustomClass(inp.read_utf(), # uid
- inp.read_utf(), # name
- inp.read_utf(), # text
- "CUSTOM") # Source
+ return CustomClass(inp.read_utf(), inp.read_utf(), inp.read_utf(), "CUSTOM")
def get_type_id(self):
return 10001
@@ -101,9 +98,7 @@ def test_global_encode_decode(self):
def test_custom_serializer(self):
config = _Config()
- config.custom_serializers = {
- CustomClass: CustomSerializer
- }
+ config.custom_serializers = {CustomClass: CustomSerializer}
service = SerializationServiceV1(config)
obj = CustomClass("uid", "some name", "description text")
@@ -115,9 +110,7 @@ def test_custom_serializer(self):
def test_global_custom_serializer(self):
config = _Config()
- config.custom_serializers = {
- CustomClass: CustomSerializer
- }
+ config.custom_serializers = {CustomClass: CustomSerializer}
config.global_serializer = TestGlobalSerializer
service = SerializationServiceV1(config)
@@ -130,9 +123,7 @@ def test_global_custom_serializer(self):
def test_double_register_custom_serializer(self):
config = _Config()
- config.custom_serializers = {
- CustomClass: CustomSerializer
- }
+ config.custom_serializers = {CustomClass: CustomSerializer}
service = SerializationServiceV1(config)
with self.assertRaises(ValueError):
diff --git a/tests/serialization/identified_test.py b/tests/serialization/identified_test.py
index fc370a98d8..5fffa3d267 100644
--- a/tests/serialization/identified_test.py
+++ b/tests/serialization/identified_test.py
@@ -10,9 +10,27 @@
class SerializationV1Identified(IdentifiedDataSerializable):
CLASS_ID = 1
- def __init__(self, a_byte=None, a_boolean=None, a_character=None, a_short=None, a_integer=None, a_long=None, a_float=None,
- a_double=None, bytes_=None, booleans=None, chars=None, shorts=None, ints=None, longs=None,
- floats=None, doubles=None, a_string=None, strings=None):
+ def __init__(
+ self,
+ a_byte=None,
+ a_boolean=None,
+ a_character=None,
+ a_short=None,
+ a_integer=None,
+ a_long=None,
+ a_float=None,
+ a_double=None,
+ bytes_=None,
+ booleans=None,
+ chars=None,
+ shorts=None,
+ ints=None,
+ longs=None,
+ floats=None,
+ doubles=None,
+ a_string=None,
+ strings=None,
+ ):
self.a_byte = a_byte
self.a_boolean = a_boolean
self.a_character = a_character
@@ -99,27 +117,58 @@ def __eq__(self, other):
doubles = self.doubles == other.doubles
string = self.a_string == other.a_string
strings = self.strings == other.strings
- return byte and boolean and char and short and integer and long and float and double and \
- bytes_ and booleans and chars and shorts and integers and longs and floats and doubles and string and strings
+ return (
+ byte
+ and boolean
+ and char
+ and short
+ and integer
+ and long
+ and float
+ and double
+ and bytes_
+ and booleans
+ and chars
+ and shorts
+ and integers
+ and longs
+ and floats
+ and doubles
+ and string
+ and strings
+ )
def create_identified():
- return SerializationV1Identified(99, True, 'c', 11, 1234134, 1341431221, 1.0, 2.0, bytearray([1, 2, 3]),
- [True, False, True], ['a', 'b', 'c'], [1, 2, 3], [4, 2, 3], [11, 2, 3],
- [1.0, 2.0, 3.0], [11.0, 22.0, 33.0], "the string text",
- ["item1", "item2", "item3"])
+ return SerializationV1Identified(
+ 99,
+ True,
+ "c",
+ 11,
+ 1234134,
+ 1341431221,
+ 1.0,
+ 2.0,
+ bytearray([1, 2, 3]),
+ [True, False, True],
+ ["a", "b", "c"],
+ [1, 2, 3],
+ [4, 2, 3],
+ [11, 2, 3],
+ [1.0, 2.0, 3.0],
+ [11.0, 22.0, 33.0],
+ "the string text",
+ ["item1", "item2", "item3"],
+ )
the_factory = {SerializationV1Identified.CLASS_ID: SerializationV1Identified}
class IdentifiedSerializationTestCase(unittest.TestCase):
-
def test_encode_decode(self):
config = _Config()
- config.data_serializable_factories = {
- FACTORY_ID: the_factory
- }
+ config.data_serializable_factories = {FACTORY_ID: the_factory}
service = SerializationServiceV1(config)
obj = create_identified()
data = service.to_data(obj)
diff --git a/tests/serialization/input_test.py b/tests/serialization/input_test.py
index c17da70ec1..3f5319fa10 100644
--- a/tests/serialization/input_test.py
+++ b/tests/serialization/input_test.py
@@ -41,7 +41,7 @@ def test_char_be(self):
initial_pos = _input._pos
char = _input.read_char()
self.assertEqual(0, initial_pos)
- self.assertEqual(six.unichr(0x00e7), char)
+ self.assertEqual(six.unichr(0x00E7), char)
def test_char_le(self):
buff = bytearray(binascii.unhexlify("e7000000"))
@@ -49,7 +49,7 @@ def test_char_le(self):
initial_pos = _input._pos
char = _input.read_char()
self.assertEqual(0, initial_pos)
- self.assertEqual(six.unichr(0x00e7), char)
+ self.assertEqual(six.unichr(0x00E7), char)
def test_skip_bytes(self):
inp = _ObjectDataInput(bytearray(10))
diff --git a/tests/serialization/int_serialization_test.py b/tests/serialization/int_serialization_test.py
index 2279e0230e..e268a0ecc7 100644
--- a/tests/serialization/int_serialization_test.py
+++ b/tests/serialization/int_serialization_test.py
@@ -2,8 +2,12 @@
from hazelcast.config import IntType, _Config
from hazelcast.errors import HazelcastSerializationError
-from hazelcast.serialization.serialization_const import CONSTANT_TYPE_BYTE, CONSTANT_TYPE_SHORT, CONSTANT_TYPE_INTEGER, \
- CONSTANT_TYPE_LONG
+from hazelcast.serialization.serialization_const import (
+ CONSTANT_TYPE_BYTE,
+ CONSTANT_TYPE_SHORT,
+ CONSTANT_TYPE_INTEGER,
+ CONSTANT_TYPE_LONG,
+)
from hazelcast.serialization.service import SerializationServiceV1
byte_val = 0x12
diff --git a/tests/serialization/morphing_portable_test.py b/tests/serialization/morphing_portable_test.py
index 1f9c4ddc19..79598a1e20 100644
--- a/tests/serialization/morphing_portable_test.py
+++ b/tests/serialization/morphing_portable_test.py
@@ -2,7 +2,12 @@
from hazelcast.config import _Config
from hazelcast.serialization import SerializationServiceV1
-from tests.serialization.portable_test import create_portable, SerializationV1Portable, InnerPortable, FACTORY_ID
+from tests.serialization.portable_test import (
+ create_portable,
+ SerializationV1Portable,
+ InnerPortable,
+ FACTORY_ID,
+)
from hazelcast import six
if not six.PY2:
@@ -12,31 +17,51 @@
class MorphingPortable(SerializationV1Portable):
@classmethod
def clone(cls, base):
- return MorphingPortable(base.a_byte, base.a_boolean, base.a_character, base.a_short, base.a_integer,
- base.a_long, base.a_float, base.a_double,
- base.bytes, base.booleans, base.chars, base.shorts, base.ints, base.longs,
- base.floats, base.doubles, base.a_string, base.strings,
- base.inner_portable, base.inner_portable_array, base.identified_serializable)
+ return MorphingPortable(
+ base.a_byte,
+ base.a_boolean,
+ base.a_character,
+ base.a_short,
+ base.a_integer,
+ base.a_long,
+ base.a_float,
+ base.a_double,
+ base.bytes,
+ base.booleans,
+ base.chars,
+ base.shorts,
+ base.ints,
+ base.longs,
+ base.floats,
+ base.doubles,
+ base.a_string,
+ base.strings,
+ base.inner_portable,
+ base.inner_portable_array,
+ base.identified_serializable,
+ )
def get_class_version(self):
return 2
-the_factory_1 = {SerializationV1Portable.CLASS_ID: SerializationV1Portable, InnerPortable.CLASS_ID: InnerPortable}
-the_factory_2 = {SerializationV1Portable.CLASS_ID: MorphingPortable, InnerPortable.CLASS_ID: InnerPortable}
+the_factory_1 = {
+ SerializationV1Portable.CLASS_ID: SerializationV1Portable,
+ InnerPortable.CLASS_ID: InnerPortable,
+}
+the_factory_2 = {
+ SerializationV1Portable.CLASS_ID: MorphingPortable,
+ InnerPortable.CLASS_ID: InnerPortable,
+}
class MorphingPortableTestCase(unittest.TestCase):
def setUp(self):
config1 = _Config()
- config1.portable_factories = {
- FACTORY_ID: the_factory_1
- }
+ config1.portable_factories = {FACTORY_ID: the_factory_1}
config2 = _Config()
- config2.portable_factories = {
- FACTORY_ID: the_factory_2
- }
+ config2.portable_factories = {FACTORY_ID: the_factory_2}
self.service1 = SerializationServiceV1(config1)
self.service2 = SerializationServiceV1(config2)
@@ -59,7 +84,7 @@ def test_read_long(self):
a_integer = self.reader.read_long("5")
a_long = self.reader.read_long("6")
self.assertEqual(99, a_byte)
- self.assertEqual('c', a_character)
+ self.assertEqual("c", a_character)
self.assertEqual(11, a_short)
self.assertEqual(1234134, a_integer)
if six.PY2:
@@ -74,7 +99,7 @@ def test_read_int(self):
a_short = self.reader.read_int("4")
a_integer = self.reader.read_int("5")
self.assertEqual(99, a_byte)
- self.assertEqual('c', a_character)
+ self.assertEqual("c", a_character)
self.assertEqual(11, a_short)
self.assertEqual(1234134, a_integer)
self.assertEqual(0, self.reader.read_int("NO SUCH FIELD"))
@@ -93,7 +118,7 @@ def test_read_float(self):
a_integer = self.reader.read_float("5")
a_float = self.reader.read_float("7")
self.assertEqual(99, a_byte)
- self.assertEqual('c', a_character)
+ self.assertEqual("c", a_character)
self.assertEqual(11, a_short)
self.assertEqual(1234134, a_integer)
self.assertEqual(1.0, a_float)
@@ -108,7 +133,7 @@ def test_read_double(self):
a_float = self.reader.read_double("7")
a_double = self.reader.read_double("8")
self.assertEqual(99, a_byte)
- self.assertEqual('c', a_character)
+ self.assertEqual("c", a_character)
self.assertEqual(11, a_short)
self.assertEqual(1234134, a_integer)
if six.PY2:
@@ -131,7 +156,7 @@ def test_read_boolean(self):
def test_read_char(self):
a_character = self.reader.read_char("3")
- self.assertEqual('c', a_character)
+ self.assertEqual("c", a_character)
self.assertEqual(0, self.reader.read_char("NO SUCH FIELD"))
def test_encode_decode_with_parent_default_reader(self):
@@ -143,13 +168,27 @@ def test_encode_decode_with_parent_default_reader(self):
self.assertTrue(obj == obj2)
def test_incompatible_types(self):
- functions = [self.reader.read_byte, self.reader.read_boolean, self.reader.read_char, self.reader.read_short,
- self.reader.read_int, self.reader.read_long, self.reader.read_float, self.reader.read_double,
- self.reader.read_utf_array, self.reader.read_short_array, self.reader.read_int_array,
- self.reader.read_long_array,
- self.reader.read_float_array, self.reader.read_double_array, self.reader.read_char_array,
- self.reader.read_byte_array, self.reader.read_boolean_array, self.reader.read_portable,
- self.reader.read_portable_array]
+ functions = [
+ self.reader.read_byte,
+ self.reader.read_boolean,
+ self.reader.read_char,
+ self.reader.read_short,
+ self.reader.read_int,
+ self.reader.read_long,
+ self.reader.read_float,
+ self.reader.read_double,
+ self.reader.read_utf_array,
+ self.reader.read_short_array,
+ self.reader.read_int_array,
+ self.reader.read_long_array,
+ self.reader.read_float_array,
+ self.reader.read_double_array,
+ self.reader.read_char_array,
+ self.reader.read_byte_array,
+ self.reader.read_boolean_array,
+ self.reader.read_portable,
+ self.reader.read_portable_array,
+ ]
for read_fnc in functions:
with self.assertRaises(TypeError):
read_fnc("9")
@@ -157,10 +196,20 @@ def test_incompatible_types(self):
self.reader.read_utf("1")
def test_missing_fields(self):
- functions = [self.reader.read_utf, self.reader.read_utf_array, self.reader.read_short_array, self.reader.read_int_array,
- self.reader.read_long_array, self.reader.read_float_array, self.reader.read_double_array,
- self.reader.read_char_array, self.reader.read_byte_array, self.reader.read_boolean_array,
- self.reader.read_portable, self.reader.read_portable_array]
+ functions = [
+ self.reader.read_utf,
+ self.reader.read_utf_array,
+ self.reader.read_short_array,
+ self.reader.read_int_array,
+ self.reader.read_long_array,
+ self.reader.read_float_array,
+ self.reader.read_double_array,
+ self.reader.read_char_array,
+ self.reader.read_byte_array,
+ self.reader.read_boolean_array,
+ self.reader.read_portable,
+ self.reader.read_portable_array,
+ ]
for read_fnc in functions:
self.assertIsNone(read_fnc("NO SUCH FIELD"))
@@ -172,8 +221,28 @@ def test_reader_has_field(self):
self.assertFalse(self.reader.has_field("NO SUCH FIELD"))
def test_reader_get_field_names(self):
- expected_names = {"1", "2", "3", "4", "5", "6", "7", "8", "9", "a1", "a2", "a3", "a4", "a5", "a6", "a7", "a8", "a9", "p",
- "ap"}
+ expected_names = {
+ "1",
+ "2",
+ "3",
+ "4",
+ "5",
+ "6",
+ "7",
+ "8",
+ "9",
+ "a1",
+ "a2",
+ "a3",
+ "a4",
+ "a5",
+ "a6",
+ "a7",
+ "a8",
+ "a9",
+ "p",
+ "ap",
+ }
field_names = set(self.reader.get_field_names())
self.assertSetEqual(expected_names, field_names)
@@ -182,4 +251,3 @@ def test_reader_get_field_type(self):
def test_reader_get_field_class_id(self):
self.assertEqual(0, self.reader.get_field_class_id("1"))
-
diff --git a/tests/serialization/output_test.py b/tests/serialization/output_test.py
index 7b2eef7bb2..8bbf1256d1 100644
--- a/tests/serialization/output_test.py
+++ b/tests/serialization/output_test.py
@@ -14,20 +14,30 @@ def setUp(self):
def test_bool_array(self):
initial_pos = self._output._pos
self._output.write_boolean_array(self.BOOL_ARR)
- self.assertEqual(bytearray(binascii.unhexlify("00000004")), self._output._buffer[initial_pos:initial_pos + 4])
- self.assertEqual(bytearray(binascii.unhexlify("00010101")), self._output._buffer[initial_pos + 4:initial_pos + 8])
+ self.assertEqual(
+ bytearray(binascii.unhexlify("00000004")),
+ self._output._buffer[initial_pos : initial_pos + 4],
+ )
+ self.assertEqual(
+ bytearray(binascii.unhexlify("00010101")),
+ self._output._buffer[initial_pos + 4 : initial_pos + 8],
+ )
def test_int_array(self):
pos = self._output._pos
self._output.write_int_array(self.INT_ARR)
- self.assertEqual(bytearray(binascii.unhexlify("00000004")), self._output._buffer[pos:pos + 4])
- self.assertEqual(bytearray(binascii.unhexlify("00000001000000020000000300000004")), self._output._buffer[pos+4:pos + 20])
+ self.assertEqual(
+ bytearray(binascii.unhexlify("00000004")), self._output._buffer[pos : pos + 4]
+ )
+ self.assertEqual(
+ bytearray(binascii.unhexlify("00000001000000020000000300000004")),
+ self._output._buffer[pos + 4 : pos + 20],
+ )
def test_char(self):
pos = self._output._pos
- self._output.write_char(six.unichr(0x00e7))
- self.assertEqual(bytearray(binascii.unhexlify("00e70000000000000000")), self._output._buffer[pos:pos + 10])
-
-
-if __name__ == '__main__':
- unittest.main()
+ self._output.write_char(six.unichr(0x00E7))
+ self.assertEqual(
+ bytearray(binascii.unhexlify("00e70000000000000000")),
+ self._output._buffer[pos : pos + 10],
+ )
diff --git a/tests/serialization/portable_test.py b/tests/serialization/portable_test.py
index 075a1712a1..dbcb96e9c2 100644
--- a/tests/serialization/portable_test.py
+++ b/tests/serialization/portable_test.py
@@ -17,10 +17,30 @@
class SerializationV1Portable(Portable):
CLASS_ID = 8
- def __init__(self, a_byte=0, a_boolean=False, a_character=chr(0), a_short=0, a_integer=0, a_long=0, a_float=0.0,
- a_double=0.0, bytes_=None, booleans=None, chars=None, shorts=None, ints=None, longs=None,
- floats=None, doubles=None, string=None, strings=None, inner_portable=None, inner_portable_array=None,
- identified_serializable=None):
+ def __init__(
+ self,
+ a_byte=0,
+ a_boolean=False,
+ a_character=chr(0),
+ a_short=0,
+ a_integer=0,
+ a_long=0,
+ a_float=0.0,
+ a_double=0.0,
+ bytes_=None,
+ booleans=None,
+ chars=None,
+ shorts=None,
+ ints=None,
+ longs=None,
+ floats=None,
+ doubles=None,
+ string=None,
+ strings=None,
+ inner_portable=None,
+ inner_portable_array=None,
+ identified_serializable=None,
+ ):
self.a_byte = a_byte
self.a_boolean = a_boolean
self.a_character = a_character
@@ -142,9 +162,29 @@ def __eq__(self, other):
inner_portable = self.inner_portable == other.inner_portable
inner_portable_array = self.inner_portable_array == other.inner_portable_array
identified_serializable = self.identified_serializable == other.identified_serializable
- return byte and boolean and char and short and integer and long_ and float_ and double and \
- bytes_ and booleans and chars and shorts and integers and longs and floats and doubles and \
- string and strings and inner_portable and inner_portable_array and identified_serializable
+ return (
+ byte
+ and boolean
+ and char
+ and short
+ and integer
+ and long_
+ and float_
+ and double
+ and bytes_
+ and booleans
+ and chars
+ and shorts
+ and integers
+ and longs
+ and floats
+ and doubles
+ and string
+ and strings
+ and inner_portable
+ and inner_portable_array
+ and identified_serializable
+ )
class InnerPortable(Portable):
@@ -172,7 +212,7 @@ def __eq__(self, other):
return self.param_str == other.param_str and self.param_int == other.param_int
def __hash__(self):
- return id(self)//16
+ return id(self) // 16
class Parent(Portable):
@@ -219,16 +259,35 @@ def create_portable():
identified = create_identified()
inner_portable = InnerPortable("Inner Text", 666)
long_var = long("1341431221l") if six.PY2 else 1341431221
- return SerializationV1Portable(99, True, 'c', 11, 1234134, long_var, 1.0, 2.0, bytearray([1, 2, 3]),
- [True, False, True], ['a', 'b', 'c'],
- [1, 2, 3], [4, 2, 3], [11, 2, 3], [1.0, 2.0, 3.0],
- [11.0, 22.0, 33.0], "the string text",
- ["item1", "item2", "item3"], inner_portable,
- [InnerPortable("Portable array item 0", 0), InnerPortable("Portable array item 1", 1)],
- identified)
-
-
-the_factory = {SerializationV1Portable.CLASS_ID: SerializationV1Portable, InnerPortable.CLASS_ID: InnerPortable}
+ return SerializationV1Portable(
+ 99,
+ True,
+ "c",
+ 11,
+ 1234134,
+ long_var,
+ 1.0,
+ 2.0,
+ bytearray([1, 2, 3]),
+ [True, False, True],
+ ["a", "b", "c"],
+ [1, 2, 3],
+ [4, 2, 3],
+ [11, 2, 3],
+ [1.0, 2.0, 3.0],
+ [11.0, 22.0, 33.0],
+ "the string text",
+ ["item1", "item2", "item3"],
+ inner_portable,
+ [InnerPortable("Portable array item 0", 0), InnerPortable("Portable array item 1", 1)],
+ identified,
+ )
+
+
+the_factory = {
+ SerializationV1Portable.CLASS_ID: SerializationV1Portable,
+ InnerPortable.CLASS_ID: InnerPortable,
+}
class MyPortable1(Portable):
@@ -276,12 +335,11 @@ def __eq__(self, other):
def __ne__(self, other):
return not self.__eq__(other)
+
class PortableSerializationTestCase(unittest.TestCase):
def test_encode_decode(self):
config = _Config()
- config.portable_factories = {
- FACTORY_ID: the_factory
- }
+ config.portable_factories = {FACTORY_ID: the_factory}
service = SerializationServiceV1(config)
obj = create_portable()
self.assertTrue(obj.inner_portable)
@@ -293,9 +351,7 @@ def test_encode_decode(self):
def test_encode_decode_2(self):
config = _Config()
- config.portable_factories = {
- FACTORY_ID: the_factory
- }
+ config.portable_factories = {FACTORY_ID: the_factory}
service = SerializationServiceV1(config)
service2 = SerializationServiceV1(config)
obj = create_portable()
@@ -307,23 +363,21 @@ def test_encode_decode_2(self):
def test_portable_context(self):
config = _Config()
- config.portable_factories = {
- FACTORY_ID: the_factory
- }
+ config.portable_factories = {FACTORY_ID: the_factory}
service = SerializationServiceV1(config)
obj = create_portable()
self.assertTrue(obj.inner_portable)
service.to_data(obj)
- class_definition = service._portable_context.lookup_class_definition(FACTORY_ID, InnerPortable.CLASS_ID, 0)
+ class_definition = service._portable_context.lookup_class_definition(
+ FACTORY_ID, InnerPortable.CLASS_ID, 0
+ )
self.assertTrue(class_definition is not None)
def test_portable_null_fields(self):
config = _Config()
- config.portable_factories = {
- FACTORY_ID: the_factory
- }
+ config.portable_factories = {FACTORY_ID: the_factory}
service = SerializationServiceV1(config)
service.to_data(create_portable())
@@ -333,7 +387,7 @@ def test_portable_null_fields(self):
data = service.to_data(obj)
obj2 = service2.to_object(data)
self.assertTrue(obj == obj2)
-
+
def test_portable_class_def(self):
builder_inner = ClassDefinitionBuilder(FACTORY_ID, InnerPortable.CLASS_ID)
builder_inner.add_utf_field("param_str")
@@ -365,9 +419,7 @@ def test_portable_class_def(self):
class_def = builder.build()
config = _Config()
- config.portable_factories = {
- FACTORY_ID: the_factory
- }
+ config.portable_factories = {FACTORY_ID: the_factory}
config.class_definitions = [
class_def,
class_def_inner,
@@ -383,9 +435,7 @@ def test_portable_class_def(self):
def test_portable_read_without_factory(self):
config = _Config()
- config.portable_factories = {
- FACTORY_ID: the_factory
- }
+ config.portable_factories = {FACTORY_ID: the_factory}
service = SerializationServiceV1(config)
service2 = SerializationServiceV1(_Config())
obj = create_portable()
@@ -419,15 +469,14 @@ def test_nested_portable_serialization(self):
def test_nested_null_portable_serialization(self):
config = _Config()
- config.portable_factories = {
- 1: {
- 1: Parent,
- 2: Child
- }
- }
+ config.portable_factories = {1: {1: Parent, 2: Child}}
child_class_def = ClassDefinitionBuilder(FACTORY_ID, 2).add_utf_field("name").build()
- parent_class_def = ClassDefinitionBuilder(FACTORY_ID, 1).add_portable_field("child", child_class_def).build()
+ parent_class_def = (
+ ClassDefinitionBuilder(FACTORY_ID, 1)
+ .add_portable_field("child", child_class_def)
+ .build()
+ )
config.class_definitions = [child_class_def, parent_class_def]
@@ -451,14 +500,7 @@ def test_duplicate_class_definition(self):
def test_classes_with_same_class_id_in_different_factories(self):
config = _Config()
- config.portable_factories = {
- 1: {
- 1: MyPortable1
- },
- 2: {
- 1: MyPortable2
- }
- }
+ config.portable_factories = {1: {1: MyPortable1}, 2: {1: MyPortable2}}
class_def1 = ClassDefinitionBuilder(1, 1).add_utf_field("str_field").build()
class_def2 = ClassDefinitionBuilder(2, 1).add_int_field("int_field").build()
diff --git a/tests/serialization/serialization_test.py b/tests/serialization/serialization_test.py
index 4adeda22fd..1301ef7d48 100644
--- a/tests/serialization/serialization_test.py
+++ b/tests/serialization/serialization_test.py
@@ -37,7 +37,7 @@ def test_service_int_array(self):
self.assertEqual(obj, obj2)
def test_service_large_float_array(self):
- obj = 4000*[2.1]
+ obj = 4000 * [2.1]
data = self.service.to_data(obj)
obj2 = self.service.to_object(data)
diff --git a/tests/serialization/serializers_test.py b/tests/serialization/serializers_test.py
index 4b05f53364..deec3e11c1 100644
--- a/tests/serialization/serializers_test.py
+++ b/tests/serialization/serializers_test.py
@@ -66,7 +66,7 @@ def test_none(self):
self.validate(None)
def test_hazelcast_json_value(self):
- self.validate(HazelcastJsonValue("{\"abc\": \"abc\", \"five\": 5}"))
+ self.validate(HazelcastJsonValue('{"abc": "abc", "five": 5}'))
def test_uuid(self):
self.validate(uuid.uuid4())
@@ -131,9 +131,10 @@ def tearDown(self):
self.map.clear()
def get_from_server(self):
- script = """var StringArray = Java.type("java.lang.String[]");
+ script = (
+ """
function foo() {
- var map = instance_0.getMap(\"""" + self.map.name + """\");
+ var map = instance_0.getMap("%s");
var res = map.get("key");
if (res.getClass().isArray()) {
return Java.from(res);
@@ -142,12 +143,18 @@ def get_from_server(self):
}
}
result = ""+foo();"""
+ % self.map.name
+ )
response = self.rc.executeOnController(self.cluster.id, script, Lang.JAVASCRIPT)
return response.result.decode("utf-8")
def set_on_server(self, obj):
- script = """var map = instance_0.getMap(\"""" + self.map.name + """\");
- map.set("key", """ + obj + """);"""
+ script = """
+ var map = instance_0.getMap("%s");
+ map.set("key", %s);""" % (
+ self.map.name,
+ obj,
+ )
response = self.rc.executeOnController(self.cluster.id, script, Lang.JAVASCRIPT)
return response.success
@@ -339,7 +346,7 @@ def test_double_from_server(self):
self.assertEqual(-12332.0, self.map.get("key"))
def test_string_from_server(self):
- self.assertTrue(self.set_on_server(six.u("\"1βδΈπ¦2πβππ5\"")))
+ self.assertTrue(self.set_on_server(six.u('"1βδΈπ¦2πβππ5"')))
self.assertEqual(six.u("1βδΈπ¦2πβππ5"), self.map.get("key"))
def test_uuid_from_server(self):
@@ -347,15 +354,17 @@ def test_uuid_from_server(self):
self.assertEqual(uuid.UUID(int=1), self.map.get("key"))
def test_hjv_from_server(self):
- self.assertTrue(self.set_on_server("new com.hazelcast.core.HazelcastJsonValue(\"{\\\"a\\\": 3}\")"))
+ self.assertTrue(
+ self.set_on_server('new com.hazelcast.core.HazelcastJsonValue("{\\"a\\": 3}")')
+ )
self.assertEqual(HazelcastJsonValue({"a": 3}), self.map.get("key"))
def test_bool_array_from_server(self):
- self.assertTrue(self.set_on_server("Java.to([true, false], \"boolean[]\")"))
+ self.assertTrue(self.set_on_server('Java.to([true, false], "boolean[]")'))
self.assertEqual([True, False], self.map.get("key"))
def test_byte_array_from_server(self):
- self.assertTrue(self.set_on_server("Java.to([3, 123], \"byte[]\")"))
+ self.assertTrue(self.set_on_server('Java.to([3, 123], "byte[]")'))
self.assertEqual(bytearray([3, 123]), self.map.get("key"))
def test_char_array_from_server(self):
@@ -363,27 +372,31 @@ def test_char_array_from_server(self):
self.assertEqual(["x", "y"], self.map.get("key"))
def test_short_array_from_server(self):
- self.assertTrue(self.set_on_server("Java.to([1323, -1232], \"short[]\")"))
+ self.assertTrue(self.set_on_server('Java.to([1323, -1232], "short[]")'))
self.assertEqual([1323, -1232], self.map.get("key"))
def test_int_array_from_server(self):
- self.assertTrue(self.set_on_server("Java.to([2147483647, -2147483648], \"int[]\")"))
+ self.assertTrue(self.set_on_server('Java.to([2147483647, -2147483648], "int[]")'))
self.assertEqual([2147483647, -2147483648], self.map.get("key"))
def test_long_array_from_server(self):
- self.assertTrue(self.set_on_server("Java.to([1152921504606846976, -1152921504606846976], \"long[]\")"))
+ self.assertTrue(
+ self.set_on_server('Java.to([1152921504606846976, -1152921504606846976], "long[]")')
+ )
self.assertEqual([1152921504606846976, -1152921504606846976], self.map.get("key"))
def test_float_array_from_server(self):
- self.assertTrue(self.set_on_server("Java.to([3123.0, -123.0], \"float[]\")"))
+ self.assertTrue(self.set_on_server('Java.to([3123.0, -123.0], "float[]")'))
self.assertEqual([3123.0, -123.0], self.map.get("key"))
def test_double_array_from_server(self):
- self.assertTrue(self.set_on_server("Java.to([3123.0, -123.0], \"double[]\")"))
+ self.assertTrue(self.set_on_server('Java.to([3123.0, -123.0], "double[]")'))
self.assertEqual([3123.0, -123.0], self.map.get("key"))
def test_string_array_from_server(self):
- self.assertTrue(self.set_on_server(six.u("Java.to([\"hey\", \"1βδΈπ¦2πβππ5\"], \"java.lang.String[]\")")))
+ self.assertTrue(
+ self.set_on_server(six.u('Java.to(["hey", "1βδΈπ¦2πβππ5"], "java.lang.String[]")'))
+ )
self.assertEqual(["hey", six.u("1βδΈπ¦2πβππ5")], self.map.get("key"))
def test_date_from_server(self):
@@ -392,18 +405,24 @@ def test_date_from_server(self):
self.assertEqual(datetime.datetime(2000, 12, 15, 23, 59, 49), self.map.get("key"))
def test_big_integer_from_server(self):
- self.assertTrue(self.set_on_server("new java.math.BigInteger(\"12\", 10)"))
+ self.assertTrue(self.set_on_server('new java.math.BigInteger("12", 10)'))
self.assertEqual(12, self.map.get("key"))
- self.assertTrue(self.set_on_server("new java.math.BigInteger(\"-13\", 10)"))
+ self.assertTrue(self.set_on_server('new java.math.BigInteger("-13", 10)'))
self.assertEqual(-13, self.map.get("key"))
self.assertTrue(
- self.set_on_server("new java.math.BigInteger(\"1234567890123456789012345678901234567890\", 10)"))
+ self.set_on_server(
+ 'new java.math.BigInteger("1234567890123456789012345678901234567890", 10)'
+ )
+ )
self.assertEqual(1234567890123456789012345678901234567890, self.map.get("key"))
self.assertTrue(
- self.set_on_server("new java.math.BigInteger(\"-1234567890123456789012345678901234567890\", 10)"))
+ self.set_on_server(
+ 'new java.math.BigInteger("-1234567890123456789012345678901234567890", 10)'
+ )
+ )
self.assertEqual(-1234567890123456789012345678901234567890, self.map.get("key"))
def test_java_class_from_server(self):
@@ -411,23 +430,31 @@ def test_java_class_from_server(self):
self.assertEqual("java.lang.String", self.map.get("key"))
def test_array_list_from_server(self):
- script = """var list = new java.util.ArrayList();
+ script = (
+ """
+ var list = new java.util.ArrayList();
list.add(1);
list.add(2);
list.add(3);
- var map = instance_0.getMap(\"""" + self.map.name + """\");
+ var map = instance_0.getMap("%s");
map.set("key", list);"""
+ % self.map.name
+ )
response = self.rc.executeOnController(self.cluster.id, script, Lang.JAVASCRIPT)
self.assertTrue(response.success)
self.assertEqual([1, 2, 3], self.map.get("key"))
def test_linked_list_from_server(self):
- script = """var list = new java.util.LinkedList();
+ script = (
+ """
+ var list = new java.util.LinkedList();
list.add("a");
list.add("b");
list.add("c");
- var map = instance_0.getMap(\"""" + self.map.name + """\");
+ var map = instance_0.getMap("%s");
map.set("key", list);"""
+ % self.map.name
+ )
response = self.rc.executeOnController(self.cluster.id, script, Lang.JAVASCRIPT)
self.assertTrue(response.success)
self.assertEqual(["a", "b", "c"], self.map.get("key"))
diff --git a/tests/shutdown_test.py b/tests/shutdown_test.py
index fe34c01bb6..89295a61b8 100644
--- a/tests/shutdown_test.py
+++ b/tests/shutdown_test.py
@@ -18,10 +18,12 @@ def tearDown(self):
def test_shutdown_not_hang_on_member_closed(self):
member = self.cluster.start_member()
- client = self.create_client({
- "cluster_name": self.cluster.id,
- "cluster_connect_timeout": 5.0,
- })
+ client = self.create_client(
+ {
+ "cluster_name": self.cluster.id,
+ "cluster_connect_timeout": 5.0,
+ }
+ )
my_map = client.get_map("test")
my_map.put("key", "value").result()
member.shutdown()
@@ -31,9 +33,11 @@ def test_shutdown_not_hang_on_member_closed(self):
def test_invocations_finalised_when_client_shutdowns(self):
self.cluster.start_member()
- client = self.create_client({
- "cluster_name": self.cluster.id,
- })
+ client = self.create_client(
+ {
+ "cluster_name": self.cluster.id,
+ }
+ )
m = client.get_map("test")
m.put("key", "value").result()
diff --git a/tests/smart_listener_test.py b/tests/smart_listener_test.py
index 215761b920..7957216755 100644
--- a/tests/smart_listener_test.py
+++ b/tests/smart_listener_test.py
@@ -17,10 +17,12 @@ def tearDownClass(cls):
cls.rc.exit()
def setUp(self):
- self.client = self.create_client({
- "cluster_name": self.cluster.id,
- "smart_routing": True,
- })
+ self.client = self.create_client(
+ {
+ "cluster_name": self.cluster.id,
+ "smart_routing": True,
+ }
+ )
self.collector = event_collector()
def tearDown(self):
@@ -30,48 +32,48 @@ def tearDown(self):
def test_list_smart_listener_local_only(self):
list = self.client.get_list(random_string()).blocking()
list.add_listener(item_added_func=self.collector)
- list.add('item-value')
+ list.add("item-value")
sleep(5)
self.assertEqual(1, len(self.collector.events))
def test_map_smart_listener_local_only(self):
map = self.client.get_map(random_string()).blocking()
map.add_entry_listener(added_func=self.collector)
- map.put('key', 'value')
+ map.put("key", "value")
sleep(5)
self.assertEqual(1, len(self.collector.events))
def test_multimap_smart_listener_local_only(self):
multimap = self.client.get_map(random_string()).blocking()
multimap.add_entry_listener(added_func=self.collector)
- multimap.put('key', 'value')
+ multimap.put("key", "value")
sleep(5)
self.assertEqual(1, len(self.collector.events))
def test_queue_smart_listener_local_only(self):
queue = self.client.get_queue(random_string()).blocking()
queue.add_listener(item_added_func=self.collector)
- queue.add('item-value')
+ queue.add("item-value")
sleep(5)
self.assertEqual(1, len(self.collector.events))
def test_replicated_map_smart_listener_local_only(self):
replicated_map = self.client.get_replicated_map(random_string()).blocking()
replicated_map.add_entry_listener(added_func=self.collector)
- replicated_map.put('key', 'value')
+ replicated_map.put("key", "value")
sleep(5)
self.assertEqual(1, len(self.collector.events))
def test_set_smart_listener_local_only(self):
set = self.client.get_set(random_string()).blocking()
set.add_listener(item_added_func=self.collector)
- set.add('item-value')
+ set.add("item-value")
sleep(5)
self.assertEqual(1, len(self.collector.events))
def test_topic_smart_listener_local_only(self):
topic = self.client.get_topic(random_string()).blocking()
topic.add_listener(on_message=self.collector)
- topic.publish('item-value')
+ topic.publish("item-value")
sleep(5)
self.assertEqual(1, len(self.collector.events))
diff --git a/tests/soak_test/map_soak_test.py b/tests/soak_test/map_soak_test.py
index f702d290a6..20de91d066 100644
--- a/tests/soak_test/map_soak_test.py
+++ b/tests/soak_test/map_soak_test.py
@@ -64,8 +64,12 @@ def start():
thread_count_before = threading.active_count()
parser = argparse.ArgumentParser()
parser.add_argument("--hour", default=48.0, type=float, help="Duration of the test in hours")
- parser.add_argument("--addresses", default="127.0.0.1", type=str,
- help="List of cluster member addresses separated by -")
+ parser.add_argument(
+ "--addresses",
+ default="127.0.0.1",
+ type=str,
+ help="List of cluster member addresses separated by -",
+ )
parser.add_argument("--log", default="default_log_file", type=str, help="Name of the log file")
args = parser.parse_args()
@@ -74,11 +78,13 @@ def start():
addresses = args.addresses
log_file = args.log
- logging.basicConfig(filename=log_file,
- filemode="w",
- format="%(asctime)s %(message)s",
- datefmt="%H:%M:%S",
- level=logging.INFO)
+ logging.basicConfig(
+ filename=log_file,
+ filemode="w",
+ format="%(asctime)s %(message)s",
+ datefmt="%H:%M:%S",
+ level=logging.INFO,
+ )
try:
client = HazelcastClient(
@@ -87,7 +93,7 @@ def start():
SimpleEntryProcessor.FACTORY_ID: {
SimpleEntryProcessor.CLASS_ID: SimpleEntryProcessor
}
- }
+ },
)
except Exception:
logging.exception("Client failed to start")
@@ -95,7 +101,9 @@ def start():
processor = SimpleEntryProcessor("test")
test_map = client.get_map("test-map").blocking()
- test_map.add_entry_listener(False, added_func=listener, removed_func=listener, updated_func=listener)
+ test_map.add_entry_listener(
+ False, added_func=listener, removed_func=listener, updated_func=listener
+ )
logging.info("Soak test operations are starting!")
logging.info("* " * 20 + "\n")
diff --git a/tests/ssl_tests/mutual_authentication_test.py b/tests/ssl_tests/mutual_authentication_test.py
index 76f26a65c4..219cd25093 100644
--- a/tests/ssl_tests/mutual_authentication_test.py
+++ b/tests/ssl_tests/mutual_authentication_test.py
@@ -23,10 +23,15 @@ def tearDown(self):
def test_ma_required_client_and_server_authenticated(self):
cluster = self.create_cluster(self.rc, self.configure_cluster(True))
cluster.start_member()
- client = HazelcastClient(**get_ssl_config(cluster.id, True,
- get_abs_path(self.current_directory, "server1-cert.pem"),
- get_abs_path(self.current_directory, "client1-cert.pem"),
- get_abs_path(self.current_directory, "client1-key.pem")))
+ client = HazelcastClient(
+ **get_ssl_config(
+ cluster.id,
+ True,
+ get_abs_path(self.current_directory, "server1-cert.pem"),
+ get_abs_path(self.current_directory, "client1-cert.pem"),
+ get_abs_path(self.current_directory, "client1-key.pem"),
+ )
+ )
self.assertTrue(client.lifecycle_service.is_running())
client.shutdown()
@@ -35,38 +40,58 @@ def test_ma_required_server_not_authenticated(self):
cluster.start_member()
with self.assertRaises(HazelcastError):
- HazelcastClient(**get_ssl_config(cluster.id, True,
- get_abs_path(self.current_directory, "server2-cert.pem"),
- get_abs_path(self.current_directory, "client1-cert.pem"),
- get_abs_path(self.current_directory, "client1-key.pem")))
+ HazelcastClient(
+ **get_ssl_config(
+ cluster.id,
+ True,
+ get_abs_path(self.current_directory, "server2-cert.pem"),
+ get_abs_path(self.current_directory, "client1-cert.pem"),
+ get_abs_path(self.current_directory, "client1-key.pem"),
+ )
+ )
def test_ma_required_client_not_authenticated(self):
cluster = self.create_cluster(self.rc, self.configure_cluster(True))
cluster.start_member()
with self.assertRaises(HazelcastError):
- HazelcastClient(**get_ssl_config(cluster.id, True,
- get_abs_path(self.current_directory, "server1-cert.pem"),
- get_abs_path(self.current_directory, "client2-cert.pem"),
- get_abs_path(self.current_directory, "client2-key.pem")))
+ HazelcastClient(
+ **get_ssl_config(
+ cluster.id,
+ True,
+ get_abs_path(self.current_directory, "server1-cert.pem"),
+ get_abs_path(self.current_directory, "client2-cert.pem"),
+ get_abs_path(self.current_directory, "client2-key.pem"),
+ )
+ )
def test_ma_required_client_and_server_not_authenticated(self):
cluster = self.create_cluster(self.rc, self.configure_cluster(True))
cluster.start_member()
with self.assertRaises(HazelcastError):
- HazelcastClient(**get_ssl_config(cluster.id, True,
- get_abs_path(self.current_directory, "server2-cert.pem"),
- get_abs_path(self.current_directory, "client2-cert.pem"),
- get_abs_path(self.current_directory, "client2-key.pem")))
+ HazelcastClient(
+ **get_ssl_config(
+ cluster.id,
+ True,
+ get_abs_path(self.current_directory, "server2-cert.pem"),
+ get_abs_path(self.current_directory, "client2-cert.pem"),
+ get_abs_path(self.current_directory, "client2-key.pem"),
+ )
+ )
def test_ma_optional_client_and_server_authenticated(self):
cluster = self.create_cluster(self.rc, self.configure_cluster(False))
cluster.start_member()
- client = HazelcastClient(**get_ssl_config(cluster.id, True,
- get_abs_path(self.current_directory, "server1-cert.pem"),
- get_abs_path(self.current_directory, "client1-cert.pem"),
- get_abs_path(self.current_directory, "client1-key.pem")))
+ client = HazelcastClient(
+ **get_ssl_config(
+ cluster.id,
+ True,
+ get_abs_path(self.current_directory, "server1-cert.pem"),
+ get_abs_path(self.current_directory, "client1-cert.pem"),
+ get_abs_path(self.current_directory, "client1-key.pem"),
+ )
+ )
self.assertTrue(client.lifecycle_service.is_running())
client.shutdown()
@@ -75,44 +100,65 @@ def test_ma_optional_server_not_authenticated(self):
cluster.start_member()
with self.assertRaises(HazelcastError):
- HazelcastClient(**get_ssl_config(cluster.id, True,
- get_abs_path(self.current_directory, "server2-cert.pem"),
- get_abs_path(self.current_directory, "client1-cert.pem"),
- get_abs_path(self.current_directory, "client1-key.pem")))
+ HazelcastClient(
+ **get_ssl_config(
+ cluster.id,
+ True,
+ get_abs_path(self.current_directory, "server2-cert.pem"),
+ get_abs_path(self.current_directory, "client1-cert.pem"),
+ get_abs_path(self.current_directory, "client1-key.pem"),
+ )
+ )
def test_ma_optional_client_not_authenticated(self):
cluster = self.create_cluster(self.rc, self.configure_cluster(False))
cluster.start_member()
with self.assertRaises(HazelcastError):
- HazelcastClient(**get_ssl_config(cluster.id, True,
- get_abs_path(self.current_directory, "server1-cert.pem"),
- get_abs_path(self.current_directory, "client2-cert.pem"),
- get_abs_path(self.current_directory, "client2-key.pem")))
+ HazelcastClient(
+ **get_ssl_config(
+ cluster.id,
+ True,
+ get_abs_path(self.current_directory, "server1-cert.pem"),
+ get_abs_path(self.current_directory, "client2-cert.pem"),
+ get_abs_path(self.current_directory, "client2-key.pem"),
+ )
+ )
def test_ma_optional_client_and_server_not_authenticated(self):
cluster = self.create_cluster(self.rc, self.configure_cluster(False))
cluster.start_member()
with self.assertRaises(HazelcastError):
- HazelcastClient(**get_ssl_config(cluster.id, True,
- get_abs_path(self.current_directory, "server2-cert.pem"),
- get_abs_path(self.current_directory, "client2-cert.pem"),
- get_abs_path(self.current_directory, "client2-key.pem")))
+ HazelcastClient(
+ **get_ssl_config(
+ cluster.id,
+ True,
+ get_abs_path(self.current_directory, "server2-cert.pem"),
+ get_abs_path(self.current_directory, "client2-cert.pem"),
+ get_abs_path(self.current_directory, "client2-key.pem"),
+ )
+ )
def test_ma_required_with_no_cert_file(self):
cluster = self.create_cluster(self.rc, self.configure_cluster(True))
cluster.start_member()
with self.assertRaises(HazelcastError):
- HazelcastClient(**get_ssl_config(cluster.id, True,
- get_abs_path(self.current_directory, "server1-cert.pem")))
+ HazelcastClient(
+ **get_ssl_config(
+ cluster.id, True, get_abs_path(self.current_directory, "server1-cert.pem")
+ )
+ )
def test_ma_optional_with_no_cert_file(self):
cluster = self.create_cluster(self.rc, self.configure_cluster(False))
cluster.start_member()
- client = HazelcastClient(**get_ssl_config(cluster.id, True,
- get_abs_path(self.current_directory, "server1-cert.pem")))
+ client = HazelcastClient(
+ **get_ssl_config(
+ cluster.id, True, get_abs_path(self.current_directory, "server1-cert.pem")
+ )
+ )
self.assertTrue(client.lifecycle_service.is_running())
client.shutdown()
diff --git a/tests/ssl_tests/ssl_test.py b/tests/ssl_tests/ssl_test.py
index 93635e6056..56f7d58586 100644
--- a/tests/ssl_tests/ssl_test.py
+++ b/tests/ssl_tests/ssl_test.py
@@ -31,8 +31,11 @@ def test_ssl_enabled_is_client_live(self):
cluster = self.create_cluster(self.rc, self.configure_cluster(self.hazelcast_ssl_xml))
cluster.start_member()
- client = HazelcastClient(**get_ssl_config(cluster.id, True,
- get_abs_path(self.current_directory, "server1-cert.pem")))
+ client = HazelcastClient(
+ **get_ssl_config(
+ cluster.id, True, get_abs_path(self.current_directory, "server1-cert.pem")
+ )
+ )
self.assertTrue(client.lifecycle_service.is_running())
client.shutdown()
@@ -57,8 +60,11 @@ def test_ssl_enabled_map_size(self):
cluster = self.create_cluster(self.rc, self.configure_cluster(self.hazelcast_ssl_xml))
cluster.start_member()
- client = HazelcastClient(**get_ssl_config(cluster.id, True,
- get_abs_path(self.current_directory, "server1-cert.pem")))
+ client = HazelcastClient(
+ **get_ssl_config(
+ cluster.id, True, get_abs_path(self.current_directory, "server1-cert.pem")
+ )
+ )
test_map = client.get_map("test_map")
fill_map(test_map, 10)
self.assertEqual(test_map.size().result(), 10)
@@ -68,9 +74,14 @@ def test_ssl_enabled_with_custom_ciphers(self):
cluster = self.create_cluster(self.rc, self.configure_cluster(self.hazelcast_ssl_xml))
cluster.start_member()
- client = HazelcastClient(**get_ssl_config(cluster.id, True,
- get_abs_path(self.current_directory, "server1-cert.pem"),
- ciphers="ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-GCM-SHA384"))
+ client = HazelcastClient(
+ **get_ssl_config(
+ cluster.id,
+ True,
+ get_abs_path(self.current_directory, "server1-cert.pem"),
+ ciphers="ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-GCM-SHA384",
+ )
+ )
self.assertTrue(client.lifecycle_service.is_running())
client.shutdown()
@@ -79,9 +90,14 @@ def test_ssl_enabled_with_invalid_ciphers(self):
cluster.start_member()
with self.assertRaises(HazelcastError):
- HazelcastClient(**get_ssl_config(cluster.id, True,
- get_abs_path(self.current_directory, "server1-cert.pem"),
- ciphers="INVALID-CIPHER1:INVALID_CIPHER2"))
+ HazelcastClient(
+ **get_ssl_config(
+ cluster.id,
+ True,
+ get_abs_path(self.current_directory, "server1-cert.pem"),
+ ciphers="INVALID-CIPHER1:INVALID_CIPHER2",
+ )
+ )
def test_ssl_enabled_with_protocol_mismatch(self):
cluster = self.create_cluster(self.rc, self.configure_cluster(self.hazelcast_ssl_xml))
@@ -89,9 +105,14 @@ def test_ssl_enabled_with_protocol_mismatch(self):
# Member configured with TLSv1
with self.assertRaises(HazelcastError):
- HazelcastClient(**get_ssl_config(cluster.id, True,
- get_abs_path(self.current_directory, "server1-cert.pem"),
- protocol=SSLProtocol.SSLv3))
+ HazelcastClient(
+ **get_ssl_config(
+ cluster.id,
+ True,
+ get_abs_path(self.current_directory, "server1-cert.pem"),
+ protocol=SSLProtocol.SSLv3,
+ )
+ )
def configure_cluster(self, filename):
with open(filename, "r") as f:
diff --git a/tests/statistics_test.py b/tests/statistics_test.py
index 503ea31633..16166a11d5 100644
--- a/tests/statistics_test.py
+++ b/tests/statistics_test.py
@@ -44,9 +44,11 @@ def test_statistics_enabled(self):
client.shutdown()
def test_statistics_period(self):
- client = HazelcastClient(cluster_name=self.cluster.id,
- statistics_enabled=True,
- statistics_period=self.STATS_PERIOD)
+ client = HazelcastClient(
+ cluster_name=self.cluster.id,
+ statistics_enabled=True,
+ statistics_period=self.STATS_PERIOD,
+ )
client_uuid = client._connection_manager.client_uuid
time.sleep(2 * self.STATS_PERIOD)
@@ -60,12 +62,14 @@ def test_statistics_period(self):
def test_statistics_content(self):
map_name = random_string()
- client = HazelcastClient(cluster_name=self.cluster.id,
- statistics_enabled=True,
- statistics_period=self.STATS_PERIOD,
- near_caches={
- map_name: {},
- })
+ client = HazelcastClient(
+ cluster_name=self.cluster.id,
+ statistics_enabled=True,
+ statistics_period=self.STATS_PERIOD,
+ near_caches={
+ map_name: {},
+ },
+ )
client_uuid = client._connection_manager.client_uuid
client.get_map(map_name).blocking()
@@ -108,12 +112,14 @@ def test_statistics_content(self):
def test_special_characters(self):
map_name = random_string() + ",t=es\\t"
- client = HazelcastClient(cluster_name=self.cluster.id,
- statistics_enabled=True,
- statistics_period=self.STATS_PERIOD,
- near_caches={
- map_name: {},
- })
+ client = HazelcastClient(
+ cluster_name=self.cluster.id,
+ statistics_enabled=True,
+ statistics_period=self.STATS_PERIOD,
+ near_caches={
+ map_name: {},
+ },
+ )
client_uuid = client._connection_manager.client_uuid
client.get_map(map_name).blocking()
@@ -129,12 +135,14 @@ def test_special_characters(self):
def test_near_cache_stats(self):
map_name = random_string()
- client = HazelcastClient(cluster_name=self.cluster.id,
- statistics_enabled=True,
- statistics_period=self.STATS_PERIOD,
- near_caches={
- map_name: {},
- })
+ client = HazelcastClient(
+ cluster_name=self.cluster.id,
+ statistics_enabled=True,
+ statistics_period=self.STATS_PERIOD,
+ near_caches={
+ map_name: {},
+ },
+ )
client_uuid = client._connection_manager.client_uuid
test_map = client.get_map(map_name).blocking()
@@ -172,18 +180,25 @@ def test_near_cache_stats(self):
client.shutdown()
def _get_client_stats_from_server(self, client_uuid):
- script = "stats = instance_0.getOriginal().node.getClientEngine().getClientStatistics()\n" \
- "keys = stats.keySet().toArray()\n" \
- "for(i=0; i < keys.length; i++) {\n" \
- " if (keys[i].toString().equals(\"%s\")) {\n" \
- " result = stats.get(keys[i]).clientAttributes()\n" \
- " break\n" \
- " }\n}\n" % client_uuid
+ script = (
+ """
+ stats = instance_0.getOriginal().node.getClientEngine().getClientStatistics();
+ keys = stats.keySet().toArray();
+ for(i=0; i < keys.length; i++) {
+ if (keys[i].toString().equals("%s")) {
+ result = stats.get(keys[i]).clientAttributes();
+ break;
+ }
+ }"""
+ % client_uuid
+ )
return self.rc.executeOnController(self.cluster.id, script, Lang.JAVASCRIPT)
def _unescape_special_chars(self, value):
- return value.replace("\\,", ",").replace("\\=", "=").replace("\\.", ".").replace("\\\\", "\\")
+ return (
+ value.replace("\\,", ",").replace("\\=", "=").replace("\\.", ".").replace("\\\\", "\\")
+ )
def _verify_response_not_empty(self, response):
if not response.success or response.result is None:
diff --git a/tests/transaction_test.py b/tests/transaction_test.py
index 2b003d282f..cb9f091a50 100644
--- a/tests/transaction_test.py
+++ b/tests/transaction_test.py
@@ -80,8 +80,13 @@ def test_rollback_from_another_thread(self):
def test_operations_from_another_thread(self):
transaction = self.client.new_transaction()
- ops = [transaction.get_map, transaction.get_list, transaction.get_multi_map, transaction.get_queue,
- transaction.get_set]
+ ops = [
+ transaction.get_map,
+ transaction.get_list,
+ transaction.get_multi_map,
+ transaction.get_queue,
+ transaction.get_set,
+ ]
t = Thread(target=transaction.begin)
t.start()
@@ -92,8 +97,13 @@ def test_operations_from_another_thread(self):
def test_operations_before_transaction_started(self):
transaction = self.client.new_transaction()
- ops = [transaction.get_map, transaction.get_list, transaction.get_multi_map, transaction.get_queue,
- transaction.get_set]
+ ops = [
+ transaction.get_map,
+ transaction.get_list,
+ transaction.get_multi_map,
+ transaction.get_queue,
+ transaction.get_set,
+ ]
for op in ops:
with self.assertRaises(TransactionError):
@@ -127,4 +137,4 @@ def test_context_manager_rollback(self):
with self.client.new_transaction() as t:
raise RuntimeError("error")
- self.assertEqual(t.state, hazelcast.transaction._STATE_ROLLED_BACK)
\ No newline at end of file
+ self.assertEqual(t.state, hazelcast.transaction._STATE_ROLLED_BACK)
diff --git a/tests/util.py b/tests/util.py
index ff1039f386..53cb879ede 100644
--- a/tests/util.py
+++ b/tests/util.py
@@ -27,13 +27,16 @@ def fill_map(map, size=10, key_prefix="key", value_prefix="val"):
return entries
-def get_ssl_config(cluster_name, enable_ssl=False,
- cafile=None,
- certfile=None,
- keyfile=None,
- password=None,
- protocol=SSLProtocol.TLSv1_2,
- ciphers=None):
+def get_ssl_config(
+ cluster_name,
+ enable_ssl=False,
+ cafile=None,
+ certfile=None,
+ keyfile=None,
+ password=None,
+ protocol=SSLProtocol.TLSv1_2,
+ ciphers=None,
+):
config = {
"cluster_name": cluster_name,
"ssl_enabled": enable_ssl,
diff --git a/tests/util_test.py b/tests/util_test.py
index 82281f434d..4ab77d9148 100644
--- a/tests/util_test.py
+++ b/tests/util_test.py
@@ -1,5 +1,10 @@
-from hazelcast.config import IndexConfig, IndexUtil, IndexType, QueryConstants, \
- UniqueKeyTransformation
+from hazelcast.config import (
+ IndexConfig,
+ IndexUtil,
+ IndexType,
+ QueryConstants,
+ UniqueKeyTransformation,
+)
from hazelcast.util import calculate_version
from unittest import TestCase
@@ -80,9 +85,12 @@ def test_normalized_name(self):
def test_with_bitmap_indexes(self):
bio = {
"unique_key": QueryConstants.THIS_ATTRIBUTE_NAME,
- "unique_key_transformation": UniqueKeyTransformation.RAW
+ "unique_key_transformation": UniqueKeyTransformation.RAW,
}
config = IndexConfig(type=IndexType.BITMAP, attributes=["attr"], bitmap_index_options=bio)
normalized = IndexUtil.validate_and_normalize("map", config)
self.assertEqual(bio["unique_key"], normalized.bitmap_index_options.unique_key)
- self.assertEqual(bio["unique_key_transformation"], normalized.bitmap_index_options.unique_key_transformation)
+ self.assertEqual(
+ bio["unique_key_transformation"],
+ normalized.bitmap_index_options.unique_key_transformation,
+ )