Skip to content

Commit

Permalink
Merge 060ff5e into 4aad6b7
Browse files Browse the repository at this point in the history
  • Loading branch information
Mat001 authored Jul 29, 2021
2 parents 4aad6b7 + 060ff5e commit b646a6d
Show file tree
Hide file tree
Showing 2 changed files with 31 additions and 1 deletion.
2 changes: 1 addition & 1 deletion optimizely/event/event_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -259,7 +259,7 @@ def process(self, user_event):
try:
self.event_queue.put_nowait(user_event)
except queue.Full:
self.logger.debug(
self.logger.warning(
'Payload not accepted by the queue. Current size: {}'.format(str(self.event_queue.qsize()))
)

Expand Down
30 changes: 30 additions & 0 deletions tests/test_event_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -494,6 +494,36 @@ def on_log_event(log_event):
1, len(self.optimizely.notification_center.notification_listeners[enums.NotificationTypes.LOG_EVENT]),
)

def test_warning_log_level_on_queue_overflow(self):
    """ Test that a warning log is created when events overflow the queue. """

    # Create a scenario where the batch size (MAX_BATCH_SIZE) is significantly
    # larger than the queue size, guaranteeing queue.Full is raised.
    # Use a smaller queue and a higher timeout to avoid test flakiness.
    test_max_queue_size = 10
    self.MAX_BATCH_SIZE = 1000

    event_dispatcher = CustomEventDispatcher()

    with mock.patch.object(self.optimizely, 'logger') as mock_config_logging:
        self.event_processor = BatchEventProcessor(
            event_dispatcher,
            self.optimizely.logger,
            True,
            queue.Queue(maxsize=test_max_queue_size),
        )

        # Push far more events than the queue can hold; overflowing events
        # should be reported at WARNING level (not DEBUG). The loop index is
        # unused, so use the conventional `_` placeholder.
        for _ in range(self.MAX_BATCH_SIZE):
            user_event = self._build_conversion_event(self.event_name)
            self.event_processor.process(user_event)
            event_dispatcher.expect_conversion(self.event_name, self.test_user_id)

        time.sleep(self.TEST_TIMEOUT)

        # The queue is flushed by the processor, even though events overflowed.
        self.assertEqual(0, self.event_processor.event_queue.qsize())
        mock_config_logging.warning.assert_called_with(
            'Payload not accepted by the queue. Current size: {}'.format(str(test_max_queue_size))
        )


class CustomForwardingEventDispatcher(object):
def __init__(self, is_updated=False):
Expand Down

0 comments on commit b646a6d

Please sign in to comment.