chore(test): Make unit tests go faster (#276)
aliabbasrizvi committed Jun 26, 2020
1 parent a34b8a8 commit d743410
Showing 3 changed files with 30 additions and 22 deletions.
optimizely/config_manager.py (8 changes: 5 additions, 3 deletions)
@@ -402,9 +402,11 @@ def _set_access_token(self, access_token):

     def fetch_datafile(self):
         """ Fetch authenticated datafile and set ProjectConfig. """
-        request_headers = {}
-        request_headers[enums.HTTPHeaders.AUTHORIZATION] = \
-            enums.ConfigManager.AUTHORIZATION_HEADER_DATA_TEMPLATE.format(access_token=self.access_token)
+        request_headers = {
+            enums.HTTPHeaders.AUTHORIZATION: enums.ConfigManager.AUTHORIZATION_HEADER_DATA_TEMPLATE.format(
+                access_token=self.access_token
+            )
+        }

         if self.last_modified:
             request_headers[enums.HTTPHeaders.IF_MODIFIED_SINCE] = self.last_modified
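Note: the config_manager.py change above is purely stylistic. A minimal standalone sketch showing that the old and new header constructions build the same dict (the AUTHORIZATION and AUTH_TEMPLATE names below are placeholders standing in for the enums constants, not the SDK's values):

# Placeholders standing in for enums.HTTPHeaders.AUTHORIZATION and
# enums.ConfigManager.AUTHORIZATION_HEADER_DATA_TEMPLATE.
AUTHORIZATION = 'Authorization'
AUTH_TEMPLATE = 'Bearer {access_token}'
access_token = 'some_token'

# Old style: start from an empty dict and assign the key afterwards.
old_headers = {}
old_headers[AUTHORIZATION] = AUTH_TEMPLATE.format(access_token=access_token)

# New style: build the same mapping in a single dict literal.
new_headers = {
    AUTHORIZATION: AUTH_TEMPLATE.format(access_token=access_token)
}

assert old_headers == new_headers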
tests/test_config_manager.py (13 changes: 9 additions, 4 deletions)
@@ -211,11 +211,11 @@ def test_get_config(self):
     def test_get_config_blocks(self):
         """ Test that get_config blocks until blocking timeout is hit. """
         start_time = time.time()
-        project_config_manager = config_manager.PollingConfigManager(sdk_key='sdk_key', blocking_timeout=5)
+        project_config_manager = config_manager.PollingConfigManager(sdk_key='sdk_key', blocking_timeout=1)
         # Assert get_config should block until blocking timeout.
         project_config_manager.get_config()
         end_time = time.time()
-        self.assertEqual(5, round(end_time - start_time))
+        self.assertEqual(1, round(end_time - start_time))


 @mock.patch('requests.get')
@@ -425,17 +425,22 @@ def test_fetch_datafile(self, _):
         """ Test that fetch_datafile sets authorization header in request header and sets config based on response. """
         access_token = 'some_token'
         sdk_key = 'some_key'
-        with mock.patch('optimizely.config_manager.AuthDatafilePollingConfigManager.fetch_datafile'):
+        with mock.patch('optimizely.config_manager.AuthDatafilePollingConfigManager.fetch_datafile'), mock.patch(
+            'optimizely.config_manager.AuthDatafilePollingConfigManager._run'
+        ):
             project_config_manager = config_manager.AuthDatafilePollingConfigManager(
                 access_token=access_token, sdk_key=sdk_key)
         expected_datafile_url = enums.ConfigManager.AUTHENTICATED_DATAFILE_URL_TEMPLATE.format(sdk_key=sdk_key)
         test_headers = {'Last-Modified': 'New Time'}
         test_datafile = json.dumps(self.config_dict_with_features)
         test_response = requests.Response()
         test_response.status_code = 200
         test_response.headers = test_headers
         test_response._content = test_datafile

         # Call fetch_datafile and assert that request was sent with correct authorization header
-        with mock.patch('requests.get', return_value=test_response) as mock_request:
+        with mock.patch('requests.get',
+                        return_value=test_response) as mock_request:
             project_config_manager.fetch_datafile()

         mock_request.assert_called_once_with(
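Note: the extra mock.patch over AuthDatafilePollingConfigManager._run is presumably what speeds this test up, by keeping the config manager's polling loop from doing real work while the object is constructed. A rough sketch of the mechanism using a stand-in class (Poller, _polling_thread, and _run below are hypothetical, not the SDK's actual implementation):

import threading
from unittest import mock


class Poller:
    """Stand-in for a config manager that starts a polling thread in __init__."""

    def __init__(self):
        self._polling_thread = threading.Thread(target=self._run, daemon=True)
        self._polling_thread.start()

    def _run(self):
        # Real polling loop: would keep fetching and slow the test down.
        while True:
            self.fetch_datafile()

    def fetch_datafile(self):
        pass


# Patching _run alongside fetch_datafile turns the thread body into a no-op,
# so constructing the object does no real polling during the test.
with mock.patch.object(Poller, 'fetch_datafile'), mock.patch.object(Poller, '_run'):
    poller = Poller()

poller._polling_thread.join(timeout=1)
assert not poller._polling_thread.is_alive()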
tests/test_event_processor.py (31 changes: 16 additions, 15 deletions)
@@ -26,7 +26,7 @@
 from optimizely.event.user_event_factory import UserEventFactory
 from optimizely.event_dispatcher import EventDispatcher as default_event_dispatcher
 from optimizely.helpers import enums
-from optimizely.logger import SimpleLogger
+from optimizely.logger import NoOpLogger
 from . import base

@@ -114,15 +114,16 @@ class BatchEventProcessorTest(base.BaseTest):

     DEFAULT_QUEUE_CAPACITY = 1000
     MAX_BATCH_SIZE = 10
-    MAX_DURATION_SEC = 1
-    MAX_TIMEOUT_INTERVAL_SEC = 5
+    MAX_DURATION_SEC = 0.2
+    MAX_TIMEOUT_INTERVAL_SEC = 0.1
+    TEST_TIMEOUT = 0.3

     def setUp(self, *args, **kwargs):
         base.BaseTest.setUp(self, 'config_dict_with_multiple_experiments')
         self.test_user_id = 'test_user'
         self.event_name = 'test_event'
         self.event_queue = queue.Queue(maxsize=self.DEFAULT_QUEUE_CAPACITY)
-        self.optimizely.logger = SimpleLogger()
+        self.optimizely.logger = NoOpLogger()
         self.notification_center = self.optimizely.notification_center

     def tearDown(self):
@@ -154,7 +155,7 @@ def test_drain_on_stop(self):
         self.event_processor.process(user_event)
         event_dispatcher.expect_conversion(self.event_name, self.test_user_id)

-        time.sleep(5)
+        time.sleep(self.TEST_TIMEOUT)

         self.assertStrictTrue(event_dispatcher.compare_events())
         self.assertEqual(0, self.event_processor.event_queue.qsize())
@@ -169,15 +170,15 @@ def test_flush_on_max_timeout(self):
         self.event_processor.process(user_event)
         event_dispatcher.expect_conversion(self.event_name, self.test_user_id)

-        time.sleep(3)
+        time.sleep(self.TEST_TIMEOUT)

         self.assertStrictTrue(event_dispatcher.compare_events())
         self.assertEqual(0, self.event_processor.event_queue.qsize())

     def test_flush_once_max_timeout(self):
         event_dispatcher = TestEventDispatcher()

-        self.optimizely.logger = SimpleLogger(enums.LogLevels.DEBUG)
+        self.optimizely.logger = NoOpLogger()

         with mock.patch.object(self.optimizely, 'logger') as mock_config_logging:
             self._set_event_processor(event_dispatcher, mock_config_logging)
@@ -186,7 +187,7 @@ def test_flush_once_max_timeout(self):
         self.event_processor.process(user_event)
         event_dispatcher.expect_conversion(self.event_name, self.test_user_id)

-        time.sleep(1.75)
+        time.sleep(self.TEST_TIMEOUT)

         self.assertStrictTrue(event_dispatcher.compare_events())
         self.assertEqual(0, self.event_processor.event_queue.qsize())
@@ -195,7 +196,7 @@ def test_flush_once_max_timeout(self):
         mock_config_logging.debug.assert_any_call('Flushing batch size 1')
         mock_config_logging.debug.assert_any_call('Flush interval deadline. Flushed batch.')
         self.assertTrue(mock_config_logging.debug.call_count == 3)
-        self.optimizely.logger = SimpleLogger()
+        self.optimizely.logger = NoOpLogger()

     def test_flush_max_batch_size(self):
         event_dispatcher = TestEventDispatcher()
@@ -208,7 +209,7 @@ def test_flush_max_batch_size(self):
             self.event_processor.process(user_event)
             event_dispatcher.expect_conversion(self.event_name, self.test_user_id)

-        time.sleep(1)
+        time.sleep(self.TEST_TIMEOUT)

         self.assertStrictTrue(event_dispatcher.compare_events())
         self.assertEqual(0, self.event_processor.event_queue.qsize())
@@ -228,7 +229,7 @@ def test_flush(self):
         self.event_processor.flush()
         event_dispatcher.expect_conversion(self.event_name, self.test_user_id)

-        time.sleep(3)
+        time.sleep(self.TEST_TIMEOUT)

         self.assertStrictTrue(event_dispatcher.compare_events())
         self.assertEqual(0, self.event_processor.event_queue.qsize())
@@ -253,7 +254,7 @@ def test_flush_on_mismatch_revision(self):
         self.event_processor.process(user_event_2)
         event_dispatcher.expect_conversion(self.event_name, self.test_user_id)

-        time.sleep(3)
+        time.sleep(self.TEST_TIMEOUT)

         self.assertStrictTrue(event_dispatcher.compare_events())
         self.assertEqual(0, self.event_processor.event_queue.qsize())
@@ -278,7 +279,7 @@ def test_flush_on_mismatch_project_id(self):
         self.event_processor.process(user_event_2)
         event_dispatcher.expect_conversion(self.event_name, self.test_user_id)

-        time.sleep(3)
+        time.sleep(self.TEST_TIMEOUT)

         self.assertStrictTrue(event_dispatcher.compare_events())
         self.assertEqual(0, self.event_processor.event_queue.qsize())
@@ -293,7 +294,7 @@ def test_stop_and_start(self):
         self.event_processor.process(user_event)
         event_dispatcher.expect_conversion(self.event_name, self.test_user_id)

-        time.sleep(3)
+        time.sleep(self.TEST_TIMEOUT)

         self.assertStrictTrue(event_dispatcher.compare_events())
         self.event_processor.stop()
@@ -509,7 +510,7 @@ def setUp(self, *args, **kwargs):
         base.BaseTest.setUp(self, 'config_dict_with_multiple_experiments')
         self.test_user_id = 'test_user'
         self.event_name = 'test_event'
-        self.optimizely.logger = SimpleLogger()
+        self.optimizely.logger = NoOpLogger()
         self.notification_center = self.optimizely.notification_center
         self.event_dispatcher = TestForwardingEventDispatcher(is_updated=False)
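Note: the new constants work because TEST_TIMEOUT (0.3s) is longer than MAX_DURATION_SEC (0.2s), which these tests presumably hand to the batch processor as its flush interval, so every batch has been flushed by the time the assertions run. A trivial sketch of that timing relationship (values copied from the diff; the sleep stands in for the processor's background flush):

import time

# Values copied from the diff above.
MAX_DURATION_SEC = 0.2   # flush interval the tests configure on the processor
TEST_TIMEOUT = 0.3       # how long each test now sleeps before asserting

# The sleep has to outlast the flush interval, otherwise the assertions
# would race the background flush.
assert TEST_TIMEOUT > MAX_DURATION_SEC

start = time.time()
time.sleep(TEST_TIMEOUT)  # replaces the old 1-5 second sleeps
print('waited ~{:.1f}s instead of several seconds'.format(time.time() - start))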
