
Commit

Merge 2666271 into 0406a8d
liampauling committed Sep 2, 2019
2 parents 0406a8d + 2666271 commit 5ca21b7
Showing 6 changed files with 105 additions and 42 deletions.
13 changes: 13 additions & 0 deletions HISTORY.rst
@@ -3,6 +3,19 @@
Release History
---------------

1.10.2 (2019-09-02)
+++++++++++++++++++

**Improvements**

- OrderCache / UnmatchedOrder logic improved
- streaming_update and streaming_unique_id added to lightweight response (see the sketch after this list)

**Bug Fixes**

- handicap bugfix on OrderCache
- Missing closed logic added to OrderCache
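
A minimal, hedged sketch of the new lightweight response fields mentioned above, assuming a MarketBookCache already populated from the stream; market_book_cache and raw_change are illustrative placeholder names, not part of this commit:

# market_book_cache stands for an already-populated MarketBookCache and
# raw_change for the raw market change message that triggered the update;
# both names are placeholders used only for illustration.
market_book = market_book_cache.create_resource(
    unique_id=1234, streaming_update=raw_change, lightweight=True)
market_book["streaming_unique_id"]  # -> 1234, the id of the stream that produced it
market_book["streaming_update"]     # -> the raw change message, returned alongside the serialised book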

1.10.1 (2019-08-12)
+++++++++++++++++++

2 changes: 1 addition & 1 deletion betfairlightweight/__init__.py
Expand Up @@ -6,7 +6,7 @@
from . import filters

__title__ = 'betfairlightweight'
__version__ = '1.10.1'
__version__ = '1.10.2'
__author__ = 'Liam Pauling'

# Set default logging handler to avoid "No handler found" warnings.
48 changes: 24 additions & 24 deletions betfairlightweight/streaming/cache.py
@@ -198,15 +198,16 @@ def update_cache(self, market_change, publish_time):
self._update_runner_dict()

def create_resource(self, unique_id, streaming_update, lightweight):
data = self.serialise
data["streaming_unique_id"] = unique_id
data["streaming_update"] = streaming_update
if lightweight:
return self.serialise
return data
else:
return MarketBook(
elapsed_time=(datetime.datetime.utcnow()-self._datetime_updated).total_seconds(),
streaming_unique_id=unique_id,
streaming_update=streaming_update,
market_definition=MarketDefinition(**self.market_definition),
**self.serialise
**data
)

def _update_runner_dict(self):
@@ -266,7 +267,9 @@ def __init__(self, id, p, s, side, status, ot, pd, sm, sr, sl, sc, sv, rfo, rfs,
self.persistence_type = pt
self.order_type = ot
self.placed_date = BaseResource.strip_datetime(pd)
self.placed_date_string = self.create_placed_date_string()
self.matched_date = BaseResource.strip_datetime(md)
self.matched_date_string = self.create_matched_date_string()
self.average_price_matched = avp
self.size_matched = sm
self.size_remaining = sr
@@ -280,13 +283,11 @@ def __init__(self, id, p, s, side, status, ot, pd, sm, sr, sl, sc, sv, rfo, rfs,
self.lapsed_date = ld
self.lapse_status_reason_code = lsrc # todo add to output?

@property
def placed_date_string(self):
def create_placed_date_string(self):
if self.placed_date:
return self.placed_date.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

@property
def matched_date_string(self):
def create_matched_date_string(self):
if self.matched_date:
return self.matched_date.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

@@ -326,24 +327,17 @@ def __init__(self, id, fullImage=None, ml=None, mb=None, uo=None, hc=0, smc=None
self.full_image = fullImage
self.matched_lays = Available(ml, 1)
self.matched_backs = Available(mb, 1)
self.unmatched_orders = [UnmatchedOrder(**i) for i in uo] if uo else []
self.unmatched_orders = {i["id"]: UnmatchedOrder(**i) for i in uo} if uo else {}
self.handicap = hc
self.strategy_matches = smc

def update_unmatched(self, unmatched_orders):
order_dict = {order.bet_id: order for order in self.unmatched_orders}
for unmatched_order in unmatched_orders:
if unmatched_order.get('id') in order_dict:
for n, order in enumerate(self.unmatched_orders):
if order.bet_id == unmatched_order.get('id'):
self.unmatched_orders[n] = UnmatchedOrder(**unmatched_order)
break
else:
self.unmatched_orders.append(UnmatchedOrder(**unmatched_order))
self.unmatched_orders[unmatched_order["id"]] = UnmatchedOrder(**unmatched_order)

def serialise_orders(self, market_id):
return [
order.serialise(market_id, self.selection_id, self.handicap) for order in self.unmatched_orders
order.serialise(market_id, self.selection_id, self.handicap) for order in self.unmatched_orders.values()
]
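
With this change unmatched orders are stored in a dict keyed on bet id, so an update for a bet that is already cached simply replaces its entry instead of the old list scan and append. A minimal sketch, assuming betfairlightweight at this commit; the order payload values are illustrative only:

from betfairlightweight.streaming.cache import OrderBookRunner

# illustrative unmatched order change; field values are placeholders
uo = [{"id": 1, "p": 2, "s": 2, "side": "B", "status": "E", "ot": "L",
       "pd": 1566480793000, "sm": 0, "sr": 2, "sl": 0, "sc": 0, "sv": 0,
       "rfo": "", "rfs": ""}]
runner = OrderBookRunner(id=7017905, ml=[], mb=[], uo=uo)
# a later change carrying the same bet id overwrites the cached entry in place
runner.update_unmatched([{**uo[0], "status": "EC", "sm": 2, "sr": 0}])
assert runner.unmatched_orders[1].status == "EC"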


@@ -359,10 +353,15 @@ def __init__(self, **kwargs):
def update_cache(self, order_book, publish_time):
self._datetime_updated = self.strip_datetime(publish_time)
self.publish_time = publish_time
if "closed" in order_book:
self.closed = order_book["closed"]

for order_changes in order_book.get('orc', []):
selection_id = order_changes['id']
runner = self.runner_dict.get(selection_id)
handicap = order_changes.get('hc', 0)
runner = self.runner_dict.get(
(selection_id, handicap)
)
if runner:
if 'ml' in order_changes:
runner.matched_lays.update(order_changes['ml'])
@@ -374,20 +373,21 @@
self.runners.append(OrderBookRunner(**order_changes))

def create_resource(self, unique_id, streaming_update, lightweight):
data = self.serialise
data["streaming_unique_id"] = unique_id
data["streaming_update"] = streaming_update
if lightweight:
return self.serialise
return data
else:
return CurrentOrders(
elapsed_time=(datetime.datetime.utcnow()-self._datetime_updated).total_seconds(),
streaming_unique_id=unique_id,
streaming_update=streaming_update,
publish_time=self.publish_time,
**self.serialise
**data
)

@property
def runner_dict(self):
return {runner.selection_id: runner for runner in self.runners}
return {(runner.selection_id, runner.handicap): runner for runner in self.runners}
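
Keying runner_dict on (selection_id, handicap), together with the handicap-aware lookup in update_cache above, means separate handicap lines on the same selection no longer overwrite each other, and a closed flag on the incoming message is now carried onto the cache. A minimal sketch, assuming betfairlightweight at this commit; the order change message below is illustrative:

from betfairlightweight.streaming.cache import OrderBookCache

cache = OrderBookCache(**{"id": "1.161613698"})
order_book = {  # illustrative ocm fragment; real messages carry full order data
    "id": "1.161613698", "closed": False,
    "orc": [
        {"fullImage": True, "id": 7017905, "hc": 8.5, "uo": [], "mb": [[2, 2]]},
        {"fullImage": True, "id": 7017905, "hc": 7.5, "uo": [], "mb": [[2, 2]]},
    ],
}
cache.update_cache(order_book, publish_time=1566480793554)
assert cache.closed is False                      # closed flag copied from the message
# one cached runner per (selection_id, handicap) pair
assert set(cache.runner_dict) == {(7017905, 8.5), (7017905, 7.5)}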

@property
def serialise(self):
2 changes: 1 addition & 1 deletion betfairlightweight/streaming/stream.py
@@ -162,7 +162,7 @@ def _process(self, order_books, publish_time):
self._updates_processed += 1

output_order_book.append(
self._caches[market_id].create_resource(self.unique_id, order_book, self._lightweight)
order_book_cache.create_resource(self.unique_id, order_book, self._lightweight)
)
self.on_process(output_order_book)

1 change: 1 addition & 0 deletions tests/resources/streaming_ocm_FULL_IMAGE.json
@@ -0,0 +1 @@
{"op":"ocm","id":3,"clk":"AMYkAJokAJAnAIk2ANYk","pt":1566480793554,"oc":[{"id":"1.161613698","orc":[{"fullImage":true,"id":7017905,"hc":8.5,"uo":[{"id":"175706685825","p":2,"s":2,"side":"B","status":"EC","pt":"L","ot":"L","pd":1566480793000,"md":1566480793000,"avp":2,"sm":2,"sr":0,"sl":0,"sc":0,"sv":0,"rac":"","rc":"REG_LGA","rfo":"","rfs":""}],"mb":[[2,2]]},{"fullImage":true,"id":7017905,"hc":7.5,"uo":[{"id":"175706685826","p":2,"s":2,"side":"B","status":"EC","pt":"L","ot":"L","pd":1566480793000,"md":1566480793000,"avp":2,"sm":2,"sr":0,"sl":0,"sc":0,"sv":0,"rac":"","rc":"REG_LGA","rfo":"","rfs":""}],"mb":[[2,2]]},{"fullImage":true,"id":7017905,"uo":[{"id":"175706685827","p":2,"s":2,"side":"B","status":"EC","pt":"L","ot":"L","pd":1566480793000,"md":1566480793000,"avp":2,"sm":2,"sr":0,"sl":0,"sc":0,"sv":0,"rac":"","rc":"REG_LGA","rfo":"","rfs":""}],"mb":[[2,2]]},{"fullImage":true,"id":7017905,"uo":[{"id":"175706685828","p":2,"s":2,"side":"B","status":"EC","pt":"L","ot":"L","pd":1566480793000,"md":1566480793000,"avp":2,"sm":2,"sr":0,"sl":0,"sc":0,"sv":0,"rac":"","rc":"REG_LGA","rfo":"","rfs":""}],"mb":[[2,2]]},{"fullImage":true,"id":1017905,"uo":[{"id":"175706685828","p":2,"s":2,"side":"B","status":"EC","pt":"L","ot":"L","pd":1566480793000,"md":1566480793000,"avp":2,"sm":2,"sr":0,"sl":0,"sc":0,"sv":0,"rac":"","rc":"REG_LGA","rfo":"","rfs":""}],"mb":[[2,2]]}]}]}
81 changes: 65 additions & 16 deletions tests/unit/test_cache.py
@@ -160,13 +160,15 @@ def test_update_cache_tv(self, mock_strip_datetime):
#
# assert self.market_book_cache.total_matched == book.get('tv')

@mock.patch('betfairlightweight.streaming.cache.MarketBookCache.serialise')
@mock.patch('betfairlightweight.streaming.cache.MarketBookCache.serialise', new_callable=mock.PropertyMock,
return_value={})
@mock.patch('betfairlightweight.streaming.cache.MarketDefinition')
@mock.patch('betfairlightweight.streaming.cache.MarketBook')
def test_create_resource(self, mock_market_book, mock_market_definition, mock_serialise):
# lightweight
market_book = self.market_book_cache.create_resource(1234, {}, True)
assert market_book == mock_serialise
market_book = self.market_book_cache.create_resource(1234, {"test"}, True)
assert market_book == {'streaming_update': {"test"}, 'streaming_unique_id': 1234}
assert market_book == mock_serialise()
# not lightweight
market_book = self.market_book_cache.create_resource(1234, {}, False)
assert market_book == mock_market_book()
@@ -269,9 +271,24 @@ def setUp(self):
self.order_book_cache = OrderBookCache(**{})
self.runner = mock.Mock()
self.runner.selection_id = 10895629
self.runner.handicap = 0
self.runner.serialise_orders = mock.Mock(return_value=[])
self.runner.unmatched_orders = [1]
self.order_book_cache.runners = [self.runner]

def test_full_image(self):
mock_response = create_mock_json('tests/resources/streaming_ocm_FULL_IMAGE.json')
for order_book in mock_response.json().get('oc'):
self.order_book_cache.update_cache(order_book, 1234)

self.assertEqual(len(self.order_book_cache.runners), 5)
self.assertEqual(len(self.order_book_cache.runner_dict), 5)
for k, v in self.order_book_cache.runner_dict.items():
if k == (7017905, 0):
self.assertEqual(len(v.unmatched_orders), 2)
else:
self.assertEqual(len(v.unmatched_orders), 1)

def test_update_cache(self):
mock_response = create_mock_json('tests/resources/streaming_ocm_UPDATE.json')
for order_book in mock_response.json().get('oc'):
@@ -292,23 +309,35 @@ def test_update_cache_new(self, mock_order_book_runner):
for order_changes in order_book.get('orc'):
mock_order_book_runner.assert_called_with(**order_changes)

@mock.patch('betfairlightweight.streaming.cache.OrderBookCache.serialise')
def test_update_cache_closed(self):
mock_response = create_mock_json('tests/resources/streaming_ocm_SUB_IMAGE.json')
for order_book in mock_response.json().get('oc'):
self.order_book_cache.update_cache(order_book, 1234)
self.assertTrue(self.order_book_cache.closed)

@mock.patch('betfairlightweight.streaming.cache.OrderBookCache.serialise', new_callable=mock.PropertyMock,
return_value={})
@mock.patch('betfairlightweight.streaming.cache.CurrentOrders')
def test_create_resource(self, mock_current_orders, mock_serialise):
# lightweight
current_orders = self.order_book_cache.create_resource(123, {"test"}, True)
assert current_orders == mock_serialise()
assert current_orders == {'streaming_update': {"test"}, 'streaming_unique_id': 123}
# not lightweight
current_orders = self.order_book_cache.create_resource(123, {}, False)

assert current_orders == mock_current_orders()

def test_runner_dict(self):

class Runner:
def __init__(self, selection_id, name):
def __init__(self, selection_id, name, handicap):
self.selection_id = selection_id
self.name = name
self.handicap = handicap

(a, b) = (Runner(123, 'a'), Runner(456, 'b'))
(a, b) = (Runner(123, 'a', 0), Runner(456, 'b', 1))
self.order_book_cache.runners = [a, b]
assert self.order_book_cache.runner_dict == {123: a, 456: b}
assert self.order_book_cache.runner_dict == {(123, 0): a, (456, 1): b}

def test_serialise(self):
serialised = self.order_book_cache.serialise
@@ -319,15 +348,37 @@ def test_serialise(self):
class TestOrderBookRunner(unittest.TestCase):

def setUp(self):
self.order_book_runner = OrderBookRunner(**{'id': 1, 'ml': [], 'mb': [], 'uo': []})
uo = [
{"id": 1, "p": "a", "s": "a", "side": "a", "ot": "a", "pd": "a", "sm": "a", "sr": "a", "sl": "a",
"sc": "a", "sv": "a", "rfo": "a", "rfs": "a", "status": "a"},
{"id": 2, "p": "b", "s": "a", "side": "a", "ot": "a", "pd": "a", "sm": "a", "sr": "a", "sl": "a",
"sc": "a", "sv": "a", "rfo": "a", "rfs": "a", "status": "b"},
]
self.order_book_runner = OrderBookRunner(**{'id': 1, 'ml': [], 'mb': [], 'uo': uo})

def test_update_unmatched(self):
unmatched_orders = [
{"id": 2, "p": "b", "s": "a", "side": "a", "ot": "a", "pd": "a", "sm": "a", "sr": "a", "sl": "a",
"sc": "a", "sv": "a", "rfo": "a", "rfs": "a", "status": "c"}
]
self.order_book_runner.update_unmatched(unmatched_orders)

self.assertEqual(
self.order_book_runner.unmatched_orders[1].status,
"a"
)
self.assertEqual(
self.order_book_runner.unmatched_orders[2].status,
"c"
)


class TestUnmatchedOrder(unittest.TestCase):

def setUp(self):
order = {
'id': 1, 'p': 2, 's': 3, 'side': 'L', 'status': 'E', 'pt': 'L', 'ot': 'L', 'pd': 8, 'sm': 9, 'sr': 10,
'sl': 11, 'sc': 12, 'sv': 13, 'rfo': 14, 'rfs': 15, 'ld': 16, 'lsrc': 17, 'error': 'test'
'sl': 11, 'sc': 12, 'sv': 13, 'rfo': 14, 'rfs': 15, 'ld': 16, 'lsrc': 17, 'error': 'test', 'md': 4
}
self.unmatched_order = UnmatchedOrder(**order)

@@ -351,21 +402,19 @@ def test_init(self):
assert self.unmatched_order.lapse_status_reason_code == 17

def test_placed_date_string(self):
now = datetime.datetime.now()
self.unmatched_order.placed_date = now
now = BaseResource.strip_datetime(8)
assert self.unmatched_order.placed_date_string == now.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

def test_matched_date_string(self):
now = datetime.datetime.now()
self.unmatched_order.matched_date = now
now = BaseResource.strip_datetime(4)
assert self.unmatched_order.matched_date_string == now.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

def test_serialise(self):
assert self.unmatched_order.serialise('1.23', 12345, 0.0) == {
'sizeLapsed': 11, 'persistenceType': 'LAPSE', 'sizeRemaining': 10,
'placedDate': '1970-01-01T00:00:00.008000Z', 'sizeVoided': 13, 'sizeCancelled': 12, 'betId': 1,
'customerOrderRef': 14, 'orderType': 'LIMIT', 'marketId': '1.23', 'matchedDate': None, 'side': 'LAY',
'customerOrderRef': 14, 'orderType': 'LIMIT', 'marketId': '1.23', 'side': 'LAY',
'selectionId': 12345, 'bspLiability': None, 'sizeMatched': 9, 'handicap': 0.0, 'averagePriceMatched': 0.0,
'status': 'EXECUTABLE', 'customerStrategyRef': 15, 'regulatorCode': None,
'priceSize': {'price': 2, 'size': 3}
'priceSize': {'price': 2, 'size': 3}, 'matchedDate': '1970-01-01T00:00:00.004000Z'
}
