-
Notifications
You must be signed in to change notification settings - Fork 1.6k
/
data_proxy.py
303 lines (250 loc) · 13.6 KB
/
data_proxy.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
# -*- coding: utf-8 -*-
# 版权所有 2019 深圳米筐科技有限公司(下称“米筐科技”)
#
# 除非遵守当前许可,否则不得使用本软件。
#
# * 非商业用途(非商业用途指个人出于非商业目的使用本软件,或者高校、研究所等非营利机构出于教育、科研等目的使用本软件):
# 遵守 Apache License 2.0(下称“Apache 2.0 许可”),
# 您可以在以下位置获得 Apache 2.0 许可的副本:http://www.apache.org/licenses/LICENSE-2.0。
# 除非法律有要求或以书面形式达成协议,否则本软件分发时需保持当前许可“原样”不变,且不得附加任何条件。
#
# * 商业用途(商业用途指个人出于任何商业目的使用本软件,或者法人或其他组织出于任何目的使用本软件):
# 未经米筐科技授权,任何个人不得出于任何商业目的使用本软件(包括但不限于向第三方提供、销售、出租、出借、转让本软件、
# 本软件的衍生产品、引用或借鉴了本软件功能或源代码的产品或服务),任何法人或其他组织不得出于任何目的使用本软件,
# 否则米筐科技有权追究相应的知识产权侵权责任。
# 在此前提下,对本软件的使用同样需要遵守 Apache 2.0 许可,Apache 2.0 许可与本许可冲突之处,以本许可为准。
# 详细的授权流程,请联系 public@ricequant.com 获取。
from datetime import datetime, date
from typing import Union, List, Sequence, Optional
import six
import numpy as np
import pandas as pd
from rqalpha.const import INSTRUMENT_TYPE, TRADING_CALENDAR_TYPE
from rqalpha.utils import risk_free_helper, TimeRange, merge_trading_period
from rqalpha.data.trading_dates_mixin import TradingDatesMixin
from rqalpha.model.bar import BarObject, NANDict, PartialBarObject
from rqalpha.model.tick import TickObject
from rqalpha.model.instrument import Instrument
from rqalpha.utils.functools import lru_cache
from rqalpha.utils.datetime_func import convert_int_to_datetime, convert_date_to_int
from rqalpha.utils.typing import DateLike, StrOrIter
from rqalpha.interface import AbstractDataSource, AbstractPriceBoard
class DataProxy(TradingDatesMixin):
def __init__(self, data_source, price_board):
# type: (AbstractDataSource, AbstractPriceBoard) -> None
self._data_source = data_source
self._price_board = price_board
try:
trading_calendars = data_source.get_trading_calendars()
except NotImplementedError:
# forward compatible
trading_calendars = {TRADING_CALENDAR_TYPE.EXCHANGE: data_source.get_trading_calendar()}
TradingDatesMixin.__init__(self, trading_calendars)
    def __getattr__(self, item):
        # Transparent delegation: any attribute not defined on DataProxy
        # itself is looked up on the underlying data source.
        return getattr(self._data_source, item)
def get_trading_minutes_for(self, order_book_id, dt):
instrument = self.instruments(order_book_id)
minutes = self._data_source.get_trading_minutes_for(instrument, dt)
return [] if minutes is None else minutes
def get_yield_curve(self, start_date, end_date, tenor=None):
if isinstance(tenor, six.string_types):
tenor = [tenor]
return self._data_source.get_yield_curve(start_date, end_date, tenor)
def get_risk_free_rate(self, start_date, end_date):
tenors = risk_free_helper.get_tenors_for(start_date, end_date)
# 为何取 start_date 当日的?表示 start_date 时借入资金、end_date 归还的成本
_s = start_date if self.is_trading_date(start_date) else self.get_next_trading_date(start_date, n=1)
yc = self._data_source.get_yield_curve(_s, _s)
if yc is None or yc.empty:
return np.nan
yc = yc.iloc[0]
for tenor in tenors[::-1]:
rate = yc.get(tenor)
if rate and not np.isnan(rate):
return rate
else:
return np.nan
def get_dividend(self, order_book_id):
instrument = self.instruments(order_book_id)
return self._data_source.get_dividend(instrument)
def get_split(self, order_book_id):
instrument = self.instruments(order_book_id)
return self._data_source.get_split(instrument)
    def get_dividend_by_book_date(self, order_book_id, date):
        # Return the dividend record(s) registered on the first trading date
        # after ``date`` (book-closure date, falling back to ex-dividend
        # date), or None when there is none.
        table = self._data_source.get_dividend(self.instruments(order_book_id))
        if table is None or len(table) == 0:
            return
        try:
            dates = table['book_closure_date']
        except ValueError:
            # Fall back when the table lacks the book_closure_date field.
            # NOTE(review): this relies on the field lookup raising
            # ValueError — confirm against the actual table type (older
            # numpy raised ValueError, newer raises KeyError).
            dates = table['ex_dividend_date']
        date = self.get_next_trading_date(date)
        # Dates are stored as yyyymmdd integers.
        dt = date.year * 10000 + date.month * 100 + date.day
        left_pos = dates.searchsorted(dt)
        right_pos = dates.searchsorted(dt, side="right")
        if left_pos >= right_pos:
            # No record whose date equals dt.
            return None
        return table[left_pos: right_pos]
def get_split_by_ex_date(self, order_book_id, date):
df = self.get_split(order_book_id)
if df is None or len(df) == 0:
return
dt = convert_date_to_int(date)
pos = df['ex_date'].searchsorted(dt)
if pos == len(df) or df['ex_date'][pos] != dt:
return None
return df['split_factor'][pos]
@lru_cache(10240)
def _get_prev_close(self, order_book_id, dt):
instrument = self.instruments(order_book_id)
prev_trading_date = self.get_previous_trading_date(dt)
bar = self._data_source.history_bars(instrument, 1, '1d', 'close', prev_trading_date,
skip_suspended=False, include_now=False, adjust_orig=dt)
if bar is None or len(bar) < 1:
return np.nan
return bar[0]
    def get_prev_close(self, order_book_id, dt):
        # Normalize dt to midnight so every intraday timestamp of the same
        # day hits one cache entry in _get_prev_close.
        return self._get_prev_close(order_book_id, dt.replace(hour=0, minute=0, second=0))
@lru_cache(10240)
def _get_prev_settlement(self, instrument, dt):
bar = self._data_source.history_bars(instrument, 1, '1d', 'prev_settlement', dt, skip_suspended=False)
if bar is None or len(bar) == 0:
return np.nan
return bar[0]
@lru_cache(10240)
def _get_settlement(self, instrument, dt):
bar = self._data_source.history_bars(instrument, 1, '1d', 'settlement', dt, skip_suspended=False)
if bar is None or len(bar) == 0:
raise LookupError("'{}', dt={}".format(instrument.order_book_id, dt))
return bar[0]
def get_prev_settlement(self, order_book_id, dt):
instrument = self.instruments(order_book_id)
if instrument.type not in (INSTRUMENT_TYPE.FUTURE, INSTRUMENT_TYPE.OPTION):
return np.nan
return self._get_prev_settlement(instrument, dt)
def get_settlement(self, instrument, dt):
# type: (Instrument, datetime) -> float
if instrument.type != INSTRUMENT_TYPE.FUTURE:
raise LookupError("'{}', instrument_type={}".format(instrument.order_book_id, instrument.type))
return self._get_settlement(instrument, dt)
    def get_settle_price(self, order_book_id, date):
        # Settlement price on ``date`` for futures only; NaN for other types.
        # NOTE(review): compares against the string 'Future' while sibling
        # methods use INSTRUMENT_TYPE.FUTURE — verify the enum compares
        # equal to its string value before unifying the two styles.
        instrument = self.instruments(order_book_id)
        if instrument.type != 'Future':
            return np.nan
        return self._data_source.get_settle_price(instrument, date)
@lru_cache(512)
def get_bar(self, order_book_id: str, dt: date, frequency: str = '1d') -> BarObject:
instrument = self.instruments(order_book_id)
if dt is None:
return BarObject(instrument, NANDict, dt)
bar = self._data_source.get_bar(instrument, dt, frequency)
if bar:
return BarObject(instrument, bar)
return BarObject(instrument, NANDict, dt)
def get_open_auction_bar(self, order_book_id, dt):
instrument = self.instruments(order_book_id)
try:
bar = self._data_source.get_open_auction_bar(instrument, dt)
except NotImplementedError:
# forward compatible
tick = self.current_snapshot(order_book_id, "1d", dt)
bar = {k: getattr(tick, k) for k in [
"datetime", "open", "limit_up", "limit_down", "volume", "total_turnover"
]}
return PartialBarObject(instrument, bar)
def history(self, order_book_id, bar_count, frequency, field, dt):
data = self.history_bars(order_book_id, bar_count, frequency,
['datetime', field], dt, skip_suspended=False, adjust_orig=dt)
if data is None:
return None
return pd.Series(data[field], index=[convert_int_to_datetime(t) for t in data['datetime']])
    def fast_history(self, order_book_id, bar_count, frequency, field, dt):
        # Thin wrapper over history_bars: pre-adjusted prices, suspended days
        # kept, raw array returned with no pandas wrapping (hence "fast").
        return self.history_bars(order_book_id, bar_count, frequency, field, dt, skip_suspended=False,
                                 adjust_type='pre', adjust_orig=dt)
def history_bars(self, order_book_id, bar_count, frequency, field, dt,
skip_suspended=True, include_now=False,
adjust_type='pre', adjust_orig=None):
instrument = self.instruments(order_book_id)
if adjust_orig is None:
adjust_orig = dt
return self._data_source.history_bars(instrument, bar_count, frequency, field, dt,
skip_suspended=skip_suspended, include_now=include_now,
adjust_type=adjust_type, adjust_orig=adjust_orig)
def history_ticks(self, order_book_id, count, dt):
instrument = self.instruments(order_book_id)
return self._data_source.history_ticks(instrument, count, dt)
def current_snapshot(self, order_book_id, frequency, dt):
def tick_fields_for(ins):
_STOCK_FIELD_NAMES = [
'datetime', 'open', 'high', 'low', 'last', 'volume', 'total_turnover', 'prev_close',
'limit_up', 'limit_down'
]
_FUTURE_FIELD_NAMES = _STOCK_FIELD_NAMES + ['open_interest', 'prev_settlement']
if ins.type == 'Future':
return _STOCK_FIELD_NAMES
else:
return _FUTURE_FIELD_NAMES
instrument = self.instruments(order_book_id)
if frequency == '1d':
bar = self._data_source.get_bar(instrument, dt, '1d')
if not bar:
return None
d = {k: bar[k] for k in tick_fields_for(instrument) if k in bar.dtype.names}
d['last'] = bar['close']
d['prev_close'] = self._get_prev_close(order_book_id, dt)
return TickObject(instrument, d)
return self._data_source.current_snapshot(instrument, frequency, dt)
    def available_data_range(self, frequency):
        # Range of dates for which data at ``frequency`` is available.
        return self._data_source.available_data_range(frequency)
def get_commission_info(self, order_book_id):
instrument = self.instruments(order_book_id)
return self._data_source.get_commission_info(instrument)
    def get_merge_ticks(self, order_book_id_list, trading_date, last_dt=None):
        # Merged tick stream of several instruments on one trading date,
        # optionally resuming after ``last_dt``.
        return self._data_source.get_merge_ticks(order_book_id_list, trading_date, last_dt)
def is_suspended(self, order_book_id, dt, count=1):
# type: (str, DateLike, int) -> Union[Sequence[bool], bool]
if count == 1:
return self._data_source.is_suspended(order_book_id, [dt])[0]
trading_dates = self.get_n_trading_dates_until(dt, count)
return self._data_source.is_suspended(order_book_id, trading_dates)
def is_st_stock(self, order_book_id, dt, count=1):
if count == 1:
return self._data_source.is_st_stock(order_book_id, [dt])[0]
trading_dates = self.get_n_trading_dates_until(dt, count)
return self._data_source.is_st_stock(order_book_id, trading_dates)
    def get_tick_size(self, order_book_id):
        # Minimum price increment of the instrument.
        return self.instruments(order_book_id).tick_size()
    def get_last_price(self, order_book_id):
        # type: (str) -> float
        # Latest traded price from the price board, coerced to a plain float.
        return float(self._price_board.get_last_price(order_book_id))
def all_instruments(self, types, dt=None):
# type: (List[INSTRUMENT_TYPE], Optional[datetime]) -> List[Instrument]
li = []
for i in self._data_source.get_instruments(types=types):
if dt is None or i.listing_at(dt):
li.append(i)
return li
# return [i for i in self._data_source.get_instruments(types=types) if dt is None or i.listing_at(dt)]
@lru_cache(2048)
def instrument(self, sym_or_id):
return next(iter(self._data_source.get_instruments(id_or_syms=[sym_or_id])), None)
def instruments(self, sym_or_ids):
# type: (StrOrIter) -> Union[None, Instrument, List[Instrument]]
if isinstance(sym_or_ids, str):
return next(iter(self._data_source.get_instruments(id_or_syms=[sym_or_ids])), None)
else:
return list(self._data_source.get_instruments(id_or_syms=sym_or_ids))
def get_future_contracts(self, underlying, date):
# type: (str, DateLike) -> List[str]
return sorted(i.order_book_id for i in self.all_instruments(
[INSTRUMENT_TYPE.FUTURE], date
) if i.underlying_symbol == underlying and not Instrument.is_future_continuous_contract(i.order_book_id))
def get_trading_period(self, sym_or_ids, default_trading_period=None):
# type: (StrOrIter, Optional[Sequence[TimeRange]]) -> List[TimeRange]
trading_period = default_trading_period or []
for instrument in self.instruments(sym_or_ids):
trading_period.extend(instrument.trading_hours or [])
return merge_trading_period(trading_period)
def is_night_trading(self, sym_or_ids):
# type: (StrOrIter) -> bool
return any((instrument.trade_at_night for instrument in self.instruments(sym_or_ids)))