Trim dataframe, not tickerlist

xmatthias committed Dec 28, 2019
1 parent 866908d commit db520a0
Showing 3 changed files with 12 additions and 31 deletions.
freqtrade/data/datahandlers/idatahandler.py (1 addition, 28 deletions)
@@ -37,7 +37,7 @@ def ohlcv_load(self, pair, timeframe: str,
:param pair: Pair to load data for
:param timeframe: Ticker timeframe (e.g. "5m")
:param timerange: Limit data to be loaded to this timerange
- :param fill_up_missing: Fill missing values with "No action"-candles
+ :param fill_missing: Fill missing values with "No action"-candles
:param drop_incomplete: Drop last candle assuming it may be incomplete.
:param startup_candles: Additional candles to load at the start of the period
:return: DataFrame with ohlcv data, or empty DataFrame
@@ -75,30 +75,3 @@ def _validate_pairdata(self, pair, pairdata: DataFrame, timerange: TimeRange):
if timerange.stoptype == 'date' and pairdata[-1][0] < timerange.stopts * 1000:
logger.warning('Missing data at end for pair %s, data ends at %s',
pair, arrow.get(pairdata[-1][0] // 1000).strftime('%Y-%m-%d %H:%M:%S'))

- @staticmethod
- def trim_tickerlist(tickerlist: List[Dict], timerange: TimeRange) -> List[Dict]:
-     """
-     TODO: investigate if this is needed ... we can probably cover this in a dataframe
-     Trim tickerlist based on given timerange
-     """
-     if not tickerlist:
-         return tickerlist
-
-     start_index = 0
-     stop_index = len(tickerlist)
-
-     if timerange.starttype == 'date':
-         while (start_index < len(tickerlist) and
-                tickerlist[start_index][0] < timerange.startts * 1000):
-             start_index += 1
-
-     if timerange.stoptype == 'date':
-         while (stop_index > 0 and
-                tickerlist[stop_index-1][0] > timerange.stopts * 1000):
-             stop_index -= 1
-
-     if start_index > stop_index:
-         raise ValueError(f'The timerange [{timerange.startts},{timerange.stopts}] is incorrect')
-
-     return tickerlist[start_index:stop_index]
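The replacement, trim_dataframe from freqtrade.data.history, is not shown in this diff. A minimal sketch of a dataframe-based equivalent of the removed helper, assuming a timezone-aware 'date' column as produced by _ohlcv_load below, could look like this (illustrative only, not the actual freqtrade implementation):

# Illustrative sketch of dataframe-based trimming; trim_dataframe itself
# lives in freqtrade.data.history and may differ in detail.
from datetime import datetime, timezone

from pandas import DataFrame

from freqtrade.configuration import TimeRange


def trim_dataframe_sketch(df: DataFrame, timerange: TimeRange) -> DataFrame:
    # Keep only rows whose 'date' falls inside the requested timerange.
    if timerange.starttype == 'date':
        start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
        df = df.loc[df['date'] >= start, :]
    if timerange.stoptype == 'date':
        stop = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
        df = df.loc[df['date'] <= stop, :]
    return df

Compared with the index-walking loops above, the boundary checks become vectorised pandas comparisons and no separate list representation has to be kept around.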
freqtrade/data/datahandlers/jsondatahandler.py (10 additions, 3 deletions)
@@ -8,6 +8,7 @@
from freqtrade import misc
from freqtrade.configuration import TimeRange
from freqtrade.data.converter import clean_ohlcv_dataframe
+ from freqtrade.data.history import trim_dataframe

from .idatahandler import IDataHandler

@@ -54,9 +55,15 @@ def _ohlcv_load(self, pair: str, timeframe: str,
drop_incomplete: bool = True,
) -> DataFrame:
"""
- Load data for one pair from disk.
+ Internal method used to load data for one pair from disk.
Implements the loading and conversion to a Pandas dataframe.
- :return: Dataframe
+ :param pair: Pair to load data for
+ :param timeframe: Ticker timeframe (e.g. "5m")
+ :param timerange: Limit data to be loaded to this timerange
+ :param fill_missing: Fill missing values with "No action"-candles
+ :param drop_incomplete: Drop last candle assuming it may be incomplete.
+ :param startup_candles: Additional candles to load at the start of the period
+ :return: DataFrame with ohlcv data, or empty DataFrame
"""
filename = self._pair_data_filename(self._datadir, pair, timeframe)
pairdata = read_json(filename, orient='values')
@@ -67,7 +74,7 @@ def _ohlcv_load(self, pair: str, timeframe: str,
infer_datetime_format=True)

if timerange:
- pairdata = IDataHandler.trim_tickerlist(pairdata, timerange)
+ pairdata = trim_dataframe(pairdata, timerange)

return clean_ohlcv_dataframe(pairdata, timeframe,
pair=pair,
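
To make the new call path concrete, here is a hypothetical, self-contained usage sketch. The hand-built OHLCV frame and the positional TimeRange arguments (starttype, stoptype, startts, stopts) are assumptions for illustration; in the handler itself the frame is built from JSON on disk as shown above.

# Hypothetical usage of trim_dataframe on a hand-built OHLCV frame.
from datetime import datetime, timezone

import pandas as pd

from freqtrade.configuration import TimeRange
from freqtrade.data.history import trim_dataframe

df = pd.DataFrame({
    'date': pd.to_datetime(['2018-12-31', '2019-01-15', '2019-02-15'], utc=True),
    'open': [1.0, 2.0, 3.0], 'high': [1.1, 2.1, 3.1],
    'low': [0.9, 1.9, 2.9], 'close': [1.0, 2.0, 3.0], 'volume': [10, 20, 30],
})

# Assumed TimeRange signature: TimeRange(starttype, stoptype, startts, stopts).
timerange = TimeRange('date', 'date',
                      int(datetime(2019, 1, 1, tzinfo=timezone.utc).timestamp()),
                      int(datetime(2019, 2, 1, tzinfo=timezone.utc).timestamp()))

print(trim_dataframe(df, timerange))  # expected to keep only the 2019-01-15 row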
freqtrade/utils.py (1 addition, 0 deletions)
@@ -285,6 +285,7 @@ def convert_ohlcv_format(config: Dict[str, Any], convert_from: str, convert_to:
for timeframe in timeframes:
for pair in config['pairs']:
data = src.ohlcv_load(pair=pair, timeframe=timeframe,
+ timerange=None,
fill_missing=False,
drop_incomplete=False,
startup_candles=0)