Skip to content
Browse files

* Fix pickling of DateArrays (to keep the frequency)

  • Loading branch information...
1 parent 362745c commit 96a9d6bbe38b62e504b25b552101dad43c48dc46 pierregm committed Dec 19, 2009
View
85 scikits/timeseries/lib/moving_funcs.py
@@ -148,13 +148,18 @@ def _moving_func(data, cfunc, kwargs):
#...............................................................................
def _mov_sum(data, span, dtype=None, type_num_double=False):
- """ helper function for calculating moving sum. Resulting dtype can be
-determined in one of two ways. See C-code for more details."""
+ """
+ Helper function for calculating moving sum.
+ Resulting dtype can be determined in one of two ways.
+ See C-code for more details.
+ """
kwargs = {'span':span, 'type_num_double':type_num_double}
if dtype is not None:
kwargs['dtype'] = dtype
return _moving_func(data, MA_mov_sum, kwargs)
-#...............................................................................
+
+
+
def mov_sum(data, span, dtype=None):
"""
Calculates the moving sum of a series.
@@ -166,10 +171,12 @@ def mov_sum(data, span, dtype=None):
%(dtype)s
%(movfuncresults)s
- """
+ """ % _doc_parameters
return _mov_sum(data, span, dtype=dtype)
-#...............................................................................
+
+
+
def mov_median(data, span, dtype=None):
"""
Calculates the moving median of a series.
@@ -181,14 +188,16 @@ def mov_median(data, span, dtype=None):
%(dtype)s
%(movfuncresults)s
- """
+ """ % _doc_parameters
kwargs = {'span':span}
if dtype is not None:
kwargs['dtype'] = dtype
return _moving_func(data, MA_mov_median, kwargs)
-#...............................................................................
+
+
+
def mov_min(data, span, dtype=None):
"""
Calculates the moving minimum of a series.
@@ -200,14 +209,16 @@ def mov_min(data, span, dtype=None):
%(dtype)s
%(movfuncresults)s
- """
+ """ % _doc_parameters
kwargs = {'span':span}
if dtype is not None:
kwargs['dtype'] = dtype
return _moving_func(data, MA_mov_min, kwargs)
-#...............................................................................
+
+
+
def mov_max(data, span, dtype=None):
"""
Calculates the moving max of a series.
@@ -219,14 +230,16 @@ def mov_max(data, span, dtype=None):
%(dtype)s
%(movfuncresults)s
- """
+ """ % _doc_parameters
kwargs = {'span':span}
if dtype is not None:
kwargs['dtype'] = dtype
return _moving_func(data, MA_mov_max, kwargs)
-#...............................................................................
+
+
+
def mov_average(data, span, dtype=None):
"""Calculates the moving average of a series.
@@ -237,10 +250,12 @@ def mov_average(data, span, dtype=None):
%(dtype)s
%(movfuncresults)s
- """
+ """ % _doc_parameters
return _mov_sum(data, span, dtype=dtype, type_num_double=True)/span
mov_mean = mov_average
-#...............................................................................
+
+
+
def mov_var(data, span, dtype=None, ddof=0):
"""
Calculates the moving variance of a 1-D array.
@@ -253,9 +268,11 @@ def mov_var(data, span, dtype=None, ddof=0):
%(ddof)s
%(movfuncresults)s
- """
+ """ % _doc_parameters
return _mov_cov(data, data, span, ddof, dtype=dtype)
-#...............................................................................
+
+
+
def mov_std(data, span, dtype=None, ddof=0):
"""
Calculates the moving standard deviation of a 1-D array.
@@ -268,9 +285,11 @@ def mov_std(data, span, dtype=None, ddof=0):
%(ddof)s
%(movfuncresults)s
-"""
+ """ % _doc_parameters
return sqrt(mov_var(data, span, dtype=dtype, ddof=ddof))
-#...............................................................................
+
+
+
def _mov_cov(x, y, span, ddof, dtype=None):
# helper function
denom = span - ddof
@@ -284,6 +303,7 @@ def _mov_cov(x, y, span, ddof, dtype=None):
return sum_prod/denom - (sum_x * sum_y) / (span*denom)
+
def mov_cov(x, y, span, bias=0, dtype=None):
"""
Calculates the moving covariance of two 1-D arrays.
@@ -296,10 +316,12 @@ def mov_cov(x, y, span, bias=0, dtype=None):
%(dtype)s
%(movfuncresults)s
- """
+ """ % _doc_parameters
- if bias==0: ddof = 1
- else: ddof = 0
+ if bias==0:
+ ddof = 1
+ else:
+ ddof = 0
return _mov_cov(x, y, span, ddof, dtype=dtype)
#...............................................................................
@@ -315,7 +337,7 @@ def mov_corr(x, y, span, dtype=None):
%(dtype)s
%(movfuncresults)s
- """
+ """ % _doc_parameters
sum_x = _mov_sum(x, span, dtype=dtype, type_num_double=True)
sum_y = _mov_sum(y, span, dtype=dtype, type_num_double=True)
@@ -330,7 +352,9 @@ def mov_corr(x, y, span, dtype=None):
_stddev_y = sqrt(sum_prod/span - (sum_y ** 2) / (span ** 2))
return _covar / (_stddev_x * _stddev_y)
-#...............................................................................
+
+
+
def mov_average_expw(data, span, tol=1e-6, dtype=None):
"""
Calculates the exponentially weighted moving average of a series.
@@ -349,7 +373,8 @@ def mov_average_expw(data, span, tol=1e-6, dtype=None):
%(dtype)s
%(movfuncexpwresults)s
- """
+ """ % _doc_parameters
+
kwargs = {'span':span}
if dtype is not None:
kwargs['dtype'] = dtype
@@ -362,7 +387,9 @@ def mov_average_expw(data, span, tol=1e-6, dtype=None):
result._mask = np.where(marker > tol, True, mask)
return result
-#.............................................................................
+
+
+
def cmov_window(data, span, window_type):
"""
Applies a centered moving window of type ``window_type`` and size ``span``
@@ -411,8 +438,8 @@ def cmov_window(data, span, window_type):
--------
Only ``boxcar`` has been thoroughly tested so far...
+ """ % _doc_parameters
-"""
from scipy.signal import convolve, get_window
data = marray(data, copy=True, subok=True)
@@ -434,6 +461,8 @@ def cmov_window(data, span, window_type):
data._mask[:k] = data._mask[-k:] = True
return data
+
+
def cmov_average(data, span):
"""
Computes the centered moving average of size ``span`` on the data.
@@ -449,12 +478,8 @@ def cmov_average(data, span):
Noting ``k=span//2``, the ``k`` first and ``k`` last data are always masked.
If ``data`` has a missing value at position ``i``, then the result has
missing values in the interval ``[i-k:i+k+1]``.
-"""
+ """ % _doc_parameters
return cmov_window(data, span, 'boxcar')
cmov_mean = cmov_average
-if __doc__ is not None:
- for mf in __all__:
- mf_obj = locals()[mf]
- mf_obj.__doc__ = mf_obj.__doc__ % _doc_parameters
View
2 scikits/timeseries/lib/plotlib.py
@@ -1121,7 +1121,7 @@ def set_datelimits(self, start_date=None, end_date=None):
"""
errmsg = "The use of 'set_datelimits' is deprecated. "\
"Please use 'set_dlim' instead"
- warnings.DepreciationWarning(errmsg)
+ warnings.warn(errmsg, DeprecationWarning)
return self.set_dlim(start_date, end_date)
#
set_datelims = set_datelimits
View
48 scikits/timeseries/lib/tests/test_avcf.py
@@ -34,20 +34,42 @@ def __init__(self,*args,**kwargs):
63, 46, 56, 44, 44, 52, 38, 46, 36, 49, 35, 44,
59, 65, 65, 56, 66, 53, 61, 52, 51, 48, 54, 49,
49, 61,nan,nan, 68, 44, 40, 27, 28, 25, 24, 24]
- self.mdeaths = [2134,1863,1877,1877,1492,1249,1280,1131,1209,1492,1621,
- 1846,2103,2137,2153,1833,1403,1288,1186,1133,1053,1347,
- 1545,2066,2020,2750,2283,1479,1189,1160,1113, 970, 999,
- 1208,1467,2059,2240,1634,1722,1801,1246,1162,1087,1013,
- 959,1179,1229,1655,2019,2284,1942,1423,1340,1187,1098,
- 1004, 970,1140,1110,1812,2263,1820,1846,1531,1215,1075,
- 1056, 975, 940,1081,1294,1341]
- self.fdeaths = [901, 689, 827, 677, 522, 406, 441, 393, 387, 582, 578,
- 666, 830, 752, 785, 664, 467, 438, 421, 412, 343, 440,
- 531, 771, 767,1141, 896, 532, 447, 420, 376, 330, 357,
- 445, 546, 764, 862, 660, 663, 643, 502, 392, 411, 348,
- 387, 385, 411, 638, 796, 853, 737, 546, 530, 446, 431,
- 362, 387, 430, 425, 679, 821, 785, 727, 612, 478, 429,
+ self.mdeaths = [2134, 1863, 1877, 1877, 1492, 1249,
+ 1280, 1131, 1209, 1492, 1621, 1846,
+ 2103, 2137, 2153, 1833, 1403, 1288,
+ 1186, 1133, 1053, 1347, 1545, 2066,
+ 2020, 2750, 2283, 1479, 1189, 1160,
+ 1113, 970, 999, 1208, 1467, 2059,
+ 2240, 1634, 1722, 1801, 1246, 1162,
+ 1087, 1013, 959, 1179, 1229, 1655,
+ 2019, 2284, 1942, 1423, 1340, 1187,
+ 1098, 1004, 970, 1140, 1110, 1812,
+ 2263, 1820, 1846, 1531, 1215, 1075,
+ 1056, 975, 940, 1081, 1294, 1341]
+ self.fdeaths = [901, 689, 827, 677, 522, 406,
+ 441, 393, 387, 582, 578, 666,
+ 830, 752, 785, 664, 467, 438,
+ 421, 412, 343, 440, 531, 771,
+ 767,1141, 896, 532, 447, 420,
+ 376, 330, 357, 445, 546, 764,
+ 862, 660, 663, 643, 502, 392,
+ 411, 348, 387, 385, 411, 638,
+ 796, 853, 737, 546, 530, 446,
+ 431, 362, 387, 430, 425, 679,
+ 821, 785, 727, 612, 478, 429,
405, 379, 393, 411, 487, 574]
+ self.ldeaths = [3035, 2552, 2704, 2554, 2014, 1655,
+ 1721, 1524, 1596, 2074, 2199, 2512,
+ 2933, 2889, 2938, 2497, 1870, 1726,
+ 1607, 1545, 1396, 1787, 2076, 2837,
+ 2787, 3891, 3179, 2011, 1636, 1580,
+ 1489, 1300, 1356, 1653, 2013, 2823,
+ 3102, 2294, 2385, 2444, 1748, 1554,
+ 1498, 1361, 1346, 1564, 1640, 2293,
+ 2815, 3137, 2679, 1969, 1870, 1633,
+ 1529, 1366, 1357, 1570, 1535, 2491,
+ 3084, 2605, 2573, 2143, 1693, 1504,
+ 1461, 1354, 1333, 1492, 1781, 1915,]
self.mdeaths = ma.asarray(self.mdeaths)
self.fdeaths = ma.asarray(self.fdeaths)
View
184 scikits/timeseries/lib/tests/test_tstables.py
@@ -28,33 +28,33 @@
def common_ma_setup():
- data2D = ma.array([np.random.rand(25).reshape(5,5),
- np.random.rand(25).reshape(5,5),
- np.random.rand(25).reshape(5,5),
- np.random.rand(25).reshape(5,5),
- np.random.rand(25).reshape(5,5),],
- mask=[np.random.rand(25).reshape(5,5)>.5,
- np.random.rand(25).reshape(5,5)>.5,
- np.random.rand(25).reshape(5,5)>.5,
- np.random.rand(25).reshape(5,5)>.5,
- np.random.rand(25).reshape(5,5)>.5,]
- )
+ data2D = ma.array([np.random.rand(25).reshape(5, 5),
+ np.random.rand(25).reshape(5, 5),
+ np.random.rand(25).reshape(5, 5),
+ np.random.rand(25).reshape(5, 5),
+ np.random.rand(25).reshape(5, 5), ],
+ mask=[np.random.rand(25).reshape(5, 5) > .5,
+ np.random.rand(25).reshape(5, 5) > .5,
+ np.random.rand(25).reshape(5, 5) > .5,
+ np.random.rand(25).reshape(5, 5) > .5,
+ np.random.rand(25).reshape(5, 5) > .5, ]
+ )
data1D = ma.array(np.random.rand(25),
- mask=np.random.rand(25)>0.9,
- fill_value=-9999)
- dtype5R = [('a',float),('b',int),('c','|S3')]
+ mask=np.random.rand(25) > 0.9,
+ fill_value= -9999)
+ dtype5R = [('a', float), ('b', int), ('c', '|S3')]
data5N = ma.array(zip(np.random.rand(5),
np.arange(5),
'ABCDE'),
dtype=dtype5R)
data5R = mr.fromarrays([np.random.rand(5),
np.arange(5),
- ('A','B','C','D','E')],
+ ('A', 'B', 'C', 'D', 'E')],
dtype=dtype5R)
- data5R._mask['a'][0]=True
- data5R._mask['b'][2]=True
- data5R._mask['c'][-1]=True
- return dict(data1D=data1D,
+ data5R._mask['a'][0] = True
+ data5R._mask['b'][2] = True
+ data5R._mask['c'][-1] = True
+ return dict(data1D=data1D,
data2D=data2D,
data5N=data5N,
data5R=data5R)
@@ -79,28 +79,28 @@ def test_tabulate2D(self):
"Tests the transformation from a nD series to..."
data2D = self.data['data2D']
_data2D = tabulate(data2D)
- assert_equal(_data2D['_data'], data2D.filled().reshape(len(data2D),-1))
- assert_equal(_data2D['_mask'], data2D._mask.reshape(len(data2D),-1))
+ assert_equal(_data2D['_data'], data2D.filled().reshape(len(data2D), -1))
+ assert_equal(_data2D['_mask'], data2D._mask.reshape(len(data2D), -1))
#
def test_tabulateNV(self):
"Tests the transformation of named variables to..."
data5N = self.data['data5N']
_data5N = tabulate(data5N)
zipped5N = [((ma.filled(s['a']), ma.getmaskarray(s['a'])),
(ma.filled(s['b']), ma.getmaskarray(s['b'])),
- (ma.filled(s['c']), ma.getmaskarray(s['c'])))
+ (ma.filled(s['c']), ma.getmaskarray(s['c'])))
for s in data5N]
- ndtype5N = [(fname,[('_data',ftype), ('_mask',bool)])
- for (fname,ftype) in data5N.dtype.descr]
+ ndtype5N = [(fname, [('_data', ftype), ('_mask', bool)])
+ for (fname, ftype) in data5N.dtype.descr]
data5N = np.array(zipped5N, dtype=ndtype5N)
- for key in ('a','b','c'):
- assert_equal_records(data5N[key].view(np.recarray),
+ for key in ('a', 'b', 'c'):
+ assert_equal_records(data5N[key].view(np.recarray),
_data5N[key].view(np.recarray))
#
_data5R = tabulate(self.data['data5N'])
data5R = np.array(zipped5N, dtype=ndtype5N)
- for key in ('a','b','c'):
- assert_equal_records(data5R[key].view(np.recarray),
+ for key in ('a', 'b', 'c'):
+ assert_equal_records(data5R[key].view(np.recarray),
_data5R[key].view(np.recarray))
@@ -111,7 +111,7 @@ def __init__(self, *args, **kwds):
TestCase.__init__(self, *args, **kwds)
self.data = common_ma_setup()
self.file = tempfile.mktemp(".hdf5")
- self.fileh = tables.openFile(self.file,'a')
+ self.fileh = tables.openFile(self.file, 'a')
self.populate()
#
def tearDown(self):
@@ -121,27 +121,28 @@ def tearDown(self):
#
def populate(self):
h5file = self.fileh
- for (key,data) in self.data.iteritems():
+ for (key, data) in self.data.iteritems():
table = h5file.createMaskedTable('/', key, data, "Example")
h5file.flush()
#
def test_read1D(self):
data1D = self.data['data1D']
table = self.fileh.root.data1D
tarray = table.read()
-
+ #
assert_equal(tarray.dtype, data1D.dtype)
assert_equal(tarray.filled(), data1D.filled())
assert_equal(tarray._mask, data1D._mask)
assert_equal(tarray.fill_value, data1D.fill_value)
#
- tarray = table.read(1,5,2)
+ tarray = table.read(1, 5, 2)
control = data1D[1:5:2]
assert_equal(tarray, control)
#
tarray = table.read(field='_data')
assert_equal(tarray, data1D.filled())
- #
+
+
def test_read2D(self):
data2D = self.data['data2D']
table = self.fileh.root.data2D
@@ -151,27 +152,28 @@ def test_read2D(self):
assert_equal(tarray.filled(), data2D.filled())
assert_equal(tarray._mask, data2D._mask)
#
- tarray = table.read(1,5,2)
- control = (data2D.reshape(len(data2D),-1)[1:5:2]).reshape(-1,5,5)
+ tarray = table.read(1, 5, 2)
+ control = (data2D.reshape(len(data2D), -1)[1:5:2]).reshape(-1, 5, 5)
assert_equal(tarray.mask, control.mask)
assert_equal(tarray.filled(), control.filled())
#
tarray = table.read(field='_data')
assert_equal(tarray, data2D.filled())
- #
+
+
def test_read5N(self):
data5N = self.data['data5N']
table = self.fileh.root.data5N
tarray = table.read()
assert_equal(tarray.dtype, data5N.dtype)
- for f in ('a','b','c'):
+ for f in ('a', 'b', 'c'):
assert_equal(tarray[f], data5N[f])
assert_equal(tarray._mask, data5N._mask)
assert_equal(tarray.fill_value, data5N.fill_value)
#
- tarray = table.read(1,5,2)
+ tarray = table.read(1, 5, 2)
initial = data5N[1:5:2]
- for f in ('a','b','c'):
+ for f in ('a', 'b', 'c'):
assert_equal(tarray[f], initial[f])
#
tarray = table.read(field='a')
@@ -186,44 +188,45 @@ def test_read5N(self):
def common_ts_setup():
- series2D = ts.time_series([np.random.rand(25).reshape(5,5),
- np.random.rand(25).reshape(5,5),
- np.random.rand(25).reshape(5,5),
- np.random.rand(25).reshape(5,5),
- np.random.rand(25).reshape(5,5),],
+ series2D = ts.time_series([np.random.rand(25).reshape(5, 5),
+ np.random.rand(25).reshape(5, 5),
+ np.random.rand(25).reshape(5, 5),
+ np.random.rand(25).reshape(5, 5),
+ np.random.rand(25).reshape(5, 5), ],
start_date=ts.now('M'),
- mask=[np.random.rand(25).reshape(5,5)>.5,
- np.random.rand(25).reshape(5,5)>.5,
- np.random.rand(25).reshape(5,5)>.5,
- np.random.rand(25).reshape(5,5)>.5,
- np.random.rand(25).reshape(5,5)>.5,]
- )
+ mask=[np.random.rand(25).reshape(5, 5) > .5,
+ np.random.rand(25).reshape(5, 5) > .5,
+ np.random.rand(25).reshape(5, 5) > .5,
+ np.random.rand(25).reshape(5, 5) > .5,
+ np.random.rand(25).reshape(5, 5) > .5, ]
+ )
series1D = ts.time_series(np.random.rand(25),
- mask=np.random.rand(25)>0.7,
+ mask=np.random.rand(25) > 0.7,
start_date=ts.now('M'),
- fill_value=-999)
- series5V = ts.time_series(np.random.rand(25).reshape(5,5),
- mask=np.random.rand(25).reshape(5,5)>0.7,
+ fill_value= -999)
+ series5V = ts.time_series(np.random.rand(25).reshape(5, 5),
+ mask=np.random.rand(25).reshape(5, 5) > 0.7,
start_date=ts.now('M'))
series5N = ts.time_series(zip(np.random.rand(5),
np.random.rand(5),
np.arange(5)),
start_date=ts.now('M'),
- dtype=[('a',float),('b',float),('c',int)]
+ dtype=[('a', float), ('b', float), ('c', int)]
)
- return dict(series1D=series1D,
+ return dict(series1D=series1D,
series5V=series5V,
series2D=series2D,
series5N=series5N)
+
class TestTimeSeriesTable(TestCase):
#
def __init__(self, *args, **kwds):
TestCase.__init__(self, *args, **kwds)
self.data = common_ts_setup()
self.file = tempfile.mktemp(".hdf5")
- self.fileh = tables.openFile(self.file,'a')
+ self.fileh = tables.openFile(self.file, 'a')
self.populate()
#
def tearDown(self):
@@ -233,7 +236,7 @@ def tearDown(self):
#
def populate(self):
h5file = self.fileh
- for (key,data) in self.data.iteritems():
+ for (key, data) in self.data.iteritems():
table = h5file.createTimeSeriesTable('/', key, data, "Example")
h5file.flush()
#
@@ -247,7 +250,7 @@ def test_read1D(self):
assert_equal(tarray._dates, series1D._dates)
assert_equal(tarray.fill_value, series1D.fill_value)
#
- tarray = table.read(1,5,2)
+ tarray = table.read(1, 5, 2)
assert_equal(tarray, series1D[1:5:2])
#
tarray = table.read(field='_data')
@@ -265,9 +268,9 @@ def test_read2D(self):
assert_equal(tarray._mask, series2D._mask)
assert_equal(tarray.fill_value, series2D.fill_value)
#
- tarray = table.read(1,5,2)
+ tarray = table.read(1, 5, 2)
initial = series2D[1:5:2]
- assert_equal(tarray, initial._series.reshape(2,5,5))
+ assert_equal(tarray, initial._series.reshape(2, 5, 5))
assert_equal(tarray._dates, initial._dates)
#
tarray = table.read(field='_series')
@@ -278,14 +281,14 @@ def test_read5N(self):
table = self.fileh.root.series5N
tarray = table.read()
assert_equal(tarray.dtype, series5N.dtype)
- for f in ('a','b','c'):
+ for f in ('a', 'b', 'c'):
assert_equal(tarray[f], series5N[f])
assert_equal(tarray._mask, series5N._mask)
assert_equal(tarray.fill_value, series5N.fill_value)
#
- tarray = table.read(1,5,2)
+ tarray = table.read(1, 5, 2)
initial = series5N[1:5:2]
- for f in ('a','b','c'):
+ for f in ('a', 'b', 'c'):
assert_equal(tarray[f], initial[f])
#
tarray = table.read(field='a')
@@ -298,19 +301,20 @@ def test_read5N(self):
assert_equal(tarray._mask, series5V._mask)
+
class TestSpecialAttrs(TestCase):
#
def __init__(self, *args, **kwds):
TestCase.__init__(self, *args, **kwds)
- self.marray = ma.array(np.random.rand(100).reshape(10,10),
- mask = (np.random.rand(100).reshape(10,10) > 0.7),
- fill_value=-999,
+ self.marray = ma.array(np.random.rand(100).reshape(10, 10),
+ mask=(np.random.rand(100).reshape(10, 10) > 0.7),
+ fill_value= -999,
hard_mask=True)
self.marray._optinfo['memo'] = "Some line of text"
self.tseries = ts.time_series(self.marray,
start_date=ts.now('D'))
self.file = tempfile.mktemp(".hdf5")
- self.fileh = tables.openFile(self.file,'a')
+ self.fileh = tables.openFile(self.file, 'a')
self.populate()
#
def tearDown(self):
@@ -340,20 +344,21 @@ def test_specialattributes_timeseriestable(self):
assert_equal(test._hardmask, data._hardmask)
+
class TestTableRead(TestCase):
#
def __init__(self, *args, **kwds):
TestCase.__init__(self, *args, **kwds)
series = ts.time_series(zip(np.random.rand(10),
np.arange(10)),
start_date=ts.now('M'),
- dtype=[('a',float),('b',int)])
- series.mask[0] = (0,1)
- series.mask[-1] = (1,0)
+ dtype=[('a', float), ('b', int)])
+ series.mask[0] = (0, 1)
+ series.mask[-1] = (1, 0)
self.tseries = series
self.marray = series._series
self.file = tempfile.mktemp(".hdf5")
- self.h5file = tables.openFile(self.file,'a')
+ self.h5file = tables.openFile(self.file, 'a')
self.populate()
#
def tearDown(self):
@@ -385,13 +390,13 @@ def test_tseries_read(self):
self.failUnless(isinstance(test, TimeSeries))
assert_equal(test, series[::2])
#
- test = table.readCoordinates([1,2,3])
+ test = table.readCoordinates([1, 2, 3])
self.failUnless(isinstance(test, TimeSeries))
- assert_equal(test, series[[1,2,3]])
+ assert_equal(test, series[[1, 2, 3]])
#
- test = table.readCoordinates([1,2,3], field='a')
+ test = table.readCoordinates([1, 2, 3], field='a')
self.failUnless(isinstance(test, TimeSeries))
- assert_equal(test, series['a'][[1,2,3]])
+ assert_equal(test, series['a'][[1, 2, 3]])
#
def test_marray_read(self):
"Test reading specific elements of a MaskedTable"
@@ -410,42 +415,43 @@ def test_marray_read(self):
self.failUnless(isinstance(test, MaskedArray))
assert_equal(test, data[::2])
#
- test = table.readCoordinates([1,2,3])
+ test = table.readCoordinates([1, 2, 3])
self.failUnless(isinstance(test, MaskedArray))
- assert_equal(test, data[[1,2,3]])
+ assert_equal(test, data[[1, 2, 3]])
#
- test = table.readCoordinates([1,2,3], field='a')
+ test = table.readCoordinates([1, 2, 3], field='a')
self.failUnless(isinstance(test, MaskedArray))
- assert_equal(test, data['a'][[1,2,3]])
+ assert_equal(test, data['a'][[1, 2, 3]])
#
def test_append_maskedarray(self):
"Test appending to a MaskedTable"
table = self.h5file.root.marray
data = self.marray
- newdata = ma.array(zip(np.random.rand(3), np.arange(3)+10),
- mask=[(0,0),(1,0),(0,1)],
+ newdata = ma.array(zip(np.random.rand(3), np.arange(3) + 10),
+ mask=[(0, 0), (1, 0), (0, 1)],
dtype=data.dtype)
table.append(newdata)
test = table.read()
self.failUnless(isinstance(test, MaskedArray))
- assert_equal_records(test, ma.mr_[data,newdata])
+ assert_equal_records(test, ma.mr_[data, newdata])
#
def test_append_timeseries(self):
"Test appending to a MaskedTable"
table = self.h5file.root.tseries
tseries = self.tseries
- newdata = ts.time_series(zip(np.random.rand(3), np.arange(3)+10),
- mask=[(0,0),(1,0),(0,1)],
+ newdata = ts.time_series(zip(np.random.rand(3), np.arange(3) + 10),
+ mask=[(0, 0), (1, 0), (0, 1)],
dtype=tseries.dtype,
- start_date=tseries.dates[-1]+1)
+ start_date=tseries.dates[-1] + 1)
table.append(newdata)
test = table.read()
self.failUnless(isinstance(test, TimeSeries))
- assert_equal_records(test, ts.concatenate((tseries,newdata)))
- #
+ assert_equal_records(test, ts.concatenate((tseries, newdata)))
+
###############################################################################
-#------------------------------------------------------------------------------
+
if __name__ == "__main__":
if has_tables:
run_module_suite()
+
View
47 scikits/timeseries/tdates.py
@@ -550,7 +550,7 @@ def tostring(self):
# Note: we better cache the result
if self._cachedinfo['tostr'] is None:
firststr = str(self[0])
- if self.size > 0:
+ if self.size:
ncharsize = len(firststr)
tostr = np.fromiter((str(d) for d in self),
dtype='|S%i' % ncharsize)
@@ -580,7 +580,7 @@ def asfreq(self, freq=None, relation="END"):
"""
# Note: As we define a new object, we don't need caching
- if freq is None or freq == _c.FR_UND:
+ if (freq is None) or (freq == _c.FR_UND):
return self
tofreq = check_freq(freq)
if tofreq == self.freq:
@@ -595,9 +595,48 @@ def asfreq(self, freq=None, relation="END"):
if fromfreq == _c.FR_UND:
new = self.__array__()
else:
- new = cseries.DA_asfreq(self.__array__(), fromfreq, tofreq, relation[0])
+ new = cseries.DA_asfreq(self.__array__(),
+ fromfreq, tofreq, relation[0])
return DateArray(new, freq=freq)
+
+ #......................................................
+ # Pickling
+ def __getstate__(self):
+ """
+        Returns the internal state of the DateArray, for pickling purposes.
+ """
+ state = (1,
+ self.shape,
+ self.dtype,
+ self.flags.fnc,
+ self.view(ndarray).tostring(),
+ self.freq,
+ )
+ return state
+ #
+ def __setstate__(self, state):
+ """
+        Restores the internal state of the DateArray, for pickling purposes.
+        ``state`` is typically the output of ``__getstate__``, and is a 6-tuple:
+
+        - a version number
+        - a tuple giving the shape of the data
+        - a typecode for the data
+        - a flag indicating whether the data is Fortran-contiguous
+        - a binary string for the data
+        - the frequency of the DateArray.
+ """
+ (ver, shp, typ, isf, raw, frq) = state
+ ndarray.__setstate__(self, (shp, typ, isf, raw))
+ self.freq = frq
+
+ def __reduce__(self):
+ """Returns a 3-tuple for pickling a DateArray."""
+ return (self.__class__,
+ (self.__array__(), self.freq),
+ self.__getstate__())
+
+
def find_dates(self, *dates):
"""
Returns the indices corresponding to given dates, as an array.
@@ -982,6 +1021,8 @@ def date_array(dlist=None, start_date=None, end_date=None, length=None,
dates._cachedinfo.update(ischrono=True, chronidx=np.array([], dtype=int))
return dates
+
+
#####---------------------------------------------------------------------------
#---- --- Definition of functions from the corresponding methods ---
#####---------------------------------------------------------------------------

0 comments on commit 96a9d6b

Please sign in to comment.
Something went wrong with that request. Please try again.