Skip to content
This repository
Browse code

REF: more nanosecond support fixes, test suite passes #1238

  • Loading branch information...
commit 9bc381470b2d8e9f78f7d4b5734f437871c31dc0 (1 parent: 4f15d54)
Authored by Wes McKinney (wesm); committed by Komnomnomnom
8 pandas/core/algorithms.py
@@ -108,6 +108,8 @@ def factorize(values, sort=False, order=None, na_sentinel=-1):
108 108 Returns
109 109 -------
110 110 """
  111 + values = np.asarray(values)
  112 + is_datetime = com.is_datetime64_dtype(values)
111 113 hash_klass, values = _get_data_algo(values, _hashtables)
112 114
113 115 uniques = []
@@ -129,6 +131,9 @@ def factorize(values, sort=False, order=None, na_sentinel=-1):
129 131 uniques = uniques.take(sorter)
130 132 counts = counts.take(sorter)
131 133
  134 + if is_datetime:
  135 + uniques = np.array(uniques, dtype='M8[ns]')
  136 +
132 137 return labels, uniques, counts
133 138
134 139 def value_counts(values, sort=True, ascending=False):
@@ -179,6 +184,9 @@ def _get_data_algo(values, func_map):
179 184 if com.is_float_dtype(values):
180 185 f = func_map['float64']
181 186 values = com._ensure_float64(values)
  187 + elif com.is_datetime64_dtype(values):
  188 + f = func_map['int64']
  189 + values = values.view('i8')
182 190 elif com.is_integer_dtype(values):
183 191 f = func_map['int64']
184 192 values = com._ensure_int64(values)
14 pandas/core/common.py
@@ -171,7 +171,7 @@ def wrapper(arr, indexer, out, fill_value=np.nan):
171 171 'int64' : _algos.take_1d_int64,
172 172 'object' : _algos.take_1d_object,
173 173 'bool' : _view_wrapper(_algos.take_1d_bool, np.uint8),
174   - 'datetime64[us]' : _view_wrapper(_algos.take_1d_int64, np.int64,
  174 + 'datetime64[ns]' : _view_wrapper(_algos.take_1d_int64, np.int64,
175 175 na_override=lib.NaT),
176 176 }
177 177
@@ -181,7 +181,7 @@ def wrapper(arr, indexer, out, fill_value=np.nan):
181 181 'int64' : _algos.take_2d_axis0_int64,
182 182 'object' : _algos.take_2d_axis0_object,
183 183 'bool' : _view_wrapper(_algos.take_2d_axis0_bool, np.uint8),
184   - 'datetime64[us]' : _view_wrapper(_algos.take_2d_axis0_int64, np.int64,
  184 + 'datetime64[ns]' : _view_wrapper(_algos.take_2d_axis0_int64, np.int64,
185 185 na_override=lib.NaT),
186 186 }
187 187
@@ -191,7 +191,7 @@ def wrapper(arr, indexer, out, fill_value=np.nan):
191 191 'int64' : _algos.take_2d_axis1_int64,
192 192 'object' : _algos.take_2d_axis1_object,
193 193 'bool' : _view_wrapper(_algos.take_2d_axis1_bool, np.uint8),
194   - 'datetime64[us]' : _view_wrapper(_algos.take_2d_axis1_int64, np.int64,
  194 + 'datetime64[ns]' : _view_wrapper(_algos.take_2d_axis1_int64, np.int64,
195 195 na_override=lib.NaT),
196 196 }
197 197
@@ -201,7 +201,7 @@ def wrapper(arr, indexer, out, fill_value=np.nan):
201 201 'int64' : _algos.take_2d_multi_int64,
202 202 'object' : _algos.take_2d_multi_object,
203 203 'bool' : _view_wrapper(_algos.take_2d_multi_bool, np.uint8),
204   - 'datetime64[us]' : _view_wrapper(_algos.take_2d_multi_int64, np.int64,
  204 + 'datetime64[ns]' : _view_wrapper(_algos.take_2d_multi_int64, np.int64,
205 205 na_override=lib.NaT),
206 206 }
207 207
@@ -246,7 +246,7 @@ def take_1d(arr, indexer, out=None, fill_value=np.nan):
246 246 out.dtype)
247 247 out = _maybe_upcast(out)
248 248 np.putmask(out, mask, fill_value)
249   - elif dtype_str in ('float64', 'object', 'datetime64[us]'):
  249 + elif dtype_str in ('float64', 'object', 'datetime64[ns]'):
250 250 if out is None:
251 251 out = np.empty(n, dtype=arr.dtype)
252 252 take_f(arr, _ensure_int64(indexer), out=out, fill_value=fill_value)
@@ -284,7 +284,7 @@ def take_2d_multi(arr, row_idx, col_idx, fill_value=np.nan):
284 284 _ensure_int64(col_idx), out=out,
285 285 fill_value=fill_value)
286 286 return out
287   - elif dtype_str in ('float64', 'object', 'datetime64[us]'):
  287 + elif dtype_str in ('float64', 'object', 'datetime64[ns]'):
288 288 out = np.empty(out_shape, dtype=arr.dtype)
289 289 take_f(arr, _ensure_int64(row_idx), _ensure_int64(col_idx), out=out,
290 290 fill_value=fill_value)
@@ -326,7 +326,7 @@ def take_2d(arr, indexer, out=None, mask=None, needs_masking=None, axis=0,
326 326 take_f = _get_take2d_function(dtype_str, axis=axis)
327 327 take_f(arr, _ensure_int64(indexer), out=out, fill_value=fill_value)
328 328 return out
329   - elif dtype_str in ('float64', 'object', 'datetime64[us]'):
  329 + elif dtype_str in ('float64', 'object', 'datetime64[ns]'):
330 330 if out is None:
331 331 out = np.empty(out_shape, dtype=arr.dtype)
332 332 take_f = _get_take2d_function(dtype_str, axis=axis)
34 pandas/core/factor.py
@@ -18,11 +18,17 @@ class Factor(np.ndarray):
18 18 * levels : ndarray
19 19 """
20 20 def __new__(cls, data):
21   - data = np.asarray(data, dtype=object)
22   - levels, factor = unique_with_labels(data)
23   - factor = factor.view(Factor)
24   - factor.levels = levels
25   - return factor
  21 + from pandas.core.index import _ensure_index
  22 + from pandas.core.algorithms import factorize
  23 +
  24 + try:
  25 + labels, levels, _ = factorize(data, sort=True)
  26 + except TypeError:
  27 + labels, levels, _ = factorize(data, sort=False)
  28 +
  29 + labels = labels.view(Factor)
  30 + labels.levels = _ensure_index(levels)
  31 + return labels
26 32
27 33 levels = None
28 34
@@ -51,21 +57,3 @@ def __getitem__(self, key):
51 57 else:
52 58 return np.ndarray.__getitem__(self, key)
53 59
54   -
55   -def unique_with_labels(values):
56   - from pandas.core.index import Index
57   - rizer = lib.Factorizer(len(values))
58   - labels, _ = rizer.factorize(values, sort=False)
59   - uniques = Index(rizer.uniques)
60   - labels = com._ensure_platform_int(labels)
61   - try:
62   - sorter = uniques.argsort()
63   - reverse_indexer = np.empty(len(sorter), dtype=np.int_)
64   - reverse_indexer.put(sorter, np.arange(len(sorter)))
65   - labels = reverse_indexer.take(labels)
66   - uniques = uniques.take(sorter)
67   - except TypeError:
68   - pass
69   -
70   - return uniques, labels
71   -
26 pandas/core/format.py
@@ -571,16 +571,30 @@ def get_result(self):
571 571 if self.formatter:
572 572 formatter = self.formatter
573 573 else:
574   - def formatter(x):
575   - if isnull(x):
576   - return 'NaT'
577   - else:
578   - return str(x)
  574 + formatter = _format_datetime64
579 575
580 576 fmt_values = [formatter(x) for x in self.values]
581   -
582 577 return _make_fixed_width(fmt_values, self.justify)
583 578
  579 +def _format_datetime64(x):
  580 + if isnull(x):
  581 + return 'NaT'
  582 +
  583 + stamp = lib.Timestamp(x)
  584 + base = stamp.strftime('%Y-%m-%d %H:%M:%S')
  585 +
  586 + fraction = stamp.microsecond * 1000 + stamp.nanosecond
  587 + digits = 9
  588 +
  589 + if fraction == 0:
  590 + return base
  591 +
  592 + while (fraction % 10) == 0:
  593 + fraction /= 10
  594 + digits -= 1
  595 +
  596 + return base + ('.%%.%id' % digits) % fraction
  597 +
584 598
585 599 def _make_fixed_width(strings, justify='right'):
586 600 if len(strings) == 0:
20 pandas/core/index.py
@@ -691,8 +691,8 @@ def get_indexer(self, target, method=None, limit=None):
691 691 return pself.get_indexer(ptarget, method=method, limit=limit)
692 692
693 693 if self.dtype != target.dtype:
694   - this = Index(self, dtype=object)
695   - target = Index(target, dtype=object)
  694 + this = self.astype(object)
  695 + target = target.astype(object)
696 696 return this.get_indexer(target, method=method, limit=limit)
697 697
698 698 if not self.is_unique:
@@ -1172,8 +1172,12 @@ def __new__(cls, levels=None, labels=None, sortorder=None, names=None):
1172 1172 levels = [_ensure_index(lev) for lev in levels]
1173 1173 labels = [np.asarray(labs, dtype=np.int_) for labs in labels]
1174 1174
1175   - values = [ndtake(np.asarray(lev), lab)
  1175 + values = [ndtake(lev.values, lab)
1176 1176 for lev, lab in zip(levels, labels)]
  1177 +
  1178 + # Need to box timestamps, etc.
  1179 + values = _clean_arrays(values)
  1180 +
1177 1181 subarr = lib.fast_zip(values).view(cls)
1178 1182
1179 1183 subarr.levels = levels
@@ -2372,3 +2376,13 @@ def _maybe_box_dtindex(idx):
2372 2376 return Index(_dt_box_array(idx.asi8), dtype='object')
2373 2377 return idx
2374 2378
  2379 +def _clean_arrays(values):
  2380 + result = []
  2381 + for arr in values:
  2382 + if np.issubdtype(arr.dtype, np.datetime_):
  2383 + result.append(lib.map_infer(arr, lib.Timestamp))
  2384 + else:
  2385 + result.append(arr)
  2386 + return result
  2387 +
  2388 +
2  pandas/core/nanops.py
@@ -405,7 +405,7 @@ def unique1d(values):
405 405 uniques = np.array(table.unique(com._ensure_int64(values)),
406 406 dtype=np.int64)
407 407
408   - if values.dtype == np.datetime64:
  408 + if issubclass(values.dtype.type, np.datetime_):
409 409 uniques = uniques.view('M8[ns]')
410 410 else:
411 411 table = lib.PyObjectHashTable(len(values))
7 pandas/io/pytables.py
@@ -839,8 +839,7 @@ def _read_panel_table(self, group, where=None):
839 839
840 840 columns = _maybe_convert(sel.values['column'],
841 841 table._v_attrs.columns_kind)
842   - index = _maybe_convert(sel.values['index'],
843   - table._v_attrs.index_kind)
  842 + index = _maybe_convert(sel.values['index'], table._v_attrs.index_kind)
844 843 values = sel.values['values']
845 844
846 845 major = Factor(index)
@@ -995,7 +994,7 @@ def _maybe_convert(values, val_kind):
995 994
996 995 def _get_converter(kind):
997 996 if kind == 'datetime64':
998   - return lambda x: np.datetime64(x)
  997 + return lambda x: np.array(x, dtype='M8[ns]')
999 998 if kind == 'datetime':
1000 999 return lib.convert_timestamps
1001 1000 else: # pragma: no cover
@@ -1069,7 +1068,7 @@ def generate(self, where):
1069 1068 field = c['field']
1070 1069
1071 1070 if field == 'index' and self.index_kind == 'datetime64':
1072   - val = np.datetime64(value).view('i8')
  1071 + val = lib.Timestamp(value).value
1073 1072 self.conditions.append('(%s %s %s)' % (field,op,val))
1074 1073 elif field == 'index' and isinstance(value, datetime):
1075 1074 value = time.mktime(value.timetuple())
3  pandas/io/tests/test_parsers.py
@@ -376,7 +376,8 @@ def test_parse_dates_column_list(self):
376 376 lev = expected.index.levels[0]
377 377 expected.index.levels[0] = lev.to_datetime(dayfirst=True)
378 378 expected['aux_date'] = to_datetime(expected['aux_date'],
379   - dayfirst=True).astype('O')
  379 + dayfirst=True)
  380 + expected['aux_date'] = map(Timestamp, expected['aux_date'])
380 381 self.assert_(isinstance(expected['aux_date'][0], datetime))
381 382
382 383 df = read_csv(StringIO(data), sep=";", index_col = range(4),
17 pandas/sparse/frame.py
@@ -741,6 +741,23 @@ def apply(self, func, axis=0, broadcast=False):
741 741 else:
742 742 return self._apply_broadcast(func, axis)
743 743
  744 + def applymap(self, func):
  745 + """
  746 + Apply a function to a DataFrame that is intended to operate
  747 + elementwise, i.e. like doing map(func, series) for each series in the
  748 + DataFrame
  749 +
  750 + Parameters
  751 + ----------
  752 + func : function
  753 + Python function, returns a single value from a single value
  754 +
  755 + Returns
  756 + -------
  757 + applied : DataFrame
  758 + """
  759 + return self.apply(lambda x: map(func, x))
  760 +
744 761 @Appender(DataFrame.fillna.__doc__)
745 762 def fillna(self, value=None, method='pad', inplace=False, limit=None):
746 763 new_series = {}
49 pandas/src/datetime.pyx
@@ -136,6 +136,11 @@ class Timestamp(_Timestamp):
136 136 conv = tz.normalize(self)
137 137 return Timestamp(conv)
138 138
  139 + def replace(self, **kwds):
  140 + return Timestamp(datetime.replace(self, **kwds),
  141 + offset=self.offset)
  142 +
  143 +
139 144 cdef inline bint is_timestamp(object o):
140 145 return isinstance(o, Timestamp)
141 146
@@ -194,10 +199,38 @@ def apply_offset(ndarray[object] values, object offset):
194 199 # (see Timestamp class above). This will serve as a C extension type that
195 200 # shadows the python class, where we do any heavy lifting.
196 201 cdef class _Timestamp(datetime):
197   - cdef public:
  202 + cdef readonly:
198 203 int64_t value, nanosecond
199 204 object offset # frequency reference
200 205
  206 + def __richcmp__(_Timestamp self, object other, int op):
  207 + cdef _Timestamp ots
  208 +
  209 + if isinstance(other, _Timestamp):
  210 + ots = other
  211 + elif isinstance(other, datetime):
  212 + ots = Timestamp(other)
  213 + else:
  214 + if op == 2:
  215 + return False
  216 + elif op == 3:
  217 + return True
  218 + else:
  219 + raise TypeError('Cannot compare Timestamp with %s' % str(other))
  220 +
  221 + if op == 2: # ==
  222 + return self.value == ots.value
  223 + elif op == 3: # !=
  224 + return self.value != ots.value
  225 + elif op == 0: # <
  226 + return self.value < ots.value
  227 + elif op == 1: # <=
  228 + return self.value <= ots.value
  229 + elif op == 4: # >
  230 + return self.value > ots.value
  231 + elif op == 5: # >=
  232 + return self.value >= ots.value
  233 +
201 234 def __add__(self, other):
202 235 if is_integer_object(other):
203 236 if self.offset is None:
@@ -313,6 +346,7 @@ cdef inline int64_t _pydatetime_to_dts(object val, pandas_datetimestruct *dts):
313 346 dts.min = PyDateTime_DATE_GET_MINUTE(val)
314 347 dts.sec = PyDateTime_DATE_GET_SECOND(val)
315 348 dts.us = PyDateTime_DATE_GET_MICROSECOND(val)
  349 + dts.ps = dts.as = 0
316 350 return pandas_datetimestruct_to_datetime(PANDAS_FR_ns, dts)
317 351
318 352 cdef inline int64_t _dtlike_to_datetime64(object val,
@@ -324,6 +358,7 @@ cdef inline int64_t _dtlike_to_datetime64(object val,
324 358 dts.min = val.minute
325 359 dts.sec = val.second
326 360 dts.us = val.microsecond
  361 + dts.ps = dts.as = 0
327 362 return pandas_datetimestruct_to_datetime(PANDAS_FR_ns, dts)
328 363
329 364 cdef inline int64_t _date_to_datetime64(object val,
@@ -331,10 +366,8 @@ cdef inline int64_t _date_to_datetime64(object val,
331 366 dts.year = PyDateTime_GET_YEAR(val)
332 367 dts.month = PyDateTime_GET_MONTH(val)
333 368 dts.day = PyDateTime_GET_DAY(val)
334   - dts.hour = 0
335   - dts.min = 0
336   - dts.sec = 0
337   - dts.us = 0
  369 + dts.hour = dts.min = dts.sec = dts.us = 0
  370 + dts.ps = dts.as = 0
338 371 return pandas_datetimestruct_to_datetime(PANDAS_FR_ns, dts)
339 372
340 373
@@ -928,7 +961,7 @@ cpdef ndarray _unbox_utcoffsets(object transinfo):
928 961 arr = np.empty(sz, dtype='i8')
929 962
930 963 for i in range(sz):
931   - arr[i] = int(total_seconds(transinfo[i][0])) * 1000000
  964 + arr[i] = int(total_seconds(transinfo[i][0])) * 1000000000
932 965
933 966 return arr
934 967
@@ -1243,7 +1276,7 @@ def dt64arr_to_periodarr(ndarray[int64_t] dtarr, int freq):
1243 1276 for i in range(l):
1244 1277 pandas_datetime_to_datetimestruct(dtarr[i], PANDAS_FR_ns, &dts)
1245 1278 out[i] = get_period_ordinal(dts.year, dts.month, dts.day,
1246   - dts.hour, dts.min, dts.sec, freq)
  1279 + dts.hour, dts.min, dts.sec, freq)
1247 1280 return out
1248 1281
1249 1282 def periodarr_to_dt64arr(ndarray[int64_t] periodarr, int freq):
@@ -1338,7 +1371,7 @@ cpdef int64_t period_ordinal_to_dt64(int64_t ordinal, int freq):
1338 1371 dts.hour = dinfo.hour
1339 1372 dts.min = dinfo.minute
1340 1373 dts.sec = int(dinfo.second)
1341   - dts.us = 0
  1374 + dts.us = dts.ps = 0
1342 1375
1343 1376 return pandas_datetimestruct_to_datetime(PANDAS_FR_ns, &dts)
1344 1377
7 pandas/src/inference.pyx
@@ -491,15 +491,13 @@ def map_infer(ndarray arr, object f):
491 491 '''
492 492 cdef:
493 493 Py_ssize_t i, n
494   - flatiter it
495 494 ndarray[object] result
496 495 object val
497 496
498   - it = <flatiter> PyArray_IterNew(arr)
499 497 n = len(arr)
500 498 result = np.empty(n, dtype=object)
501 499 for i in range(n):
502   - val = f(PyArray_GETITEM(arr, PyArray_ITER_DATA(it)))
  500 + val = f(util.get_value_at(arr, i))
503 501
504 502 # unbox 0-dim arrays, GH #690
505 503 if is_array(val) and PyArray_NDIM(val) == 0:
@@ -508,9 +506,6 @@ def map_infer(ndarray arr, object f):
508 506
509 507 result[i] = val
510 508
511   -
512   - PyArray_ITER_NEXT(it)
513   -
514 509 return maybe_convert_objects(result, try_float=0)
515 510
516 511 def to_object_array(list rows):
3  pandas/src/reduce.pyx
@@ -85,11 +85,14 @@ cdef class Reducer:
85 85 except Exception, e:
86 86 if hasattr(e, 'args'):
87 87 e.args = e.args + (i,)
  88 + raise
88 89 finally:
89 90 # so we don't free the wrong memory
90 91 chunk.data = dummy_buf
  92 +
91 93 if result.dtype == np.object_:
92 94 result = maybe_convert_objects(result)
  95 +
93 96 return result
94 97
95 98 def _get_result_array(self, object res):
3  pandas/tests/test_frame.py
@@ -1629,7 +1629,7 @@ def test_constructor_maskedarray_nonfloat(self):
1629 1629 self.assertEqual(2, frame['C'][2])
1630 1630
1631 1631 # masked np.datetime64 stays (use lib.NaT as null)
1632   - mat = ma.masked_all((2, 3), dtype=np.datetime64)
  1632 + mat = ma.masked_all((2, 3), dtype='M8[ns]')
1633 1633 # 2-D input
1634 1634 frame = DataFrame(mat, columns=['A', 'B', 'C'], index=[1, 2])
1635 1635
@@ -5683,7 +5683,6 @@ def test_index_namedtuple(self):
5683 5683 idx2 = IndexType("baz", "bof")
5684 5684 index = Index([idx1, idx2], name="composite_index")
5685 5685 df = DataFrame([(1, 2), (3, 4)], index=index, columns=["A", "B"])
5686   - print df.ix[IndexType("foo", "bar")]["A"]
5687 5686 self.assertEqual(df.ix[IndexType("foo", "bar")]["A"], 1)
5688 5687
5689 5688 def test_bool_raises_value_error_1069(self):
2  pandas/tests/test_series.py
@@ -1359,7 +1359,7 @@ def test_comparison_different_length(self):
1359 1359 self.assertRaises(ValueError, a.__lt__, b)
1360 1360
1361 1361 def test_between(self):
1362   - s = Series(bdate_range('1/1/2000', periods=20), dtype=object)
  1362 + s = Series(bdate_range('1/1/2000', periods=20).asobject)
1363 1363 s[::2] = np.nan
1364 1364
1365 1365 result = s[s.between(s[3], s[17])]
4 pandas/tseries/frequencies.py
@@ -753,10 +753,10 @@ def get_freq(self):
753 753 return _maybe_add_count('L', delta / _ONE_MILLI)
754 754 elif _is_multiple(delta, _ONE_MICRO):
755 755 # Microseconds
756   - return _maybe_add_count('L', delta / _ONE_MICRO)
  756 + return _maybe_add_count('U', delta / _ONE_MICRO)
757 757 else:
758 758 # Nanoseconds
759   - return _maybe_add_count('U', delta)
  759 + return _maybe_add_count('N', delta)
760 760
761 761 @cache_readonly
762 762 def day_deltas(self):
3  pandas/tseries/index.py
@@ -1229,8 +1229,7 @@ def _dt_box_array(arr, offset=None, tz=None):
1229 1229 return arr
1230 1230
1231 1231 boxfunc = lambda x: Timestamp(x, offset=offset, tz=tz)
1232   - boxer = np.frompyfunc(boxfunc, 1, 1)
1233   - return boxer(arr)
  1232 + return lib.map_infer(arr, boxfunc)
1234 1233
1235 1234
1236 1235 def _to_m8(key):
14 pandas/tseries/offsets.py
@@ -990,11 +990,13 @@ def _delta_to_tick(delta):
990 990 else:
991 991 return Second(seconds)
992 992 else:
993   - mus = _delta_to_nanoseconds(delta)
994   - if mus % 1000 == 0:
995   - return Milli(mus // 1000)
  993 + nanos = _delta_to_nanoseconds(delta)
  994 + if nanos % 1000000 == 0:
  995 + return Milli(nanos // 1000000)
  996 + elif nanos % 1000 == 0:
  997 + return Micro(nanos // 1000)
996 998 else:
997   - return Micro(mus)
  999 + return Nano(nanos)
998 1000
999 1001 def _delta_to_nanoseconds(delta):
1000 1002 if isinstance(delta, Tick):
@@ -1030,6 +1032,10 @@ class Micro(Tick):
1030 1032 _inc = timedelta(microseconds=1)
1031 1033 _rule_base = 'U'
1032 1034
  1035 +class Nano(Tick):
  1036 + _inc = 1
  1037 + _rule_base = 'N'
  1038 +
1033 1039 BDay = BusinessDay
1034 1040 BMonthEnd = BusinessMonthEnd
1035 1041 BMonthBegin = BusinessMonthBegin
5 pandas/tseries/period.py
@@ -466,6 +466,9 @@ def _period_box_array(arr, freq):
466 466 return boxer(arr)
467 467
468 468 def dt64arr_to_periodarr(data, freq):
  469 + if data.dtype != np.dtype('M8[ns]'):
  470 + raise ValueError('Wrong dtype: %s' % data.dtype)
  471 +
469 472 if data is None:
470 473 return data
471 474
@@ -607,7 +610,7 @@ def __new__(cls, data=None,
607 610 raise ValueError(('freq not specified and cannot be '
608 611 'inferred from first element'))
609 612
610   - if data.dtype == np.datetime64:
  613 + if issubclass(data.dtype.type, np.datetime_):
611 614 data = dt64arr_to_periodarr(data, freq)
612 615 elif data.dtype == np.int64:
613 616 pass
2  pandas/tseries/tests/test_resample.py
@@ -54,7 +54,7 @@ def test_custom_grouper(self):
54 54 # construct expected val
55 55 arr = [1] + [5] * 2592
56 56 idx = dti[0:-1:5]
57   - idx = idx.append(DatetimeIndex([np.datetime64(dti[-1])]))
  57 + idx = idx.append(dti[-1:])
58 58 expect = Series(arr, index=idx)
59 59
60 60 # cython returns float for now
36 pandas/tseries/tests/test_timeseries.py
@@ -351,7 +351,7 @@ def test_reindex_frame_add_nat(self):
351 351 self.assert_(not mask[:-5].any())
352 352
353 353 def test_series_repr_nat(self):
354   - series = Series([0, 1, 2, NaT], dtype='M8[ns]')
  354 + series = Series([0, 1000, 2000, NaT], dtype='M8[ns]')
355 355
356 356 result = repr(series)
357 357 expected = ('0 1970-01-01 00:00:00\n'
@@ -1160,13 +1160,35 @@ def test_basics_nanos(self):
1160 1160 self.assert_(stamp.nanosecond == 500)
1161 1161
1162 1162 def test_comparison(self):
1163   - arr = np.array(['1/1/2000'], dtype='M8[ns]')
1164   -
1165   - x = Timestamp(arr[0].view('i8') + 500)
1166   - y = Timestamp(arr[0].view('i8'))
  1163 + # 5-18-2012 00:00:00.000
  1164 + stamp = 1337299200000000000L
  1165 +
  1166 + val = Timestamp(stamp)
  1167 +
  1168 + self.assert_(val == val)
  1169 + self.assert_(not val != val)
  1170 + self.assert_(not val < val)
  1171 + self.assert_(val <= val)
  1172 + self.assert_(not val > val)
  1173 + self.assert_(val >= val)
  1174 +
  1175 + other = datetime(2012, 5, 18)
  1176 + self.assert_(val == other)
  1177 + self.assert_(not val != other)
  1178 + self.assert_(not val < other)
  1179 + self.assert_(val <= other)
  1180 + self.assert_(not val > other)
  1181 + self.assert_(val >= other)
  1182 +
  1183 + other = Timestamp(stamp + 100)
  1184 +
  1185 + self.assert_(not val == other)
  1186 + self.assert_(val != other)
  1187 + self.assert_(val < other)
  1188 + self.assert_(val <= other)
  1189 + self.assert_(other > val)
  1190 + self.assert_(other >= val)
1167 1191
1168   - self.assert_(arr[0].astype('O') == x)
1169   - self.assert_(x != y)
1170 1192
1171 1193 """
1172 1194

0 comments on commit 9bc3814

Please sign in to comment.
Something went wrong with that request. Please try again.