Merge branch 'merge-utc'
thobbs committed May 29, 2012
2 parents 55dfd3c + 373f262 commit b9110f2
Showing 4 changed files with 81 additions and 110 deletions.
121 changes: 41 additions & 80 deletions pycassa/marshal.py
@@ -4,24 +4,34 @@
"""

import uuid
import time
import struct
import calendar
from datetime import datetime

import pycassa.util as util

_number_types = frozenset((int, long, float))

if hasattr(struct, 'Struct'): # new in Python 2.5
_have_struct = True
_bool_packer = struct.Struct('>B')
_float_packer = struct.Struct('>f')
_double_packer = struct.Struct('>d')
_long_packer = struct.Struct('>q')
_int_packer = struct.Struct('>i')
_short_packer = struct.Struct('>H')
def make_packer(fmt_string):
return struct.Struct(fmt_string)
else:
_have_struct = False
def make_packer(fmt_string):
class Struct(object):
def pack(self, v):
return struct.pack(fmt_string, v)

def unpack(self, v):
return struct.unpack(fmt_string, v)

return Struct()

_bool_packer = make_packer('>B')
_float_packer = make_packer('>f')
_double_packer = make_packer('>d')
_long_packer = make_packer('>q')
_int_packer = make_packer('>i')
_short_packer = make_packer('>H')
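
The make_packer shim above gives every caller the same pack/unpack interface whether or not struct.Struct is available (it was added in Python 2.5), which is what lets the rest of this diff delete all of the _have_struct branches. A minimal usage sketch, assuming only the module-level packers defined above:

    # Both branches of make_packer return an object with pack() and unpack(),
    # so code elsewhere in marshal.py never has to test for struct.Struct.
    raw = _long_packer.pack(1338336000000)    # 8-byte big-endian signed long
    value = _long_packer.unpack(raw)[0]       # unpack returns a tuple in both branches
    assert value == 1338336000000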

_BASIC_TYPES = ['BytesType', 'LongType', 'IntegerType', 'UTF8Type',
'AsciiType', 'LexicalUUIDType', 'TimeUUIDType',
@@ -64,7 +74,7 @@ def _get_composite_name(typestr):
def _to_timestamp(v):
# Expects Value to be either date or datetime
try:
converted = time.mktime(v.timetuple())
converted = calendar.timegm(v.utctimetuple())
converted = converted * 1e3 + getattr(v, 'microsecond', 0)/1e3
except AttributeError:
# Ints and floats are valid timestamps too
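
The heart of the change is this _to_timestamp edit: time.mktime interprets a time tuple in the machine's local timezone, while calendar.timegm interprets it as UTC, and utctimetuple() also converts tz-aware datetimes to UTC first. A standalone sketch of the difference (illustrative, not part of the diff):

    import calendar
    import time
    from datetime import datetime

    d = datetime(2012, 5, 29, 12, 0, 0)            # naive datetime
    local_secs = time.mktime(d.timetuple())        # old behaviour: read as local time
    utc_secs = calendar.timegm(d.utctimetuple())   # new behaviour: read as UTC
    # The two differ by the host's UTC offset (e.g. 7200 seconds on a UTC+2 machine).
    millis = utc_secs * 1e3 + d.microsecond / 1e3  # what DateType actually stores
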
@@ -82,10 +92,7 @@ def get_composite_packer(typestr=None, composite_type=None):
elif composite_type:
packers = [c.pack for c in composite_type.components]

if _have_struct:
len_packer = _short_packer.pack
else:
len_packer = lambda v: struct.pack('>H', v)
len_packer = _short_packer.pack

def pack_composite(items, slice_start=None):
last_index = len(items) - 1
@@ -125,10 +132,7 @@ def get_composite_unpacker(typestr=None, composite_type=None):
elif composite_type:
unpackers = [c.unpack for c in composite_type.components]

if _have_struct:
len_unpacker = lambda v: _short_packer.unpack(v)[0]
else:
len_unpacker = lambda v: struct.unpack('>H', v)[0]
len_unpacker = lambda v: _short_packer.unpack(v)[0]

def unpack_composite(bytestr):
# The composite format for each component is:
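
The comment above is cut off by the diff view; to my understanding, the layout that pack_composite and unpack_composite agree on encodes each component as a two-byte big-endian length, the packed component bytes, and a one-byte end-of-component marker. A hedged sketch (the helper name is mine, not part of pycassa):

    def _encode_component(packed_bytes, eoc='\x00'):
        # '\x00' closes a normal component; slice queries (see slice_start above)
        # use other end-of-component bytes to control inclusiveness of the range.
        return _short_packer.pack(len(packed_bytes)) + packed_bytes + eoc
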
@@ -158,57 +162,33 @@ def packer_for(typestr):
data_type = extract_type_name(typestr)

if data_type == 'DateType':
if _have_struct:
def pack_date(v, _=None):
return _long_packer.pack(_to_timestamp(v))
else:
def pack_date(v, _=None):
return struct.pack('>q', _to_timestamp(v))
def pack_date(v, _=None):
return _long_packer.pack(_to_timestamp(v))
return pack_date

elif data_type == 'BooleanType':
if _have_struct:
def pack_bool(v, _=None):
return _bool_packer.pack(bool(v))
else:
def pack_bool(v, _=None):
return struct.pack('>B', bool(v))
def pack_bool(v, _=None):
return _bool_packer.pack(bool(v))
return pack_bool

elif data_type == 'DoubleType':
if _have_struct:
def pack_double(v, _=None):
return _double_packer.pack(v)
else:
def pack_double(v, _=None):
return struct.pack('>d', v)
def pack_double(v, _=None):
return _double_packer.pack(v)
return pack_double

elif data_type == 'FloatType':
if _have_struct:
def pack_float(v, _=None):
return _float_packer.pack(v)
else:
def pack_float(v, _=None):
return struct.pack('>f', v)
def pack_float(v, _=None):
return _float_packer.pack(v)
return pack_float

elif data_type == 'LongType':
if _have_struct:
def pack_long(v, _=None):
return _long_packer.pack(v)
else:
def pack_long(v, _=None):
return struct.pack('>q', v)
def pack_long(v, _=None):
return _long_packer.pack(v)
return pack_long

elif data_type == 'Int32Type':
if _have_struct:
def pack_int32(v, _=None):
return _int_packer.pack(v)
else:
def pack_int32(v, _=None):
return struct.pack('>i', v)
def pack_int32(v, _=None):
return _int_packer.pack(v)
return pack_int32

elif data_type == 'IntegerType':
@@ -268,42 +248,23 @@ def unpacker_for(typestr):
return lambda v: v

elif data_type == 'DateType':
if _have_struct:
return lambda v: datetime.fromtimestamp(
_long_packer.unpack(v)[0] / 1e3)
else:
return lambda v: datetime.fromtimestamp(
struct.unpack('>q', v)[0] / 1e3)
return lambda v: datetime.utcfromtimestamp(
_long_packer.unpack(v)[0] / 1e3)

elif data_type == 'BooleanType':
if _have_struct:
return lambda v: bool(_bool_packer.unpack(v)[0])
else:
return lambda v: bool(struct.unpack('>B', v)[0])
return lambda v: bool(_bool_packer.unpack(v)[0])

elif data_type == 'DoubleType':
if _have_struct:
return lambda v: _double_packer.unpack(v)[0]
else:
return lambda v: struct.unpack('>d', v)[0]
return lambda v: _double_packer.unpack(v)[0]

elif data_type == 'FloatType':
if _have_struct:
return lambda v: _float_packer.unpack(v)[0]
else:
return lambda v: struct.unpack('>f', v)[0]
return lambda v: _float_packer.unpack(v)[0]

elif data_type == 'LongType':
if _have_struct:
return lambda v: _long_packer.unpack(v)[0]
else:
return lambda v: struct.unpack('>q', v)[0]
return lambda v: _long_packer.unpack(v)[0]

elif data_type == 'Int32Type':
if _have_struct:
return lambda v: _int_packer.unpack(v)[0]
else:
return lambda v: struct.unpack('>i', v)[0]
return lambda v: _int_packer.unpack(v)[0]

elif data_type == 'IntegerType':
return decode_int
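
With the _have_struct branches collapsed and fromtimestamp replaced by utcfromtimestamp, a DateType value now round-trips through pack and unpack without depending on the host timezone. A sketch of that round trip, assuming packer_for and unpacker_for accept the bare type name:

    from datetime import datetime
    import pycassa.marshal as marshal

    pack_date = marshal.packer_for('DateType')
    unpack_date = marshal.unpacker_for('DateType')

    d = datetime(2012, 5, 29, 12, 0, 0)   # naive, now treated as UTC
    raw = pack_date(d)                    # 8-byte millisecond timestamp
    restored = unpack_date(raw)           # utcfromtimestamp -> naive UTC again
    assert restored == d                  # exact for whole-millisecond inputs
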
49 changes: 27 additions & 22 deletions pycassa/types.py
@@ -21,8 +21,7 @@
"""

import time
import struct
import calendar
from datetime import datetime

import pycassa.marshal as marshal
@@ -122,6 +121,12 @@ class DateType(CassandraType):
An 8 byte timestamp. This will be returned
as a :class:`datetime.datetime` instance by pycassa. Either
:class:`datetime` instances or timestamps will be accepted.
.. versionchanged:: 1.7.0
Prior to 1.7.0, datetime objects were expected to be in
local time. In 1.7.0 and beyond, naive datetimes are
assumed to be in UTC and tz-aware objects will be
automatically converted to UTC for storage in Cassandra.
"""
pass
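
The versionchanged note also covers tz-aware objects: utctimetuple() normalizes them, so an aware datetime and its naive UTC equivalent marshal to the same stored value. A hedged illustration (pytz is used here only for the example and is not a pycassa dependency):

    import pytz
    from datetime import datetime

    aware = pytz.timezone('US/Pacific').localize(datetime(2012, 5, 28, 17, 0, 0))
    naive_utc = datetime(2012, 5, 29, 0, 0, 0)
    # Both represent 2012-05-29 00:00:00 UTC and should pack to the same bytes.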

@@ -135,7 +140,7 @@ def _to_timestamp(v, use_micros=False):
micro_scale = 1e3

try:
converted = time.mktime(v.timetuple())
converted = calendar.timegm(v.utctimetuple())
converted = (converted * scale) + \
(getattr(v, 'microsecond', 0) / micro_scale)
except AttributeError:
@@ -155,23 +160,23 @@ class OldPycassaDateType(CassandraType):
unix epoch, rather than the number of milliseconds, which
is what cassandra-cli and other clients supporting DateType
use.
.. versionchanged:: 1.7.0
Prior to 1.7.0, datetime objects were expected to be in
local time. In 1.7.0 and beyond, naive datetimes are
assumed to be in UTC and tz-aware objects will be
automatically converted to UTC for storage in Cassandra.
"""

@staticmethod
def pack(v, *args, **kwargs):
ts = _to_timestamp(v, use_micros=True)
if marshal._have_struct:
return marshal._long_packer.pack(ts)
else:
return struct.pack('>q', ts)
return marshal._long_packer.pack(ts)

@staticmethod
def unpack(v):
if marshal._have_struct:
ts = marshal._long_packer.unpack(v)[0] / 1e6
else:
ts = struct.unpack('>q', v)[0] / 1e6
return datetime.fromtimestamp(ts)
ts = marshal._long_packer.unpack(v)[0] / 1e6
return datetime.utcfromtimestamp(ts)

class IntermediateDateType(CassandraType):
"""
@@ -188,29 +193,29 @@ class IntermediateDateType(CassandraType):
It almost certainly *should not be used* for row keys,
column names (if you care about the sorting), or column
values that have a secondary index on them.
.. versionchanged:: 1.7.0
Prior to 1.7.0, datetime objects were expected to be in
local time. In 1.7.0 and beyond, naive datetimes are
assumed to be in UTC and tz-aware objects will be
automatically converted to UTC for storage in Cassandra.
"""

@staticmethod
def pack(v, *args, **kwargs):
ts = _to_timestamp(v, use_micros=False)
if marshal._have_struct:
return marshal._long_packer.pack(ts)
else:
return struct.pack('>q', ts)
return marshal._long_packer.pack(ts)

@staticmethod
def unpack(v):
if marshal._have_struct:
raw_ts = marshal._long_packer.unpack(v)[0] / 1e3
else:
raw_ts = struct.unpack('>q', v)[0] / 1e3
raw_ts = marshal._long_packer.unpack(v)[0] / 1e3

try:
return datetime.fromtimestamp(raw_ts)
return datetime.utcfromtimestamp(raw_ts)
except ValueError:
# convert from bad microsecond format to millis
corrected_ts = raw_ts / 1e3
return datetime.fromtimestamp(corrected_ts)
return datetime.utcfromtimestamp(corrected_ts)

class CompositeType(CassandraType):
"""
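
The ValueError fallback in IntermediateDateType.unpack is what lets it read both values written by old pycassa (microseconds) and standard DateType values (milliseconds). A worked example of the numbers involved (illustrative only):

    from datetime import datetime

    # Old pycassa stored 2012-05-29 00:00:00 UTC as microseconds:
    legacy_value = 1338249600000000
    # Read as milliseconds this would be tens of thousands of years in the
    # future, which utcfromtimestamp rejects, so unpack divides by 1e3 again:
    assert datetime.utcfromtimestamp(legacy_value / 1e6) == datetime(2012, 5, 29)
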
13 changes: 9 additions & 4 deletions pycassa/util.py
@@ -6,8 +6,7 @@

import random
import uuid
import time
import datetime
import calendar

__all__ = ['convert_time_to_uuid', 'convert_uuid_to_time', 'OrderedDict']

@@ -56,12 +55,18 @@ def convert_time_to_uuid(time_arg, lowest_val=True, randomize=False):
:rtype: :class:`uuid.UUID`
.. versionchanged:: 1.7.0
Prior to 1.7.0, datetime objects were expected to be in
local time. In 1.7.0 and beyond, naive datetimes are
assumed to be in UTC and tz-aware objects will be
automatically converted to UTC.
"""
if isinstance(time_arg, uuid.UUID):
return time_arg

if hasattr(time_arg, 'timetuple'):
seconds = int(time.mktime(time_arg.timetuple()))
if hasattr(time_arg, 'utctimetuple'):
seconds = int(calendar.timegm(time_arg.utctimetuple()))
microseconds = (seconds * 1e6) + time_arg.time().microsecond
elif type(time_arg) in _number_types:
microseconds = int(time_arg * 1e6)
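
convert_time_to_uuid now derives its version-1 UUID timestamp from UTC seconds as well, so the TimeUUID range endpoints it produces no longer shift with the local timezone. A usage sketch with the signature shown above:

    from datetime import datetime
    import pycassa.util as util

    instant = datetime(2012, 5, 29, 12, 0, 0)        # naive, interpreted as UTC
    lo = util.convert_time_to_uuid(instant, lowest_val=True)
    hi = util.convert_time_to_uuid(instant, lowest_val=False)
    # lo and hi bracket every v1 UUID carrying that UTC timestamp, independent
    # of the timezone of the machine running pycassa.
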
8 changes: 4 additions & 4 deletions tests/test_autopacking.py
@@ -921,19 +921,19 @@ def test_datetime_to_uuid(self):
key = 'key1'
timeline = []

timeline.append(datetime.now())
timeline.append(datetime.utcnow())
time1 = uuid1()
col1 = {time1:'0'}
cf_time.insert(key, col1)
time.sleep(1)

timeline.append(datetime.now())
timeline.append(datetime.utcnow())
time2 = uuid1()
col2 = {time2:'1'}
cf_time.insert(key, col2)
time.sleep(1)

timeline.append(datetime.now())
timeline.append(datetime.utcnow())

cols = {time1:'0', time2:'1'}

@@ -1013,7 +1013,7 @@ def test_compatibility(self):
self.cf = ColumnFamily(pool, 'Standard1')
self.cf.column_validators['date'] = OldPycassaDateType()

d = datetime.now()
d = datetime.utcnow()
self.cf.insert('key1', {'date': d})
self._compare_dates(self.cf.get('key1')['date'], d)

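
The tests switch from datetime.now() to datetime.utcnow() because values now come back as naive UTC datetimes, so comparing against local-time inputs would be off by the UTC offset. A hedged sketch of the compatibility round trip that test_compatibility exercises, using only the staticmethods shown in types.py above:

    from datetime import datetime
    from pycassa.types import OldPycassaDateType

    d = datetime.utcnow()
    raw = OldPycassaDateType.pack(d)            # legacy microsecond encoding
    restored = OldPycassaDateType.unpack(raw)   # naive UTC datetime
    # restored matches d to roughly microsecond precision (float rounding aside).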
