Permalink
Browse files

General/API Cleanups and Added Data Processors

 * Added custom data processor classes where the following methods
   are called on a per value basis
   - process_type_<type_name>(field_data)
   - process_field_<field_name>(field_data)
   - process_message_<mesg_name>(data_message)

 * Updated profile.py and its data structures in record.py to leave
   out values in its `__init__` method and have None by default.
   (Updated profile export script as well)

 * Tweaked `__str__`/`__repr__` methods on records.py classes

 * Minor Refactor For Timestamp Field and Record Base Class

 * Made the argparse module a `setup.py`/pip requirement for Python
   2.6 and lower in anticipation of using it for the fitdump script.
  • Loading branch information...
1 parent b5c6619 commit 1d477d0046cdbcb2db66a41bd0935840098b3e3d @dtcooper committed Dec 12, 2012
Showing with 190 additions and 1,983 deletions.
  1. +6 −1 fitparse/__init__.py
  2. +37 −21 fitparse/base.py
  3. +42 −0 fitparse/processors.py
  4. +16 −1,874 fitparse/profile.py
  5. +35 −47 fitparse/records.py
  6. +3 −3 scripts/fitdump
  7. +34 −30 scripts/generate_profile.py
  8. +12 −2 setup.py
  9. +5 −5 tests/test.py
View
@@ -1,4 +1,9 @@
from fitparse.base import FitFile, FitParseError
+from fitparse.processors import FitFileDataProcessor, StandardUnitsDataProcessor
+
__version__ = '0.0.1-dev'
-__all__ = ['FitFile', 'FitParseError']
+__all__ = [
+ 'FitFileDataProcessor', 'FitFile', 'FitParseError',
+ 'StandardUnitsDataProcessor',
+]
View
@@ -5,32 +5,21 @@
except ImportError:
import StringIO
-from fitparse.profile import MESSAGE_TYPES
+from fitparse.processors import FitFileDataProcessor
+from fitparse.profile import FIELD_TYPE_TIMESTAMP, MESSAGE_TYPES
from fitparse.records import (
DataMessage, FieldData, FieldDefinition, DefinitionMessage, MessageHeader,
BASE_TYPES, BASE_TYPE_BYTE
)
from fitparse.utils import calc_crc
-# TODO: put in button of profile.py
-def get_timestamp_field():
- for mesg_type in MESSAGE_TYPES.values():
- field = mesg_type.fields.get(TIMESTAMP_DEF_NUM)
- if field:
- return field
- raise Exception("TODO: no date_time")
-
-TIMESTAMP_DEF_NUM = 253
-DATE_TIME_FIELD = get_timestamp_field()
-
-
class FitParseError(Exception):
pass
class FitFile(object):
- def __init__(self, fileish, check_crc=True):
+ def __init__(self, fileish, check_crc=True, data_processor=None):
if isinstance(fileish, basestring):
try:
self._file = open(fileish, 'rb')
@@ -45,13 +34,14 @@ def __init__(self, fileish, check_crc=True):
self.check_crc = check_crc
+ self._accumulators = {}
+ self._complete = False
+ self._compressed_ts_accumulator = 0
self._crc = 0
self._data_bytes_left = -1 # Not valid until after _parse_file_header()
self._local_mesgs = {}
- self._accumulators = {}
- self._compressed_ts_accumulator = 0
- self._complete = False
self._messages = []
+ self._processor = data_processor or FitFileDataProcessor()
# Start off by parsing the file header (makes self._data_bytes_left valid)
self._parse_file_header()
@@ -325,7 +315,7 @@ def _parse_data_message(self, header):
value = raw_value
# Update compressed timestamp field
- if (field_def.def_num == TIMESTAMP_DEF_NUM) and (raw_value is not None):
+ if (field_def.def_num == FIELD_TYPE_TIMESTAMP.def_num) and (raw_value is not None):
self._compressed_ts_accumulator = raw_value
field_datas.append(
@@ -346,14 +336,31 @@ def _parse_data_message(self, header):
field_datas.append(
FieldData(
field_def=None,
- field=DATE_TIME_FIELD,
+ field=FIELD_TYPE_TIMESTAMP,
parent_field=None,
- value=DATE_TIME_FIELD.render(ts_value),
+ value=FIELD_TYPE_TIMESTAMP.render(ts_value),
raw_value=ts_value,
)
)
- return DataMessage(header=header, def_mesg=def_mesg, fields=field_datas)
+ # Apply data processors
+ for field_data in field_datas:
+ # Apply type processor
+ type_processor = getattr(self._processor, 'process_type_%s' % field_data.type.name, None)
+ if type_processor:
+ type_processor(field_data)
+
+ field_processor = getattr(self._processor, 'process_field_%s' % field_data.name, None)
+ if field_processor:
+ field_processor(field_data)
+
+ data_message = DataMessage(header=header, def_mesg=def_mesg, fields=field_datas)
+
+ mesg_processor = getattr(self._processor, 'process_message_%s' % def_mesg.name, None)
+ if mesg_processor:
+ mesg_processor(data_message)
+
+ return data_message
##########
# Public API
@@ -363,6 +370,7 @@ def get_messages(
with_definitions=False, as_dict=False,
):
# TODO: Implement the query arguments, also let them be tuples, ie name=('record', 'event')
+ # TODO: maybe remove mesg_num since fields are predictably named "unknown_NN"
if with_definitions: # with_definitions implies as_dict=False
as_dict = False
@@ -398,6 +406,14 @@ def parse(self):
while self._parse_message():
pass
+ def possible_field_names(self, name):
+ """Return the sorted, de-duplicated field names seen across all messages named *name*.
+
+ Iterates self.get_messages(name) and collects each record's ``name``
+ attribute into a set before sorting.
+ """
+ # XXX unused, only use me if fitdump/csv requires it
+ field_names = set()
+ for message in self.get_messages(name):
+ for record in message:
+ field_names.add(record.name)
+ return sorted(field_names)
+
def __iter__(self):
# Iterating a FitFile delegates to get_messages() with its default arguments
return self.get_messages()
View
@@ -0,0 +1,42 @@
+import datetime
+
+
class FitFileDataProcessor(object):
    """Base hook class for transforming parsed FIT values in place.

    FitFile looks these methods up by name and calls whichever exist:
      - process_type_<type_name>(field_data)
      - process_field_<field_name>(field_data) -- can be unknown_DD but NOT recommended
      - process_message_<mesg_name / mesg_type_num>(data_message)
    """
    # TODO: Document API

    def process_type_bool(self, field_data):
        """Coerce a non-None raw value into a Python bool."""
        if field_data.value is None:
            return
        field_data.value = bool(field_data.value)

    def process_type_date_time(self, field_data):
        """Convert a FIT timestamp into a UTC datetime.

        Raw values below 0x10000000 are left untouched (they do not get the
        epoch offset applied in this processor).
        """
        raw = field_data.value
        if raw is None or raw < 0x10000000:
            return
        # 631065600 shifts from the FIT epoch to the Unix epoch
        field_data.value = datetime.datetime.utcfromtimestamp(raw + 631065600)
        field_data.units = None  # Units were 's', set to None

    def process_type_local_date_time(self, field_data):
        """Convert a FIT local timestamp into a local-timezone datetime."""
        raw = field_data.value
        if raw is None:
            return
        # Same FIT-to-Unix epoch shift, rendered in the local timezone
        field_data.value = datetime.datetime.fromtimestamp(raw + 631065600)
        field_data.units = None
+
+
class StandardUnitsDataProcessor(FitFileDataProcessor):
    """Example processor that rescales common fields into everyday units."""

    def process_field_distance(self, field_data):
        """Rescale distance from meters to kilometers."""
        if field_data.value is None:
            return
        field_data.value = field_data.value / 1000.0
        field_data.units = 'km'

    def process_field_speed(self, field_data):
        """Rescale speed from m/s to km/h."""
        if field_data.value is None:
            return
        field_data.value = field_data.value * (60.0 * 60.0 / 1000.0)
        field_data.units = 'km/h'

    def process_field_position_lat(self, field_data):
        """Convert latitude from semicircles to degrees (180 deg per 2**31)."""
        if field_data.value is None:
            return
        field_data.value = field_data.value * (180.0 / (2 ** 31))
        field_data.units = 'deg'

    # Longitude uses the identical semicircle-to-degree conversion
    process_field_position_long = process_field_position_lat
Oops, something went wrong.

0 comments on commit 1d477d0

Please sign in to comment.