diff --git a/.gitignore b/.gitignore index 188159a2..ab4c6b9c 100644 --- a/.gitignore +++ b/.gitignore @@ -43,3 +43,4 @@ secrets.py *.lock src/forklift.log* src/config.json +config.json diff --git a/src/forklift/__main__.py b/src/forklift/__main__.py index 620c8a38..b5320b4c 100644 --- a/src/forklift/__main__.py +++ b/src/forklift/__main__.py @@ -8,7 +8,7 @@ forklift config --add forklift config --remove forklift config --list - forklift list [] + forklift list-pallets [] forklift lift [] Arguments: @@ -20,13 +20,13 @@ config --add path/to/folder adds a path to the config. Checks for duplicates. config --remove path/to/folder removes a path from the config. config --list outputs the list of pallet folder paths in your config file. - list outputs the list of pallets from the config. - list path/to/folder outputs the list of pallets for the passed in path. + list-pallets outputs the list of pallets from the config. + list-pallets path/to/folder outputs the list of pallets for the passed in path. lift the main entry for running all of pallets found in the config paths. lift path/to/file run a specific pallet. ''' -import lift +import cli import logging.config import sys from docopt import docopt @@ -38,35 +38,36 @@ def main(): if args['config']: if args['--init']: - message = lift.init() + message = cli.init() print('config file created: {}'.format(message)) if args['--add'] and args['']: - message = lift.add_config_folder(args['']) + message = cli.add_config_folder(args['']) print(message) if args['--remove'] and args['']: - message = lift.remove_pallet_folder(args['']) - print('{} removed from config file'.format(message)) + message = cli.remove_config_folder(args['']) + print(message) if args['--list']: - lift.list_config_folders() - elif args['list']: + for folder in cli.list_config_folders(): + print(folder) + elif args['list-pallets']: if args['']: - pallets = lift.list_pallets(args['']) + pallets = cli.list_pallets(args['']) else: - pallets = lift.list_pallets() + pallets = cli.list_pallets() if len(pallets) == 0: print('No pallets found!') else: for plug in pallets: print(': '.join(plug)) - elif args['update']: + elif args['lift']: if args['']: - lift.lift(args['']) + cli.start_lift(args['']) else: - lift.lift() + cli.start_lift() def _setup_logging(): diff --git a/src/forklift/cli.py b/src/forklift/cli.py new file mode 100644 index 00000000..df1fe5a6 --- /dev/null +++ b/src/forklift/cli.py @@ -0,0 +1,162 @@ +#!/usr/bin/env python +# * coding: utf8 * +''' +lift.py + +A module that contains the implementation of the cli commands +''' + +import logging +import settings +import sys +from glob import glob +from json import dumps, loads +from os.path import abspath, exists, join, splitext, basename, dirname +from models import Pallet +import lift +import core + +log = logging.getLogger(settings.LOGGER) + + +def init(): + if exists('config.json'): + return 'config file already created.' 
+ + default_pallet_locations = ['c:\\scheduled'] + + log.debug('creating config.json file.') + + return _set_config_folders(default_pallet_locations) + + +def add_config_folder(folder): + folders = get_config_folders() + + if folder in folders: + return '{} is already in the config folders list!'.format(folder) + + try: + _validate_config_folder(folder, raises=True) + except Exception as e: + return e.message + + folders.append(folder) + + _set_config_folders(folders) + + return 'added {}'.format(folder) + + +def remove_config_folder(folder): + folders = get_config_folders() + + try: + folders.remove(folder) + except ValueError: + return '{} is not in the config folders list!'.format(folder) + + _set_config_folders(folders) + + return 'removed {}'.format(folder) + + +def list_pallets(folders=None): + if folders is None: + folders = get_config_folders() + + return _get_pallets_in_folders(folders) + + +def list_config_folders(): + folders = get_config_folders() + + validate_results = [] + for folder in folders: + validate_results.append(_validate_config_folder(folder)) + + return validate_results + + +def _set_config_folders(folders): + if type(folders) != list: + raise Exception('config file data must be a list.') + + with open('config.json', 'w') as json_data_file: + data = dumps(folders) + + log.debug('writing %s to %s', data, abspath(json_data_file.name)) + json_data_file.write(data) + + return abspath(json_data_file.name) + + +def get_config_folders(): + if not exists('config.json'): + raise Exception('config file not found.') + + with open('config.json', 'r') as json_data_file: + config = loads(json_data_file.read()) + + return config + + +def _validate_config_folder(folder, raises=False): + if exists(folder): + valid = 'valid' + else: + valid = 'invalid!' 
+ if raises: + raise Exception('{}: {}'.format(folder, valid)) + + return('{}: {}'.format(folder, valid)) + + +def _get_pallets_in_folders(folders): + pallets = [] + + for folder in folders: + for py_file in glob(join(folder, '*.py')): + pallets.extend(_get_pallets_in_file(py_file)) + + return pallets + + +def _get_pallets_in_file(file_path): + pallets = [] + name = splitext(basename(file_path))[0] + folder = dirname(file_path) + + if folder not in sys.path: + sys.path.append(folder) + + mod = __import__(name) + + for member in dir(mod): + try: + potential_class = getattr(mod, member) + if issubclass(potential_class, Pallet) and potential_class != Pallet: + pallets.append((file_path, member)) + except: + #: member was likely not a class + pass + + return pallets + + +def start_lift(file_path=None): + if file_path is not None: + pallet_infos = _get_pallets_in_file(file_path) + else: + pallet_infos = list_pallets() + + pallets = [] + for info in pallet_infos: + module_name = splitext(basename(info[0]))[0] + class_name = info[1] + PalletClass = getattr(__import__(module_name), class_name) + pallets.append(PalletClass()) + + lift.process_crates_for(pallets, core.update) + + print(lift.process_pallets(pallets)) diff --git a/src/forklift/core.py b/src/forklift/core.py index 2077e191..7bdb3573 100644 --- a/src/forklift/core.py +++ b/src/forklift/core.py @@ -3,7 +3,7 @@ ''' core.py ----------------------------------------- -Tools for updating a filegeodatabase from an SDE database +Tools for updating the data associated with a models.Crate ''' import arcpy @@ -12,291 +12,270 @@ from datetime import datetime from itertools import izip from numpy.testing import assert_almost_equal -from os.path import join +from models import Crate +from exceptions import ValidationException -class Core(object): +log = logging.getLogger(settings.LOGGER) - def __init__(self): - self.log = logging.getLogger(settings.LOGGER) - self.changes = [] - def update_dataset(self, fgdb, f, sdeFC): - ''' - fgdb: String - file geodatabase - f: String - name of feature class to update - sdeFC: String - path to SDE feature class - is_table: Boolean - returns: Boolean - True if update was successful (even if no changes were found) +def update(crate, validate_crate): + ''' + crate: models.Crate + validate_crate: models.Pallet.validate_crate - Updates f with data from sdeFC. - ''' + returns: String + One of the result string constants from models.Crate - arcpy.env.workspace = fgdb - is_table = arcpy.Describe(f).datasetType == 'Table' + Checks to see if a crate can be updated by using validate_crate (if implemented + within the pallet) or _check_schema otherwise. If the crate is valid it + then updates the data. 
+ ''' + try: + if not arcpy.Exists(crate.destination): + _create_destination_data(crate) + + return Crate.CREATED + + #: check for custom validation logic, otherwise do a default schema check try: - self.log.info('checking for schema changes...') - if not self.check_schema(f, sdeFC): - # skip updating if the schemas do not match - return False - - self.log.info('checking for changes...') - if self.check_for_changes(f, sdeFC, is_table): - self.log.info('updating data...') - self.log.debug('trucating data for %s', f) - arcpy.TruncateTable_management(f) - - # edit session required for data that participates in relationships - self.log.debug('starting edit session...') - editSession = arcpy.da.Editor(fgdb) - editSession.startEditing(False, False) - editSession.startOperation() - - fields = [fld.name for fld in arcpy.ListFields(f)] - fields = self._filter_fields(fields) - if not is_table: - fields.append('SHAPE@') - outputSR = arcpy.Describe(f).spatialReference - else: - outputSR = None - with arcpy.da.InsertCursor(f, fields) as icursor, \ - arcpy.da.SearchCursor(sdeFC, fields, sql_clause=(None, 'ORDER BY OBJECTID'), - spatial_reference=outputSR) as cursor: - for row in cursor: - icursor.insertRow(row) - - editSession.stopOperation() - editSession.stopEditing(True) - self.log.debug('edit session stopped') - - self.changes.append(f.upper()) - else: - self.log.info('no changes found') - - return True - except Exception as e: - self.log.error(e) - - return False - - def check_schema(self, source_dataset, destination_dataset): - ''' - source_dataset: String - destination_dataset: String - - returns: Boolean - True if the schemas match - ''' - - def get_fields(dataset): - field_dict = {} - for field in arcpy.ListFields(dataset): - if not self._is_naughty_field(field.name): - field_dict[field.name.upper()] = field - return field_dict - - missing_fields = [] - mismatching_fields = [] - source_fields = get_fields(source_dataset) - destination_fields = get_fields(destination_dataset) - - for field_key in destination_fields.keys(): - # make sure that all fields from destination are in source - # not sure that we care if there are fields in source that are not in destination - destination_fld = destination_fields[field_key] - if field_key not in source_fields.keys(): - missing_fields.append(destination_fld.name) - else: - source_fld = source_fields[field_key] - if source_fld.type != destination_fld.type: - mismatching_fields.append( - '{}: source type of {} does not match destination type of {}' - .format(source_fld.name, - source_fld.type, - destination_fld.type)) - elif source_fld.type == 'String' and source_fld.length != destination_fld.length: - mismatching_fields.append( - '{}: source length of {} does not match destination length of {}' - .format(source_fld.name, - source_fld.length, - destination_fld.length)) - - if len(missing_fields) > 0: - self.log.error('Missing fields in %s: %s', source_dataset, ', '.join(missing_fields)) - return False - elif len(mismatching_fields) > 0: - self.log.error('Mismatching fields in %s: %s', source_dataset, ', '.join(mismatching_fields)) - return False + has_custom = validate_crate(crate) + if has_custom == NotImplemented: + _check_schema(crate) + except ValidationException as e: + return (Crate.INVALID_DATA, e.message) + + if _check_for_changes(crate): + _move_data(crate) + return Crate.UPDATED else: - return True - - def update_fgdb_from_sde(self, fgdb, sde): - ''' - fgdb: String - file geodatabase - sde: String - sde geodatabase connection - returns: String[] - 
the list of errors - - Loops through the file geodatabase feature classes and looks for - matches in the SDE database. If there is a match, it does a schema check - and then updates the data. - ''' - - self.log.info('Updating %s from %s', fgdb, sde) - - # loop through local feature classes - arcpy.env.workspace = fgdb - fcs = arcpy.ListFeatureClasses() + arcpy.ListTables() - totalFcs = len(fcs) - i = 0 - for f in fcs: - i = i + 1 - self.log.info('%s of %s | %s', i, totalFcs, f) - - found = False - - # search for match in stand-alone feature classes - arcpy.env.workspace = sde - matches = arcpy.ListFeatureClasses('*.{}'.format(f)) + arcpy.ListTables('*.{}'.format(f)) - if matches is not None and len(matches) > 0: - match = matches[0] - sdeFC = join(sde, match) - found = True - else: - # search in feature datasets - datasets = arcpy.ListDatasets() - if len(datasets) > 0: - # loop through datasets - for ds in datasets: - matches = arcpy.ListFeatureClasses('*.{}'.format(f), None, ds) - if matches is not None and len(matches) > 0: - match = matches[0] - sdeFC = join(sde, match) - found = True - break - if not found: - self.log.error('no match found in sde for %s', f) - continue - - self.update_dataset(fgdb, f, sdeFC) - - return (self.changes) - - def was_modified_today(self, fcname): - ''' - fcname: String - - returns: Boolean - - Checks to see if fcname within the fgdb was updated today. - ''' - - return fcname.upper() in self.changes - - def _filter_fields(self, lst): - ''' - lst: String[] - - returns: String[] - - Filters out fields that mess up the update logic. - ''' - - newFields = [] - for fld in lst: - if not self._is_naughty_field(fld): - newFields.append(fld) - return newFields - - def _is_naughty_field(self, fld): - return 'SHAPE' in fld.upper() or fld.upper() in ['GLOBAL_ID', 'GLOBALID'] - - def check_for_changes(self, f, sde, is_table): - ''' - f: String - The name of the fgdb feature class - sde: String - The name of the sde feature class - is_table: Boolean - - returns: Boolean - False if there are no changes - ''' - - # try simple feature count first - fCount = int(arcpy.GetCount_management(f).getOutput(0)) - sdeCount = int(arcpy.GetCount_management(sde).getOutput(0)) - if fCount != sdeCount: - return True - - fields = [fld.name for fld in arcpy.ListFields(f)] - - # filter out shape fields - if not is_table: - fields = self._filter_fields(fields) - - d = arcpy.Describe(f) - shapeType = d.shapeType - if shapeType == 'Polygon': - shapeToken = 'SHAPE@AREA' - elif shapeType == 'Polyline': - shapeToken = 'SHAPE@LENGTH' - elif shapeType == 'Point': - shapeToken = 'SHAPE@XY' - else: - shapeToken = 'SHAPE@JSON' - fields.append(shapeToken) + return Crate.NO_CHANGES + except Exception as e: + return (Crate.UNHANDLED_EXCEPTION, e.message) + + +def _create_destination_data(crate): + if _is_table(crate): + arcpy.CopyRows_management(crate.source, crate.destination) + else: + arcpy.env.outputCoordinateSystem = crate.destination_coordinate_system + arcpy.env.geographicTransformations = crate.geographic_transformation + + arcpy.CopyFeatures_management(crate.source, crate.destination) + + #: prevent the stepping on of toes in any other scripts + arcpy.env.outputCoordinateSystem = None + arcpy.env.geographicTransformations = None + + +def _is_table(crate): + ''' + crate: Crate + + returns True if the crate defines a table + ''' + + return arcpy.Describe(crate.source).datasetType == 'Table' + + +def _move_data(crate): + ''' + crate: Crate + + move data from source to destination as defined by the 
crate + ''' + is_table = _is_table(crate) + + log.info('updating data...') + log.debug('trucating data for %s', crate.destination_name) + arcpy.TruncateTable_management(crate.destination) + + # edit session required for data that participates in relationships + log.debug('starting edit session...') + editSession = arcpy.da.Editor(crate.destination_workspace) + editSession.startEditing(False, False) + editSession.startOperation() + + fields = [fld.name for fld in arcpy.ListFields(crate.destination)] + fields = _filter_fields(fields) + if not is_table: + fields.append('SHAPE@') + outputSR = arcpy.Describe(crate.destination).spatialReference + else: + outputSR = None + with arcpy.da.InsertCursor(crate.destination, fields) as icursor, \ + arcpy.da.SearchCursor(crate.source, fields, sql_clause=(None, 'ORDER BY OBJECTID'), + spatial_reference=outputSR) as cursor: + for row in cursor: + icursor.insertRow(row) + + editSession.stopOperation() + editSession.stopEditing(True) + log.debug('edit session stopped') + + +def _check_schema(source_dataset, destination_dataset): + ''' + source_dataset: String + destination_dataset: String + + returns: Boolean - True if the schemas match + ''' + + def get_fields(dataset): + field_dict = {} + for field in arcpy.ListFields(dataset): + if not _is_naughty_field(field.name): + field_dict[field.name.upper()] = field + return field_dict + + missing_fields = [] + mismatching_fields = [] + source_fields = get_fields(source_dataset) + destination_fields = get_fields(destination_dataset) + + for field_key in destination_fields.keys(): + # make sure that all fields from destination are in source + # not sure that we care if there are fields in source that are not in destination + destination_fld = destination_fields[field_key] + if field_key not in source_fields.keys(): + missing_fields.append(destination_fld.name) + else: + source_fld = source_fields[field_key] + if source_fld.type != destination_fld.type: + mismatching_fields.append( + '{}: source type of {} does not match destination type of {}' + .format(source_fld.name, + source_fld.type, + destination_fld.type)) + elif source_fld.type == 'String' and source_fld.length != destination_fld.length: + mismatching_fields.append( + '{}: source length of {} does not match destination length of {}' + .format(source_fld.name, + source_fld.length, + destination_fld.length)) + + if len(missing_fields) > 0: + log.error('Missing fields in %s: %s', source_dataset, ', '.join(missing_fields)) + return False + elif len(mismatching_fields) > 0: + log.error('Mismatching fields in %s: %s', source_dataset, ', '.join(mismatching_fields)) + return False + else: + return True + + +def _filter_fields(lst): + ''' + lst: String[] + + returns: String[] + + Filters out fields that mess up the update logic. 
+ ''' + + newFields = [] + for fld in lst: + if not _is_naughty_field(fld): + newFields.append(fld) + return newFields - def parse_shape(shapeValue): - if shapeValue is None: - return 0 - elif shapeType in ['Polygon', 'Polyline']: - return shapeValue - elif shapeType == 'Point': - if shapeValue[0] is not None and shapeValue[1] is not None: - return shapeValue[0] + shapeValue[1] - else: - return 0 - else: - return shapeValue - # support for reprojecting - outputSR = arcpy.Describe(f).spatialReference +def _is_naughty_field(fld): + return 'SHAPE' in fld.upper() or fld.upper() in ['GLOBAL_ID', 'GLOBALID'] + + +def _check_for_changes(crate): + ''' + crate: Crate + f: String + The name of the fgdb feature class + sde: String + The name of the sde feature class + is_table: Boolean + + returns: Boolean + False if there are no changes + ''' + is_table = _is_table(crate) + + # try simple feature count first + fCount = int(arcpy.GetCount_management(crate.destination).getOutput(0)) + sdeCount = int(arcpy.GetCount_management(crate.source).getOutput(0)) + if fCount != sdeCount: + return True + + fields = [fld.name for fld in arcpy.ListFields(crate.destination)] + + # filter out shape fields + if not is_table: + fields = _filter_fields(fields) + + d = arcpy.Describe(crate.destination) + shapeType = d.shapeType + if shapeType == 'Polygon': + shapeToken = 'SHAPE@AREA' + elif shapeType == 'Polyline': + shapeToken = 'SHAPE@LENGTH' + elif shapeType == 'Point': + shapeToken = 'SHAPE@XY' else: - outputSR = None - - # compare each feature based on sorting by OBJECTID - with arcpy.da.SearchCursor(f, fields, sql_clause=(None, 'ORDER BY OBJECTID')) as fCursor, \ - arcpy.da.SearchCursor(sde, fields, sql_clause=(None, 'ORDER BY OBJECTID'), - spatial_reference=outputSR) as sdeCursor: - for fRow, sdeRow in izip(fCursor, sdeCursor): - if fRow != sdeRow: - # check shapes first - if fRow[-1] != sdeRow[-1] and not is_table: - if shapeType not in ['Polygon', 'Polyline', 'Point']: - return True - fShape = parse_shape(fRow[-1]) - sdeShape = parse_shape(sdeRow[-1]) - try: - assert_almost_equal(fShape, sdeShape, -1) - # trim off shapes - fRow = list(fRow[:-1]) - sdeRow = list(sdeRow[:-1]) - except AssertionError: - return True - - # trim microseconds since they can be off by one between file and sde databases - for i in range(len(fRow)): - if type(fRow[i]) is datetime: - fRow = list(fRow) - sdeRow = list(sdeRow) - fRow[i] = fRow[i].replace(microsecond=0) - try: - sdeRow[i] = sdeRow[i].replace(microsecond=0) - except: - pass - - # compare all values except OBJECTID - if fRow[1:] != sdeRow[1:]: + shapeToken = 'SHAPE@JSON' + fields.append(shapeToken) + + def parse_shape(shapeValue): + if shapeValue is None: + return 0 + elif shapeType in ['Polygon', 'Polyline']: + return shapeValue + elif shapeType == 'Point': + if shapeValue[0] is not None and shapeValue[1] is not None: + return shapeValue[0] + shapeValue[1] + else: + return 0 + else: + return shapeValue + + # support for reprojecting + outputSR = arcpy.Describe(crate.destination).spatialReference + else: + outputSR = None + + # compare each feature based on sorting by OBJECTID + with arcpy.da.SearchCursor(crate.destination, fields, sql_clause=(None, 'ORDER BY OBJECTID')) as fCursor, \ + arcpy.da.SearchCursor(crate.source, fields, sql_clause=(None, 'ORDER BY OBJECTID'), + spatial_reference=outputSR) as sdeCursor: + for fRow, sdeRow in izip(fCursor, sdeCursor): + if fRow != sdeRow: + # check shapes first + if fRow[-1] != sdeRow[-1] and not is_table: + if shapeType not in 
['Polygon', 'Polyline', 'Point']: + #: for complex types always return true for now + return True + fShape = parse_shape(fRow[-1]) + sdeShape = parse_shape(sdeRow[-1]) + try: + assert_almost_equal(fShape, sdeShape, -1) + # trim off shapes + fRow = list(fRow[:-1]) + sdeRow = list(sdeRow[:-1]) + except AssertionError: return True - return False + # trim microseconds since they can be off by one between file and sde databases + for i in range(len(fRow)): + if type(fRow[i]) is datetime: + fRow = list(fRow) + sdeRow = list(sdeRow) + fRow[i] = fRow[i].replace(microsecond=0) + try: + sdeRow[i] = sdeRow[i].replace(microsecond=0) + except: + pass + + # compare all values except OBJECTID + if fRow[1:] != sdeRow[1:]: + return True + + return False diff --git a/src/forklift/crate.py b/src/forklift/crate.py deleted file mode 100644 index 6e8ddfe7..00000000 --- a/src/forklift/crate.py +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env python -# * coding: utf8 * -''' -crate.py - -A module that defines a source and destination dataset that is a dependency of a pallet -''' - - -class Crate(object): - - def __init__(self, source_name, source, destination, destination_name=None): - #: the name of the source data table - self.source_name = source_name - - #: the name of the source database - self.source = source - - #: the name of the destination database - self.destination = destination - - #: the name of the output data table - self.destination_name = destination_name or source_name diff --git a/src/forklift/exceptions.py b/src/forklift/exceptions.py new file mode 100644 index 00000000..e8a91fe1 --- /dev/null +++ b/src/forklift/exceptions.py @@ -0,0 +1,11 @@ +#!/usr/bin/env python +# * coding: utf8 * +''' +exceptions.py + +Custom exceptions that are used in forklift +''' + + +class ValidationException(Exception): + pass diff --git a/src/forklift/lift.py b/src/forklift/lift.py index 2794da90..7d13be5d 100644 --- a/src/forklift/lift.py +++ b/src/forklift/lift.py @@ -3,145 +3,35 @@ ''' lift.py -A module that contains the implementation of the cli commands +A module that contains methods to handle pallets ''' -import logging -import settings -import sys -from pallet import Pallet -from glob import glob -from json import dumps, loads -from os.path import abspath, exists, join, splitext, basename -log = logging.getLogger(settings.LOGGER) +def process_crates_for(pallets, update_def): + ''' + pallets: Pallet[] + update_def: Function + Calls update_def on all crates (excluding duplicates) in pallets + ''' + processed_crates = {} -def init(): - if exists('config.json'): - return 'config file already created.' 
+ for pallet in pallets: + for crate in pallet.get_crates(): + if crate.destination in processed_crates: + crate.set_result(processed_crates[crate.destination]) + else: + processed_crates[crate.destination] = crate.set_result(update_def(crate, pallet.validate_crate)) - default_pallet_locations = ['c:\\scheduled'] - log.debug('creating config.json file.') +def process_pallets(pallets): + reports = [] + for pallet in pallets: + if pallet.is_ready_to_ship(): #: checks for schema changes or errors + if pallet.requires_processing(): #: checks for data that was updated + pallet.process() + pallet.ship() - return _set_config_folders(default_pallet_locations) + reports.append(pallet.get_report()) - -def add_config_folder(folder): - folders = get_config_folders() - - if folder in folders: - return '{} is already in the config folders list!'.format(folder) - - try: - _validate_config_folder(folder, raises=True) - except Exception as e: - return e.message - - folders.append(folder) - - _set_config_folders(folders) - - return 'added {}'.format(folder) - - -def remove_pallet_folder(folder): - folders = get_config_folders() - - try: - folders.remove(folder) - except ValueError: - return '{} is not in the config folders list!'.format(folder) - - return _set_config_folders(folders) - - -def list_pallets(folders=None): - if folders is None: - folders = get_config_folders() - - return _get_pallets_in_folders(folders) - - -def list_config_folders(): - folders = get_config_folders() - - for folder in folders: - yield _validate_config_folder(folder) - - -def _set_config_folders(folders): - if type(folders) != list: - raise Exception('config file data must be a list.') - - with open('config.json', 'w') as json_data_file: - data = dumps(folders) - - log.debug('writing %s to %s', data, abspath(json_data_file.name)) - json_data_file.write(data) - - return abspath(json_data_file.name) - - -def get_config_folders(): - if not exists('config.json'): - raise Exception('config file not found.') - - with open('config.json', 'r') as json_data_file: - config = loads(json_data_file.read()) - - return config - - -def _validate_config_folder(folder, raises=False): - if exists(folder): - valid = 'valid' - else: - valid = 'invalid!' 
- if raises: - raise Exception('{}: {}'.format(folder, valid)) - - print('{}: {}'.format(folder, valid)) - - -def _get_pallets_in_folders(folders): - pallets = [] - - for folder in folders: - sys.path.append(folder) - - for py_file in glob(join(folder, '*.py')): - pallets.extend(_get_pallets_in_file(py_file)) - - return pallets - - -def _get_pallets_in_file(file_folder): - pallets = [] - name = splitext(basename(file_folder))[0] - mod = __import__(name) - - for member in dir(mod): - try: - potential_class = getattr(mod, member) - if issubclass(potential_class, Pallet) and potential_class != Pallet: - pallets.append((file_folder, member)) - except: - #: member was likely not a class - pass - - return pallets - - -def lift(file_path=None): - if file_path is not None: - pallet_infos = _get_pallets_in_file(file_path) - else: - pallet_infos = list_pallets() - - for info in pallet_infos: - palletClass = getattr(__import__(splitext(basename(info[0]))[0]), info[1]) - pallet = palletClass() - - pallet.process() + return reports diff --git a/src/forklift/models.py b/src/forklift/models.py new file mode 100644 index 00000000..50fd713c --- /dev/null +++ b/src/forklift/models.py @@ -0,0 +1,149 @@ +#!/usr/bin/env python +# * coding: utf8 * +''' +models.py + +A module that contains the model classes for forklift +''' + + +import logging +import settings +from os.path import join + + +class Pallet(object): + '''A module that contains the base class that should be inherited from when building new pallet classes. + + Pallets are plugins for the forklift main process. They define a list of crates and + any post processing that needs to happen. + ''' + + def __init__(self): + #: the logging module to keep track of the pallet + self.log = logging.getLogger(settings.LOGGER) + #: the table names for all dependent data for an application + self._crates = [] + + def process(self): + '''This method will be called by forklift if any of the crates data is modified + ''' + return NotImplemented + + def ship(self): + '''this method fires whether the crates have any updates or not + ''' + return NotImplemented + + def get_crates(self): + '''returns an array of crates affected by the pallet. This is a self documenting way to know what layers an + application is using. + + set `self.crates` in your child pallet. 
+ ''' + + return self._crates + + def add_crates(self, crate_infos, defaults={}): + crate_param_names = ['source_name', 'source_workspace', 'destination_workspace', 'destination_name'] + + for info in crate_infos: + params = defaults.copy() + + #: info can be a table name here instead of a tuple + if isinstance(info, basestring): + params['source_name'] = info + else: + for i, val in enumerate(info): + params[crate_param_names[i]] = val + + self._crates.append(Crate(**params)) + + def add_crate(self, crate_info): + self.add_crates([crate_info]) + + def validate_crate(self, crate): + '''override to provide your own validation to determine whether the data within + a create is ready to be updated + + this method should return a boolean indicating if the crate is ready for an update + + if this method is not overriden then the default validate within core is used + ''' + return NotImplemented + + def is_ready_to_ship(self): + '''checks to see if there are any schema changes or errors within the crates + associated with this pallet + + returns: Boolean + Returns True if there are no crates defined + ''' + for crate in self._crates: + if crate.result in [Crate.INVALID_DATA, Crate.UNHANDLED_EXCEPTION]: + return False + + return True + + def requires_processing(self): + '''checks to see if any of the crates were updated + + returns: Boolean + Returns False if there are no crates defined + ''' + + has_updated = False + for crate in self._crates: + if crate.result in [Crate.INVALID_DATA, Crate.UNHANDLED_EXCEPTION]: + return False + if not has_updated: + has_updated = crate.result == Crate.UPDATED + + return has_updated + + def get_report(self): + '''returns a message about the result of each crate in the plugin''' + return ['{}: {}'.format(c.destination, c.result) for c in self.get_crates()] + + +class Crate(object): + '''A module that defines a source and destination dataset that is a dependency of a pallet + ''' + + #: possible results returned from core.update_crate + CREATED = 'Created table successfully.' + UPDATED = 'Data updated successfully.' + INVALID_DATA = 'Data is invalid.' + NO_CHANGES = 'No changes found.' + UNHANDLED_EXCEPTION = 'Unhandled exception during update.' + UNINITIALIZED = 'This crate was never processed.' 
+ + def __init__(self, + source_name, + source_workspace, + destination_workspace, + destination_name=None, + destination_coordinate_system=None, + geographic_transformation=None): + #: the name of the source data table + self.source_name = source_name + #: the name of the source database + self.source_workspace = source_workspace + #: the name of the destination database + self.destination_workspace = destination_workspace + #: the name of the output data table + self.destination_name = destination_name or source_name + #: the result of the core.update method being called on this crate + self.result = self.UNINITIALIZED + #: optional definition of destination coordinate system to support reprojecting + self.destination_coordinate_system = destination_coordinate_system + #: optional geographic transformation to support reprojecting + self.geographic_transformation = geographic_transformation + + self.source = join(source_workspace, source_name) + self.destination = join(destination_workspace, self.destination_name) + + def set_result(self, value): + self.result = value + + return value diff --git a/src/forklift/pallet.py b/src/forklift/pallet.py deleted file mode 100644 index 558bafe5..00000000 --- a/src/forklift/pallet.py +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env python -# * coding: utf8 * -''' -pallet.py - -A module that contains the base class that should be inherited from when building new pallet classes. - -Pallets are plugins for the forklift main process. They define a list of crates and -any post processing that needs to happen. -''' - -import logging -import settings -from crate import Crate - - -class Pallet(object): - - def __init__(self): - #: the table names for all dependent data for an application - self.crates = [] - #: the logging module to keep track of the pallet - self.log = logging.getLogger(settings.LOGGER) - - def process(self): - '''This method will be called by forklift if any of the crates data is modified - ''' - pass - - def get_crates(self): - '''returns an array of crates affected by the pallet. This is a self documenting way to know what layers an - application is using. - - set `self.crates` in your child pallet. 
- ''' - - return self.crates - - def add_crates(self, crate_infos, defaults={}): - crate_param_names = ['source_name', 'source', 'destination', 'destination_name'] - - for info in crate_infos: - params = defaults.copy() - - #: info can be a table name here instead of a tuple - if isinstance(info, basestring): - params['source_name'] = info - else: - for i, val in enumerate(info): - params[crate_param_names[i]] = val - - self.crates.append(Crate(**params)) - - def add_crate(self, crate_info): - self.add_crates([crate_info]) diff --git a/tests/data/UPDATE_TESTS.bak b/tests/data/UPDATE_TESTS.bak index 9c052a0b..98b93fc0 100755 Binary files a/tests/data/UPDATE_TESTS.bak and b/tests/data/UPDATE_TESTS.bak differ diff --git a/tests/data/checkForChanges.gdb/a00000001.TablesByName.atx b/tests/data/checkForChanges.gdb/a00000001.TablesByName.atx index 263d0bd9..4b656786 100644 Binary files a/tests/data/checkForChanges.gdb/a00000001.TablesByName.atx and b/tests/data/checkForChanges.gdb/a00000001.TablesByName.atx differ diff --git a/tests/data/checkForChanges.gdb/a00000001.freelist b/tests/data/checkForChanges.gdb/a00000001.freelist index 534ba808..680ed47e 100644 Binary files a/tests/data/checkForChanges.gdb/a00000001.freelist and b/tests/data/checkForChanges.gdb/a00000001.freelist differ diff --git a/tests/data/checkForChanges.gdb/a00000001.gdbtable b/tests/data/checkForChanges.gdb/a00000001.gdbtable index 851e26f8..ee5736cf 100644 Binary files a/tests/data/checkForChanges.gdb/a00000001.gdbtable and b/tests/data/checkForChanges.gdb/a00000001.gdbtable differ diff --git a/tests/data/checkForChanges.gdb/a00000001.gdbtablx b/tests/data/checkForChanges.gdb/a00000001.gdbtablx index 44ff0eab..a3c9c64a 100644 Binary files a/tests/data/checkForChanges.gdb/a00000001.gdbtablx and b/tests/data/checkForChanges.gdb/a00000001.gdbtablx differ diff --git a/tests/data/checkForChanges.gdb/a00000004.CatItemsByPhysicalName.atx b/tests/data/checkForChanges.gdb/a00000004.CatItemsByPhysicalName.atx index 63ef4202..ba61e3f9 100644 Binary files a/tests/data/checkForChanges.gdb/a00000004.CatItemsByPhysicalName.atx and b/tests/data/checkForChanges.gdb/a00000004.CatItemsByPhysicalName.atx differ diff --git a/tests/data/checkForChanges.gdb/a00000004.CatItemsByType.atx b/tests/data/checkForChanges.gdb/a00000004.CatItemsByType.atx index 76808bda..c3ee2134 100644 Binary files a/tests/data/checkForChanges.gdb/a00000004.CatItemsByType.atx and b/tests/data/checkForChanges.gdb/a00000004.CatItemsByType.atx differ diff --git a/tests/data/checkForChanges.gdb/a00000004.FDO_UUID.atx b/tests/data/checkForChanges.gdb/a00000004.FDO_UUID.atx index 2f560e4c..70d4eadf 100644 Binary files a/tests/data/checkForChanges.gdb/a00000004.FDO_UUID.atx and b/tests/data/checkForChanges.gdb/a00000004.FDO_UUID.atx differ diff --git a/tests/data/checkForChanges.gdb/a00000004.freelist b/tests/data/checkForChanges.gdb/a00000004.freelist index b59a3789..a3995416 100644 Binary files a/tests/data/checkForChanges.gdb/a00000004.freelist and b/tests/data/checkForChanges.gdb/a00000004.freelist differ diff --git a/tests/data/checkForChanges.gdb/a00000004.gdbtable b/tests/data/checkForChanges.gdb/a00000004.gdbtable index 6c106daa..824fa372 100644 Binary files a/tests/data/checkForChanges.gdb/a00000004.gdbtable and b/tests/data/checkForChanges.gdb/a00000004.gdbtable differ diff --git a/tests/data/checkForChanges.gdb/a00000004.gdbtablx b/tests/data/checkForChanges.gdb/a00000004.gdbtablx index 1c347157..c883dd40 100644 Binary files 
a/tests/data/checkForChanges.gdb/a00000004.gdbtablx and b/tests/data/checkForChanges.gdb/a00000004.gdbtablx differ diff --git a/tests/data/checkForChanges.gdb/a00000006.CatRelsByDestinationID.atx b/tests/data/checkForChanges.gdb/a00000006.CatRelsByDestinationID.atx index e55cd1c3..aa7d8e56 100644 Binary files a/tests/data/checkForChanges.gdb/a00000006.CatRelsByDestinationID.atx and b/tests/data/checkForChanges.gdb/a00000006.CatRelsByDestinationID.atx differ diff --git a/tests/data/checkForChanges.gdb/a00000006.CatRelsByOriginID.atx b/tests/data/checkForChanges.gdb/a00000006.CatRelsByOriginID.atx index 52c14ad8..4d021eec 100644 Binary files a/tests/data/checkForChanges.gdb/a00000006.CatRelsByOriginID.atx and b/tests/data/checkForChanges.gdb/a00000006.CatRelsByOriginID.atx differ diff --git a/tests/data/checkForChanges.gdb/a00000006.CatRelsByType.atx b/tests/data/checkForChanges.gdb/a00000006.CatRelsByType.atx index 65adcf34..cd02d239 100644 Binary files a/tests/data/checkForChanges.gdb/a00000006.CatRelsByType.atx and b/tests/data/checkForChanges.gdb/a00000006.CatRelsByType.atx differ diff --git a/tests/data/checkForChanges.gdb/a00000006.FDO_UUID.atx b/tests/data/checkForChanges.gdb/a00000006.FDO_UUID.atx index d18c3321..60e817e2 100644 Binary files a/tests/data/checkForChanges.gdb/a00000006.FDO_UUID.atx and b/tests/data/checkForChanges.gdb/a00000006.FDO_UUID.atx differ diff --git a/tests/data/checkForChanges.gdb/a00000006.freelist b/tests/data/checkForChanges.gdb/a00000006.freelist index 927dad38..ef08d147 100644 Binary files a/tests/data/checkForChanges.gdb/a00000006.freelist and b/tests/data/checkForChanges.gdb/a00000006.freelist differ diff --git a/tests/data/checkForChanges.gdb/a00000006.gdbtable b/tests/data/checkForChanges.gdb/a00000006.gdbtable index 7d16e37d..7eff7946 100644 Binary files a/tests/data/checkForChanges.gdb/a00000006.gdbtable and b/tests/data/checkForChanges.gdb/a00000006.gdbtable differ diff --git a/tests/data/checkForChanges.gdb/a00000006.gdbtablx b/tests/data/checkForChanges.gdb/a00000006.gdbtablx index b85cc8bc..d5c55a16 100644 Binary files a/tests/data/checkForChanges.gdb/a00000006.gdbtablx and b/tests/data/checkForChanges.gdb/a00000006.gdbtablx differ diff --git a/tests/data/checkForChanges.gdb/a00000019.freelist b/tests/data/checkForChanges.gdb/a00000019.freelist new file mode 100755 index 00000000..ad1639a3 Binary files /dev/null and b/tests/data/checkForChanges.gdb/a00000019.freelist differ diff --git a/tests/data/checkForChanges.gdb/a00000019.gdbtable b/tests/data/checkForChanges.gdb/a00000019.gdbtable index 9674d40d..401890c9 100644 Binary files a/tests/data/checkForChanges.gdb/a00000019.gdbtable and b/tests/data/checkForChanges.gdb/a00000019.gdbtable differ diff --git a/tests/data/checkForChanges.gdb/a00000019.gdbtablx b/tests/data/checkForChanges.gdb/a00000019.gdbtablx index ddb22c75..8957e5b3 100644 Binary files a/tests/data/checkForChanges.gdb/a00000019.gdbtablx and b/tests/data/checkForChanges.gdb/a00000019.gdbtablx differ diff --git a/tests/data/checkForChanges.gdb/a00000019.spx b/tests/data/checkForChanges.gdb/a00000019.spx index 08438655..d9da4ef5 100644 Binary files a/tests/data/checkForChanges.gdb/a00000019.spx and b/tests/data/checkForChanges.gdb/a00000019.spx differ diff --git a/tests/data/checkForChanges.gdb/a0000001b.G23Code.atx b/tests/data/checkForChanges.gdb/a0000001b.G23Code.atx index 684ec1b4..fd0c3ec2 100644 Binary files a/tests/data/checkForChanges.gdb/a0000001b.G23Code.atx and b/tests/data/checkForChanges.gdb/a0000001b.G23Code.atx differ 
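For context on how the new models are intended to be consumed, the following sketch shows a hypothetical pallet module written against the Pallet/Crate API added in src/forklift/models.py above. It is illustrative only and not part of this change set; the class name, workspace paths, and table names are made up.

#!/usr/bin/env python
# * coding: utf8 *
'''
example_pallet.py

Illustrative sketch only (not part of this patch): a minimal pallet built
against the Pallet/Crate API introduced in src/forklift/models.py.
The workspace paths and table names below are hypothetical.
'''

from forklift.models import Pallet


class ParcelsPallet(Pallet):

    def __init__(self):
        super(ParcelsPallet, self).__init__()

        #: one crate per dependent dataset; a bare string uses the defaults,
        #: a tuple maps to (source_name, source_workspace,
        #: destination_workspace, destination_name)
        self.add_crates(
            ['Parcels',
             ('Owners', 'C:\\MapData\\source.sde', 'C:\\MapData\\app.gdb', 'ParcelOwners')],
            {'source_workspace': 'C:\\MapData\\source.sde',
             'destination_workspace': 'C:\\MapData\\app.gdb'})

    def validate_crate(self, crate):
        #: optional hook; returning NotImplemented falls back to the
        #: default schema check in core
        return NotImplemented

    def process(self):
        #: called by forklift when any of the crates report updated data
        self.log.info('post processing: %s', self.get_report())

    def ship(self):
        #: fires whether or not the crates have any updates
        pass

A file like this, dropped into one of the configured pallet folders, would be discovered by `forklift list-pallets` and run by `forklift lift`, with each crate passed through core.update before process and ship are invoked.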
diff --git a/tests/data/checkForChanges.gdb/a0000001b.freelist b/tests/data/checkForChanges.gdb/a0000001b.freelist new file mode 100755 index 00000000..3fa9d07a Binary files /dev/null and b/tests/data/checkForChanges.gdb/a0000001b.freelist differ diff --git a/tests/data/checkForChanges.gdb/a0000001b.gdbtable b/tests/data/checkForChanges.gdb/a0000001b.gdbtable index e3273d59..4ac6fca8 100644 Binary files a/tests/data/checkForChanges.gdb/a0000001b.gdbtable and b/tests/data/checkForChanges.gdb/a0000001b.gdbtable differ diff --git a/tests/data/checkForChanges.gdb/a0000001b.gdbtablx b/tests/data/checkForChanges.gdb/a0000001b.gdbtablx index 0ff94741..5ddbcc91 100644 Binary files a/tests/data/checkForChanges.gdb/a0000001b.gdbtablx and b/tests/data/checkForChanges.gdb/a0000001b.gdbtablx differ diff --git a/tests/data/checkForChanges.gdb/a0000002a.gdbindexes b/tests/data/checkForChanges.gdb/a0000002a.gdbindexes new file mode 100755 index 00000000..cc24e2a0 Binary files /dev/null and b/tests/data/checkForChanges.gdb/a0000002a.gdbindexes differ diff --git a/tests/data/checkForChanges.gdb/a0000002a.gdbtable b/tests/data/checkForChanges.gdb/a0000002a.gdbtable new file mode 100755 index 00000000..aac2d579 Binary files /dev/null and b/tests/data/checkForChanges.gdb/a0000002a.gdbtable differ diff --git a/tests/data/checkForChanges.gdb/a0000002a.gdbtablx b/tests/data/checkForChanges.gdb/a0000002a.gdbtablx new file mode 100755 index 00000000..6f071b9e Binary files /dev/null and b/tests/data/checkForChanges.gdb/a0000002a.gdbtablx differ diff --git a/tests/data/checkForChanges.gdb/a0000002a.spx b/tests/data/checkForChanges.gdb/a0000002a.spx new file mode 100755 index 00000000..6d236bde Binary files /dev/null and b/tests/data/checkForChanges.gdb/a0000002a.spx differ diff --git a/tests/data/checkForChanges.gdb/a0000002b.gdbindexes b/tests/data/checkForChanges.gdb/a0000002b.gdbindexes new file mode 100755 index 00000000..cc24e2a0 Binary files /dev/null and b/tests/data/checkForChanges.gdb/a0000002b.gdbindexes differ diff --git a/tests/data/checkForChanges.gdb/a0000002b.gdbtable b/tests/data/checkForChanges.gdb/a0000002b.gdbtable new file mode 100755 index 00000000..fcf43cfb Binary files /dev/null and b/tests/data/checkForChanges.gdb/a0000002b.gdbtable differ diff --git a/tests/data/checkForChanges.gdb/a0000002b.gdbtablx b/tests/data/checkForChanges.gdb/a0000002b.gdbtablx new file mode 100755 index 00000000..1ced149f Binary files /dev/null and b/tests/data/checkForChanges.gdb/a0000002b.gdbtablx differ diff --git a/tests/data/checkForChanges.gdb/a0000002b.spx b/tests/data/checkForChanges.gdb/a0000002b.spx new file mode 100755 index 00000000..92effcdf Binary files /dev/null and b/tests/data/checkForChanges.gdb/a0000002b.spx differ diff --git a/tests/data/checkForChanges.gdb/timestamps b/tests/data/checkForChanges.gdb/timestamps index 254035ed..5a0295b5 100644 Binary files a/tests/data/checkForChanges.gdb/timestamps and b/tests/data/checkForChanges.gdb/timestamps differ diff --git a/tests/data/list_pallets/multiple_pallets.py b/tests/data/list_pallets/multiple_pallets.py index c148d9ee..99d81228 100644 --- a/tests/data/list_pallets/multiple_pallets.py +++ b/tests/data/list_pallets/multiple_pallets.py @@ -6,31 +6,26 @@ A module that contains pallets to be used in test_lift.py tests ''' -from forklift.pallet import Pallet +from forklift.models import Pallet class PalletOne(Pallet): def __init__(self): super(PalletOne, self).__init__() - self.expires_in_hours = 1 - - self.set_default_source_location() 
self.add_crates(['fc1', 'fc2', ('fc3', 'source', 'destination'), ('fc4', 'source', 'destination', 'fc4_new')], - {'source': 'C:\\MapData\\UDNR.sde', - 'destination': 'C:\\MapData\\UDNR.gdb'}) + {'source_workspace': 'C:\\MapData\\UDNR.sde', + 'destination_workspace': 'C:\\MapData\\UDNR.gdb'}) class PalletTwo(Pallet): def __init__(self): - super(PalletTwo, self).__init() - self.expires_in_hours = 2 - self.dependencies = ['c', 'd'] + super(PalletTwo, self).__init__() - def execute(self): + def ship(self): print('execute: overridden') diff --git a/tests/data/list_pallets/single_pallet.py b/tests/data/list_pallets/single_pallet.py index d0756c29..bb89b466 100644 --- a/tests/data/list_pallets/single_pallet.py +++ b/tests/data/list_pallets/single_pallet.py @@ -6,11 +6,10 @@ A module that contains pallets to be used in test_lift.py tests ''' -from forklift.pallet import Pallet +from forklift.models import Pallet class SinglePallet(Pallet): def __init__(self): super(SinglePallet, self).__init__() - self.dependencies = ['a', 'b'] diff --git a/tests/test_cli.py b/tests/test_cli.py new file mode 100644 index 00000000..8a1e5730 --- /dev/null +++ b/tests/test_cli.py @@ -0,0 +1,138 @@ +#!/usr/bin/env python +# * coding: utf8 * +''' +test_lift.py + +A module that contains tests for the cli.py module +''' + +import unittest +from forklift import cli +from json import loads +from os import remove +from os.path import abspath, dirname, join, exists +from mock import patch + + +test_data_folder = join(dirname(abspath(__file__)), 'data') +test_pallets_folder = join(test_data_folder, 'list_pallets') + + +class TestCli(unittest.TestCase): + + def setUp(self): + if exists('config.json'): + remove('config.json') + + def tearDown(self): + if exists('config.json'): + remove('config.json') + + def test_init_creates_config_file(self): + path = cli.init() + + self.assertTrue(exists(path)) + + with open(path) as config: + self.assertEquals(['c:\\scheduled'], loads(config.read())) + + def test_init_returns_if_existing_config_file(self): + cli._set_config_folders(['blah']) + + self.assertEquals(cli.init(), 'config file already created.') + + def test_list_pallets(self): + test_pallets_folder = join(test_data_folder, 'list_pallets') + pallets = cli.list_pallets(folders=[test_pallets_folder]) + + self.assertEquals(len(pallets), 3) + self.assertEquals(pallets[0][0], join(test_pallets_folder, 'multiple_pallets.py')) + self.assertEquals(pallets[0][1], 'PalletOne') + + def test_list_config_folders(self): + cli._set_config_folders(['blah', 'blah2']) + + result = cli.list_config_folders() + + self.assertEquals(result, ['blah: invalid!', 'blah2: invalid!']) + + def get_config_folders(self): + folders = ['blah', 'blah2'] + cli.init() + cli._set_config_folders(folders) + + self.assertEquals(cli.get_config_folders, folders) + + def get_config_folders_checks_for_existing_config_file(self): + self.assertRaises(Exception('config file not found.'), cli.get_config_folders) + + def test_list_pallets_from_config(self): + cli.init() + cli.add_config_folder(test_pallets_folder) + pallets = cli.list_pallets() + + self.assertEquals(len(pallets), 3) + self.assertEquals(pallets[0][0], join(test_pallets_folder, 'multiple_pallets.py')) + self.assertEquals(pallets[0][1], 'PalletOne') + + def test_set_config_paths_requires_list(self): + self.assertRaises(Exception, cli._set_config_folders, 'hello') + + def test_add_config_folder(self): + path = cli.init() + + cli.add_config_folder(abspath('tests\data')) + + with open(path) as config: + 
self.assertEquals(['c:\\scheduled', abspath('tests\data')], loads(config.read())) + + def test_add_config_folder_invalid(self): + cli.init() + + result = cli.add_config_folder('bad folder') + + self.assertIn('invalid!', result) + + def test_add_config_folder_checks_for_duplicates(self): + path = cli.init() + + cli.add_config_folder(abspath('tests\data')) + cli.add_config_folder(abspath('tests\data')) + + with open(path) as config: + self.assertEquals(['c:\\scheduled', abspath('tests\data')], loads(config.read())) + + def test_remove_config_folder(self): + path = cli.init() + test_config_path = join(test_data_folder, 'remove_test_config.json') + + with open(path, 'w') as json_data_file, open(test_config_path) as test_config_file: + json_data_file.write(test_config_file.read()) + + cli.remove_config_folder('path/one') + + with open(path) as test_config_file: + self.assertEquals(['path/two'], loads(test_config_file.read())) + + def test_remove_config_folder_checks_for_existing(self): + cli.init() + + self.assertEquals('{} is not in the config folders list!'.format('blah'), cli.remove_config_folder('blah')) + + +@patch('forklift.lift.process_crates_for') +@patch('forklift.lift.process_pallets') +class TestCliStartLift(unittest.TestCase): + + def test_lift_with_path(self, process_pallets, process_crates_for): + cli.start_lift(join(test_pallets_folder, 'multiple_pallets.py')) + + self.assertEqual(len(process_crates_for.call_args[0][0]), 2) + self.assertEqual(len(process_pallets.call_args[0][0]), 2) + + def test_lift_with_out_path(self, process_pallets, process_crates_for): + cli._set_config_folders([test_pallets_folder]) + cli.start_lift() + + self.assertEqual(len(process_crates_for.call_args[0][0]), 3) + self.assertEqual(len(process_pallets.call_args[0][0]), 3) diff --git a/tests/test_core.py b/tests/test_core.py index 2b8dcf7f..33cd7a1a 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -8,94 +8,195 @@ import arcpy import unittest -from forklift.core import Core +from forklift import core +from forklift.models import Crate +from forklift.exceptions import ValidationException from os import path from nose import SkipTest +from mock import Mock, patch -class CoreTests(unittest.TestCase): +current_folder = path.dirname(path.abspath(__file__)) +check_for_changes_gdb = path.join(current_folder, 'data', 'checkForChanges.gdb') +check_for_changes_gdb2 = path.join(current_folder, 'data', 'checkForChanges2.gdb') +update_tests_sde = path.join(current_folder, 'data', 'UPDATE_TESTS.sde') +test_gdb = path.join(current_folder, 'data', 'test.gdb') - current_folder = path.dirname(path.abspath(__file__)) - check_for_changes_gdb = path.join(current_folder, 'data', 'checkForChanges.gdb') - check_for_changes_gdb2 = path.join(current_folder, 'data', 'checkForChanges2.gdb') - update_tests_sde = path.join(current_folder, 'data', 'UPDATE_TESTS.sde') - test_gdb = path.join(current_folder, 'data', 'test.gdb') - def setUp(self): - self.patient = Core() +def raise_validation_exception(crate): + raise ValidationException() - if arcpy.Exists(self.test_gdb): - arcpy.Delete_management(self.test_gdb) + +class CoreTests(unittest.TestCase): + + def setUp(self): + if arcpy.Exists(test_gdb): + arcpy.Delete_management(test_gdb) def tearDown(self): - if arcpy.Exists(self.test_gdb): - arcpy.Delete_management(self.test_gdb) + if arcpy.Exists(test_gdb): + arcpy.Delete_management(test_gdb) def check_for_local_sde(self): - if not arcpy.Exists(path.join(self.update_tests_sde, 'ZipCodes')): + if not 
                arcpy.Exists(path.join(update_tests_sde, 'ZipCodes')):
             raise SkipTest('No test SDE dectected, skipping test')
 
     def run_check_for_changes(self, fc1, fc2):
-        f1 = path.join(self.check_for_changes_gdb, fc1)
-        f2 = path.join(self.check_for_changes_gdb, fc2)
+        return core._check_for_changes(Crate(fc1, check_for_changes_gdb, check_for_changes_gdb, fc2))
+
+    def test_update_no_existing_destination(self):
+        core._create_destination_data = Mock()
+
+        crate = Crate('badname', 'nofolder', '')
+
+        self.assertEquals(core.update(crate, lambda x: True), Crate.CREATED)
+        core._create_destination_data.assert_called_once()
+
+    @patch('arcpy.Exists')
+    def test_update_custom_validation_that_fails(self, arcpy_exists):
+        arcpy_exists.return_value = True
+
+        crate = Crate('', '', '')
+
+        self.assertEquals(core.update(crate, raise_validation_exception)[0], Crate.INVALID_DATA)
+
+    @patch('arcpy.Exists')
+    def test_update_default_validation_that_fails(self, arcpy_exists):
+        arcpy_exists.return_value = True
+        core._check_schema = Mock(side_effect=ValidationException())
+
+        def custom(crate):
+            return NotImplemented
-        return self.patient.check_for_changes(f1, f2, False)
 
+        crate = Crate('', '', '')
+
+        self.assertEquals(core.update(crate, custom)[0], Crate.INVALID_DATA)
+
+    @patch('arcpy.Exists')
+    def test_update_successfully_updated(self, arcpy_exists):
+        arcpy_exists.return_value = True
+        core._check_for_changes = Mock(return_value=True)
+        core._move_data = Mock()
+
+        crate = Crate('', '', '')
+
+        self.assertEquals(core.update(crate, lambda c: True), Crate.UPDATED)
+
+    @patch('arcpy.Exists')
+    def test_update_no_changes(self, arcpy_exists):
+        arcpy_exists.return_value = True
+        core._check_for_changes = Mock(return_value=False)
+
+        crate = Crate('', '', '')
+
+        self.assertEquals(core.update(crate, lambda c: True), Crate.NO_CHANGES)
+
+    @patch('arcpy.Exists')
+    def test_update_error(self, arcpy_exists):
+        arcpy_exists.return_value = True
+        core._check_for_changes = Mock(side_effect=Exception('error'))
+
+        crate = Crate('', '', '')
+
+        self.assertEquals(core.update(crate, lambda c: True), (Crate.UNHANDLED_EXCEPTION, 'error'))
 
     def test_check_for_changes(self):
         self.assertFalse(self.run_check_for_changes('ZipCodes', 'ZipCodes_same'))
         self.assertTrue(self.run_check_for_changes('ZipCodes', 'ZipCodes_geoMod'))
         self.assertTrue(self.run_check_for_changes('ZipCodes', 'ZipCodes_attMod'))
         self.assertTrue(self.run_check_for_changes('ZipCodes', 'ZipCodes_newFeature'))
+        self.assertFalse(self.run_check_for_changes('DNROilGasWells', 'DNROilGasWells'))
+        self.assertFalse(self.run_check_for_changes('Line', 'Line'))
+        self.assertFalse(self.run_check_for_changes('NullShape', 'NullShape'))
+        self.assertFalse(self.run_check_for_changes('Providers', 'Providers'))
+        self.assertTrue(self.run_check_for_changes('NullDates', 'NullDates2'))
 
     def test_check_for_changes_null_date_fields(self):
         self.assertTrue(self.run_check_for_changes('NullDates', 'NullDates2'))
 
     def test_filter_shape_fields(self):
-        self.assertEquals(self.patient._filter_fields(['shape', 'test', 'Shape_length', 'Global_ID']), ['test'])
+        self.assertEquals(
+            core._filter_fields(['shape', 'test', 'Shape_length', 'Global_ID']), ['test'])
 
     def test_schema_changes(self):
-        arcpy.Copy_management(self.check_for_changes_gdb, self.test_gdb)
+        arcpy.Copy_management(check_for_changes_gdb, test_gdb)
 
-        result = self.patient.check_schema(path.join(self.test_gdb, 'ZipCodes'),
-                                           path.join(self.check_for_changes_gdb, 'FieldLength'))
+        result = core._check_schema(
+            path.join(test_gdb, 'ZipCodes'),
+            path.join(check_for_changes_gdb, 'FieldLength'))
 
         self.assertEquals(result, False)
 
-        result = self.patient.check_schema(path.join(self.test_gdb, 'ZipCodes'),
-                                           path.join(self.check_for_changes_gdb, 'ZipCodes'))
+        result = core._check_schema(
+            path.join(test_gdb, 'ZipCodes'),
+            path.join(check_for_changes_gdb, 'ZipCodes'))
 
         self.assertEquals(result, True)
 
-    def test_schema_changes_ignore_length_for_all_except_text(self):
+    def test_check_schema_ignore_length_for_all_except_text(self):
         self.check_for_local_sde()
 
         # only worry about length on text fields
-        result = self.patient.check_schema(path.join(self.update_tests_sde,
-                                                     r'UPDATE_TESTS.DBO.Hello\UPDATE_TESTS.DBO.DNROilGasWells'),
-                                           path.join(self.check_for_changes_gdb, 'DNROilGasWells'))
+        result = core._check_schema(
+            path.join(update_tests_sde, r'UPDATE_TESTS.DBO.Hello\UPDATE_TESTS.DBO.DNROilGasWells'),
+            path.join(check_for_changes_gdb, 'DNROilGasWells'))
 
         self.assertEquals(result, True)
 
-    def test_no_updates(self):
+    def test_move_data_table(self):
         self.check_for_local_sde()
 
-        arcpy.Copy_management(self.check_for_changes_gdb2, self.test_gdb)
+        arcpy.Copy_management(check_for_changes_gdb, test_gdb)
 
-        changes = self.patient.update_fgdb_from_sde(self.test_gdb, self.update_tests_sde)
+        crate = Crate('Providers', update_tests_sde, test_gdb) #: table
+        core._move_data(crate)
 
-        self.assertEquals(len(changes), 0)
+        self.assertEquals(int(arcpy.GetCount_management(crate.destination).getOutput(0)), 57)
 
-    def test_updates(self):
+    def test_move_data_feature_class(self):
         self.check_for_local_sde()
 
-        arcpy.Copy_management(self.check_for_changes_gdb, self.test_gdb)
+        arcpy.Copy_management(check_for_changes_gdb, test_gdb)
 
-        changes = self.patient.update_fgdb_from_sde(self.test_gdb, self.update_tests_sde)
+        crate = Crate('DNROilGasWells', update_tests_sde, test_gdb) #: feature class
+        core._move_data(crate)
 
-        self.assertEquals(changes[1], 'PROVIDERS') # table
-        self.assertEquals(changes[0], 'DNROILGASWELLS') # within dataset
+        self.assertEquals(int(arcpy.GetCount_management(crate.destination).getOutput(0)), 5)
 
     def test_check_schema_match(self):
-        self.assertEquals(self.patient.check_schema(path.join(self.check_for_changes_gdb, 'FieldLength'),
-                                                    path.join(self.check_for_changes_gdb, 'FieldLength2')), False)
-
-        self.assertEquals(self.patient.check_schema(path.join(self.check_for_changes_gdb, 'FieldType'),
-                                                    path.join(self.check_for_changes_gdb, 'FieldType2')), False)
-
-        self.assertEquals(self.patient.check_schema(path.join(self.check_for_changes_gdb, 'ZipCodes'),
-                                                    path.join(self.check_for_changes_gdb2, 'ZipCodes')), True)
+        self.assertEquals(
+            core._check_schema(
+                path.join(check_for_changes_gdb, 'FieldLength'),
+                path.join(check_for_changes_gdb, 'FieldLength2')), False)
+
+        self.assertEquals(
+            core._check_schema(
+                path.join(check_for_changes_gdb, 'FieldType'),
+                path.join(check_for_changes_gdb, 'FieldType2')), False)
+
+        self.assertEquals(
+            core._check_schema(
+                path.join(check_for_changes_gdb, 'ZipCodes'),
+                path.join(check_for_changes_gdb2, 'ZipCodes')), True)
+
+    def test_create_destination_data_feature_class(self):
+        arcpy.CreateFileGDB_management(path.join(current_folder, 'data'), 'test.gdb')
+
+        fc_crate = Crate('DNROilGasWells', check_for_changes_gdb, test_gdb)
+        core._create_destination_data(fc_crate)
+        self.assertTrue(arcpy.Exists(fc_crate.destination))
+
+    def test_create_destination_data_table(self):
+        arcpy.CreateFileGDB_management(path.join(current_folder, 'data'), 'test.gdb')
+
+        tbl_crate = Crate('Providers', check_for_changes_gdb, test_gdb)
+        core._create_destination_data(tbl_crate)
+        self.assertTrue(arcpy.Exists(tbl_crate.destination))
+
+    def test_create_destination_data_reproject(self):
+        arcpy.CreateFileGDB_management(path.join(current_folder, 'data'), 'test.gdb')
+
+        spatial_reference = arcpy.SpatialReference(3857)
+        fc_crate = Crate('DNROilGasWells',
+                         check_for_changes_gdb,
+                         test_gdb,
+                         destination_coordinate_system=spatial_reference,
+                         geographic_transformation='NAD_1983_To_WGS_1984_5')
+        core._create_destination_data(fc_crate)
+        self.assertTrue(arcpy.Exists(fc_crate.destination))
+        self.assertEquals(arcpy.Describe(fc_crate.destination).spatialReference.name, spatial_reference.name)
diff --git a/tests/test_crate.py b/tests/test_crate.py
index da5151b4..f7710f00 100644
--- a/tests/test_crate.py
+++ b/tests/test_crate.py
@@ -7,15 +7,15 @@
 '''
 
 import unittest
-from forklift.crate import Crate
+from forklift.models import Crate
 
 
 class TestCrate(unittest.TestCase):
 
     def test_pass_all_values(self):
         crate = Crate('sourceName', 'blah', 'hello', 'blur')
         self.assertEquals(crate.source_name, 'sourceName')
-        self.assertEquals(crate.source, 'blah')
-        self.assertEquals(crate.destination, 'hello')
+        self.assertEquals(crate.source_workspace, 'blah')
+        self.assertEquals(crate.destination_workspace, 'hello')
         self.assertEquals(crate.destination_name, 'blur')
 
     def test_destination_name_defaults_to_source(self):
diff --git a/tests/test_lift.py b/tests/test_lift.py
index 7ffdc9c8..e4688aca 100644
--- a/tests/test_lift.py
+++ b/tests/test_lift.py
@@ -1,76 +1,89 @@
 #!/usr/bin/env python
 # * coding: utf8 *
 '''
-test_lift.py
+test_forklift.py
 
-A module that contains tests for the lift.py module
+A module for testing lift.py
 '''
 
 import unittest
 from forklift import lift
-from json import loads
-from os import remove
-from os.path import abspath, dirname, join, exists
+from forklift.models import Pallet, Crate
+from mock import Mock
 
 
 class TestLift(unittest.TestCase):
 
-    test_data_folder = join(dirname(abspath(__file__)), 'data')
-
     def setUp(self):
-        if exists('config.json'):
-            remove('config.json')
+        self.PalletMock = Mock(Pallet)
 
-    def tearDown(self):
-        if exists('config.json'):
-            remove('config.json')
+    def test_process_crate_for_set_results(self):
+        crate1 = Crate('', '', 'a', '')
+        crate2 = Crate('', '', 'b', '')
+        pallet = Pallet()
+        pallet._crates = [crate1, crate2]
+        update_def = Mock(return_value=Crate.UPDATED)
+        lift.process_crates_for([pallet], update_def)
 
-    def test_init_creates_config_file(self):
-        path = lift.init()
+        self.assertEquals(update_def.call_count, 2)
+        self.assertEquals(crate1.result, Crate.UPDATED)
+        self.assertEquals(crate2.result, Crate.UPDATED)
 
-        self.assertTrue(exists(path))
+    def test_process_crate_doesnt_call_update_def_on_duplicate_crates(self):
+        crate1 = Crate('', '', 'a', '')
+        crate2 = Crate('', '', 'a', '')
+        pallet = Pallet()
+        pallet._crates = [crate1, crate2]
+        update_def = Mock(return_value=Crate.UPDATED)
+        lift.process_crates_for([pallet], update_def)
 
-        with open(path) as config:
-            self.assertEquals(['c:\\scheduled'], loads(config.read()))
+        self.assertEquals(update_def.call_count, 1)
+        self.assertEquals(crate1.result, Crate.UPDATED)
+        self.assertEquals(crate2.result, Crate.UPDATED)
 
-    def test_list_pallets(self):
-        test_pallets_folder = join(self.test_data_folder, 'list_pallets')
-        pallets = lift.list_pallets(paths=[test_pallets_folder])
+    def test_process_pallets_all_ready_to_ship(self):
+        ready_pallet = self.PalletMock()
+        ready_pallet.is_ready_to_ship.return_value = True
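+        #: both mock pallets report ready, so process_pallets should ship each of them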
 
-        self.assertEquals(len(pallets), 3)
-        self.assertEquals(pallets[0][0], join(test_pallets_folder, 'multiple_pallets.py'))
-        self.assertEquals(pallets[0][1], 'PalletOne')
+        lift.process_pallets([ready_pallet, ready_pallet])
 
-    def test_set_config_paths_requires_list(self):
-        self.assertRaises(Exception, lift._set_config_paths, 'hello')
+        self.assertEquals(ready_pallet.ship.call_count, 2)
 
-    def test_add_pallet_folder(self):
-        path = lift.init()
+    def test_process_pallets_all_requires_processing(self):
+        requires_pallet = self.PalletMock()
+        requires_pallet.is_ready_to_ship.return_value = True
+        requires_pallet.requires_processing.return_value = True
 
-        lift.add_pallet_folder('another/folder')
+        lift.process_pallets([requires_pallet, requires_pallet])
 
-        with open(path) as config:
-            self.assertEquals(['c:\\scheduled', 'another/folder'], loads(config.read()))
+        self.assertEquals(requires_pallet.process.call_count, 2)
 
-    def test_add_pallet_folder_checks_for_duplicates(self):
-        lift.init()
+    def test_process_pallets_mixed_bag(self):
+        pallet1 = Mock(Pallet)('one')
+        pallet1.is_ready_to_ship = Mock(return_value=True)
+        pallet1.requires_processing = Mock(return_value=False)
 
-        lift.add_pallet_folder('another/folder')
-        self.assertRaises(Exception, lift.add_pallet_folder, 'another/folder')
+        pallet2 = Mock(Pallet)('two')
+        pallet2.is_ready_to_ship = Mock(return_value=False)
+        pallet2.requires_processing = Mock(return_value=False)
 
-    def test_remove_pallet_folder(self):
-        path = lift.init()
-        test_config_path = join(self.test_data_folder, 'remove_test_config.json')
+        pallet3 = Mock(Pallet)('three')
+        pallet3.is_ready_to_ship = Mock(return_value=True)
+        pallet3.requires_processing = Mock(return_value=True)
 
-        with open(path, 'w') as json_data_file, open(test_config_path) as test_config_file:
-            json_data_file.write(test_config_file.read())
+        lift.process_pallets([pallet1, pallet2, pallet3])
 
-        lift.remove_pallet_folder('path/one')
+        pallet1.ship.assert_called_once()
+        pallet1.process.assert_not_called()
+        pallet2.ship.assert_not_called()
+        pallet2.process.assert_not_called()
+        pallet3.ship.assert_called_once()
+        pallet3.process.assert_called_once()
 
-        with open(path) as test_config_file:
-            self.assertEquals(['path/two'], loads(test_config_file.read()))
+    def test_process_pallets_returns_reports(self):
+        reports_pallet = self.PalletMock()
+        reports_pallet.get_report.return_value = 'hello'
 
-    def test_remove_pallet_folder_checks_for_existing(self):
-        lift.init()
+        result = lift.process_pallets([reports_pallet, reports_pallet])
 
-        self.assertRaises(Exception, lift.remove_pallet_folder, 'blah')
+        self.assertEquals(result, ['hello', 'hello'])
diff --git a/tests/test_pallet.py b/tests/test_pallet.py
index 9f6758ef..d5bb2609 100644
--- a/tests/test_pallet.py
+++ b/tests/test_pallet.py
@@ -7,7 +7,7 @@
 '''
 
 import unittest
-from forklift.pallet import Pallet
+from forklift.models import Pallet, Crate
 
 
 class Pallet(Pallet):
@@ -45,33 +45,173 @@ def test_add_crates(self):
         source = 'C:\\MapData\\UDNR.sde'
         dest = 'C:\\MapData\\UDNR.gdb'
         self.patient.add_crates(
-            ['fc1', ('fc3', 'source'), ('fc4', 'source', 'destination', 'fc4_new')], {'source': source,
-                                                                                      'destination': dest})
+            ['fc1', ('fc3', 'source'), ('fc4', 'source', 'destination', 'fc4_new')], {'source_workspace': source,
+                                                                                      'destination_workspace': dest})
 
-        self.assertEquals(len(self.patient.crates), 3)
+        self.assertEquals(len(self.patient.get_crates()), 3)
 
         #: single source_name with defaults
-        self.assertEquals(self.patient.crates[0].source_name, 'fc1')
-        self.assertEquals(self.patient.crates[0].source, source)
-        self.assertEquals(self.patient.crates[0].destination, dest)
-        self.assertEquals(self.patient.crates[0].destination_name, 'fc1')
+        self.assertEquals(self.patient.get_crates()[0].source_name, 'fc1')
+        self.assertEquals(self.patient.get_crates()[0].source_workspace, source)
+        self.assertEquals(self.patient.get_crates()[0].destination_workspace, dest)
+        self.assertEquals(self.patient.get_crates()[0].destination_name, 'fc1')
 
-        self.assertEquals(self.patient.crates[1].source, 'source')
-        self.assertEquals(self.patient.crates[1].destination, dest)
+        self.assertEquals(self.patient.get_crates()[1].source_workspace, 'source')
+        self.assertEquals(self.patient.get_crates()[1].destination_workspace, dest)
 
-        self.assertEquals(self.patient.crates[2].destination_name, 'fc4_new')
+        self.assertEquals(self.patient.get_crates()[2].destination_name, 'fc4_new')
 
     def test_add_crates_empty_defaults(self):
         self.patient.add_crates([('fc1', 'source1', 'destination1'), ('fc2', 'source2', 'destination2', 'fc2_new')])
 
-        self.assertEquals(len(self.patient.crates), 2)
+        self.assertEquals(len(self.patient.get_crates()), 2)
 
         #: single source_name with defaults
-        self.assertEquals(self.patient.crates[0].source_name, 'fc1')
-        self.assertEquals(self.patient.crates[0].source, 'source1')
-        self.assertEquals(self.patient.crates[0].destination, 'destination1')
-        self.assertEquals(self.patient.crates[0].destination_name, 'fc1')
-
-        self.assertEquals(self.patient.crates[1].source, 'source2')
-        self.assertEquals(self.patient.crates[1].destination, 'destination2')
-        self.assertEquals(self.patient.crates[1].destination_name, 'fc2_new')
+        self.assertEquals(self.patient.get_crates()[0].source_name, 'fc1')
+        self.assertEquals(self.patient.get_crates()[0].source_workspace, 'source1')
+        self.assertEquals(self.patient.get_crates()[0].destination_workspace, 'destination1')
+        self.assertEquals(self.patient.get_crates()[0].destination_name, 'fc1')
+
+        self.assertEquals(self.patient.get_crates()[1].source_workspace, 'source2')
+        self.assertEquals(self.patient.get_crates()[1].destination_workspace, 'destination2')
+        self.assertEquals(self.patient.get_crates()[1].destination_name, 'fc2_new')
+
+    def test_add_crate(self):
+        self.patient.add_crate(('fc1', 'source1', 'destination1'))
+        self.assertEquals(len(self.patient.get_crates()), 1)
+
+        #: single source_name with defaults
+        self.assertEquals(self.patient.get_crates()[0].source_name, 'fc1')
+        self.assertEquals(self.patient.get_crates()[0].source_workspace, 'source1')
+
+    def test_is_ready_to_ship_no_crates_returns_true(self):
+        self.assertTrue(self.patient.is_ready_to_ship())
+
+    def test_is_ready_to_ship_crates_with_updates_returns_true(self):
+        updated = Crate('', '', '', '')
+        updated.result = Crate.UPDATED
+
+        self.patient._crates = [updated, updated, updated]
+
+        self.assertTrue(self.patient.is_ready_to_ship())
+
+    def test_is_ready_to_ship_crates_with_no_changes_returns_true(self):
+        no_changes = Crate('', '', '', '')
+        no_changes.result = Crate.NO_CHANGES
+
+        self.patient._crates = [no_changes, no_changes, no_changes]
+
+        self.assertTrue(self.patient.is_ready_to_ship())
+
+    def test_is_ready_to_ship_crates_with_updates_and_no_changes_returns_true(self):
+        updated = Crate('', '', '', '')
+        updated.result = Crate.UPDATED
+
+        no_changes = Crate('', '', '', '')
+        no_changes.result = Crate.NO_CHANGES
+
+        self.patient._crates = [no_changes, updated, no_changes]
+
+        self.assertTrue(self.patient.is_ready_to_ship())
+
+    def test_is_ready_to_ship_crates_with_any_schema_changed_returns_false(self):
+        updated = Crate('', '', '', '')
+        updated.result = Crate.UPDATED
+
+        no_changes = Crate('', '', '', '')
+        no_changes.result = Crate.NO_CHANGES
+
+        schema_change = Crate('', '', '', '')
+        schema_change.result = Crate.INVALID_DATA
+
+        self.patient._crates = [updated, no_changes, schema_change]
+
+        self.assertFalse(self.patient.is_ready_to_ship())
+
+    def test_is_ready_to_ship_crates_with_any_exception_returns_false(self):
+        updated = Crate('', '', '', '')
+        updated.result = Crate.UPDATED
+
+        no_changes = Crate('', '', '', '')
+        no_changes.result = Crate.NO_CHANGES
+
+        unhandled_exception = Crate('', '', '', '')
+        unhandled_exception.result = Crate.UNHANDLED_EXCEPTION
+
+        self.patient._crates = [updated, no_changes, unhandled_exception]
+
+        self.assertFalse(self.patient.is_ready_to_ship())
+
+    def test_is_ready_to_ship_crates_with_all_returns_false(self):
+        updated = Crate('', '', '', '')
+        updated.result = Crate.UPDATED
+
+        no_changes = Crate('', '', '', '')
+        no_changes.result = Crate.NO_CHANGES
+
+        schema_change = Crate('', '', '', '')
+        schema_change.result = Crate.INVALID_DATA
+
+        unhandled_exception = Crate('', '', '', '')
+        unhandled_exception.result = Crate.UNHANDLED_EXCEPTION
+
+        self.patient._crates = [updated, no_changes, unhandled_exception, schema_change]
+
+        self.assertFalse(self.patient.is_ready_to_ship())
+
+    def test_requires_processing_with_no_crates_returns_false(self):
+        self.assertFalse(self.patient.requires_processing())
+
+    def test_requires_processing_crates_with_updates_returns_true(self):
+        updated = Crate('', '', '', '')
+        updated.result = Crate.UPDATED
+
+        self.patient._crates = [updated, updated]
+
+        self.assertTrue(self.patient.requires_processing())
+
+    def test_requires_processing_crates_with_updates_and_changes_returns_true(self):
+        updated = Crate('', '', '', '')
+        updated.result = Crate.UPDATED
+
+        no_changes = Crate('', '', '', '')
+        no_changes.result = Crate.NO_CHANGES
+
+        self.patient._crates = [updated, no_changes, no_changes]
+
+        self.assertTrue(self.patient.requires_processing())
+
+    def test_requires_processing_crates_with_update_and_no_changes_returns_true(self):
+        updated = Crate('', '', '', '')
+        updated.result = Crate.UPDATED
+
+        self.patient._crates = [updated, updated, updated]
+
+        self.assertTrue(self.patient.requires_processing())
+
+    def test_requires_processing_crates_with_schema_changes_returns_false(self):
+        updated = Crate('', '', '', '')
+        updated.result = Crate.UPDATED
+
+        schema_change = Crate('', '', '', '')
+        schema_change.result = Crate.INVALID_DATA
+
+        self.patient._crates = [schema_change, updated, updated]
+
+        self.assertFalse(self.patient.requires_processing())
+
+    def test_requires_processing_crates_with_unhandled_exception_returns_false(self):
+        updated = Crate('', '', '', '')
+        updated.result = Crate.UPDATED
+
+        unhandled_exception = Crate('', '', '', '')
+        unhandled_exception.result = Crate.UNHANDLED_EXCEPTION
+
+        self.patient._crates = [updated, updated, unhandled_exception]
+
+        self.assertFalse(self.patient.requires_processing())
+
+    def test_not_implemented(self):
+        self.assertEqual(self.patient.process(), NotImplemented)
+        self.assertEqual(self.patient.ship(), NotImplemented)
+        self.assertEqual(self.patient.validate_crate(None), NotImplemented)
diff --git a/tox.ini b/tox.ini
index dbc40c93..dd17cf43 100644
--- a/tox.ini
+++ b/tox.ini
@@ -16,6 +16,7 @@ commands =
     nosetests --with-id --rednose --cov-config .coveragerc --with-coverage --cover-package forklift --cov-report term-missing --cover-erase {posargs}
 
 [testenv:lint]
+skip_install = true
 usedevelop = False
 commands = flake8
 deps = flake8