diff --git a/.gitignore b/.gitignore
index 7bbc71c..8ecb6ab 100644
--- a/.gitignore
+++ b/.gitignore
@@ -99,3 +99,7 @@ ENV/
 
 # mypy
 .mypy_cache/
+
+# IDE and Editor artifacts #
+*.bbprojectd
+.idea
diff --git a/example/cfg.json b/example/cfg.json
new file mode 100644
index 0000000..efdd717
--- /dev/null
+++ b/example/cfg.json
@@ -0,0 +1,47 @@
+{
+  "core": {
+    "cert_arn": "arn:aws:iot:us-west-2::cert/EXAMPLEEXAMPLEa95f4e32EXAMPLEa888e13EXAMPLEac56337EXAMPLEeed338a",
+    "thing_arn": "arn:aws:iot:us-west-2::thing/",
+    "thing_name": ""
+  },
+  "core_def": {
+    "id": "",
+    "version_arn": ""
+  },
+  "device_def": {
+    "id": "",
+    "version_arn": ""
+  },
+  "devices": {
+    "GGD_example": {
+      "cert_arn": "arn:aws:iot:us-west-2::cert/EXAMPLEEXAMPLEa95f4e32EXAMPLEa888e13EXAMPLEac56337EXAMPLEeed338a",
+      "thing_arn": "arn:aws:iot:us-west-2::thing/",
+      "thing_name": ""
+    }
+  },
+  "group": {
+    "id": ""
+  },
+  "lambda_functions": {
+    "MockDevice": {
+      "arn": "arn:aws:lambda:us-west-2::function:MockDevice:",
+      "arn_qualifier": ""
+    }
+  },
+  "func_def": {
+    "id": "",
+    "version_arn": ""
+  },
+  "logger_def": {
+    "id": "",
+    "version_arn": ""
+  },
+  "subscription_def": {
+    "id": "",
+    "version_arn": ""
+  },
+  "subscriptions": {
+    "errors": "/errors",
+    "telemetry": "/telemetry"
+  }
+}
\ No newline at end of file
diff --git a/example/mock_device.py b/example/mock_device.py
new file mode 100644
index 0000000..9f89056
--- /dev/null
+++ b/example/mock_device.py
@@ -0,0 +1,117 @@
+
+"""
+A mock device Lambda
+"""
+from __future__ import print_function
+import json
+import time
+import random
+import logging
+import greengrasssdk
+
+gg_client = greengrasssdk.client('iot-data')
+
+
+def mock_temp():
+    return random.randint(-30, 115)
+
+
+def mock_voltage():
+    return random.randint(1, 1000)
+
+
+def mock_amperage():
+    return random.uniform(0.0, 40.0)
+
+
+def get_shadow_state():
+    logging.info("[get_shadow_state]")
+    shadow = gg_client.get_thing_shadow(thingName='MockDevice')
+    return json.loads(shadow['payload'])
+
+
+def get_pub_frequency(mock_shadow):
+    if 'state' not in mock_shadow or \
+            'reported' not in mock_shadow['state'] or \
+            'pub_frequency' not in mock_shadow['state']['reported']:
+        return 1
+
+    return mock_shadow['state']['reported']['pub_frequency']
+
+
+def get_pub_topic(mock_shadow):
+    if 'state' not in mock_shadow or \
+            'reported' not in mock_shadow['state'] or \
+            'pub_topic' not in mock_shadow['state']['reported']:
+        return '/mock/telemetry'
+
+    return mock_shadow['state']['reported']['pub_topic']
+
+
+def get_telemetry():
+    return json.dumps([
+        {
+            "version": "2017-05-08",
+            "deviceId": "mock-01",
+            "data": [
+                {
+                    "sensorId": "fake_temperature_01",
+                    "ts": "{0}".format(time.time()),
+                    "value": mock_temp()
+                },
+                {
+                    "sensorId": "fake_temperature_02",
+                    "ts": "{0}".format(time.time()),
+                    "value": mock_temp()
+                }
+            ]
+        },
+        {
+            "version": "2017-05-08",
+            "deviceId": "mock-02",
+            "data": [
+                {
+                    "sensorId": "fake_voltage_01",
+                    "ts": "{0}".format(time.time()),
+                    "value": mock_voltage()
+                },
+                {
+                    "sensorId": "fake_amperage_01",
+                    "ts": "{0}".format(time.time()),
+                    "value": mock_amperage()
+                }
+            ]
+        }
+    ])
+
+
+def publish_telemetry(mock_shadow):
+    response = gg_client.publish(
+        topic=get_pub_topic(mock_shadow),
+        qos=0,
+        payload=get_telemetry()
+    )
+    print("[publish_telemetry] publish resp:{0}".format(response))
+
+
+def run_mock():
+    mock_shadow = get_shadow_state()
+    while True:
+        for count, element in enumerate(range(10), 1):  # Start count from 1
+            publish_telemetry(mock_shadow)
+            time.sleep(get_pub_frequency(mock_shadow))
+            if count % 10 == 0:  # every 10 publishes, refresh mock_shadow
+                mock_shadow = get_shadow_state()
+
+
+# Start publishing as soon as the function container loads this module.
+run_mock()
+
+
+# Handler for processing lambda events
+def handler(event, context):
+    # Unwrap the message
+    msg = json.loads(event)
+    logging.info("[handler] thinking about message: {0}".format(msg))
+
+    # publish some telemetry
+    mock_shadow = get_shadow_state()
+    publish_telemetry(mock_shadow)
\ No newline at end of file
diff --git a/group_setup/__init__.py b/group_setup/__init__.py
new file mode 100644
index 0000000..2382262
--- /dev/null
+++ b/group_setup/__init__.py
@@ -0,0 +1,328 @@
+import os
+import json
+import boto3
+import logging
+from boto3.session import Session
+from botocore.exceptions import ClientError
+
+__version__ = '0.1.0'
+
+logging.basicConfig(format='%(asctime)s|%(name)-8s|%(levelname)s: %(message)s',
+                    level=logging.INFO)
+
+
+class GroupConfigFile(object):
+    def __init__(self, config_file='cfg.json'):
+        super(GroupConfigFile, self).__init__()
+        self.config_file = config_file
+        if self.get_config() is None:
+            raise ValueError("Error reading config file: {0}".format(
+                self.config_file))
+
+    def get_config(self):
+        config = None
+        if os.path.exists(self.config_file) and os.path.isfile(
+                self.config_file):
+            try:
+                with open(self.config_file, "r") as in_file:
+                    config = json.load(in_file)
+            except OSError as ose:
+                logging.error(
+                    'OSError while reading config file. {0}'.format(ose))
+        return config
+
+    def update(self, **kwargs):
+        if len(kwargs.keys()) == 0:
+            logging.warning("No new configuration to update.")
+            return
+
+        config = self.get_config()
+        if 'core' in kwargs:
+            for key, val in kwargs['core'].items():
+                config['core'][key] = val
+            kwargs.pop('core')
+        if 'lambda_functions' in kwargs:
+            for key in kwargs['lambda_functions']:
+                config['lambda_functions'][key] = kwargs['lambda_functions'][
+                    key]
+            kwargs.pop('lambda_functions')
+        if 'devices' in kwargs:
+            for key in kwargs['devices']:
+                config['devices'][key] = kwargs['devices'][key]
+            kwargs.pop('devices')
+        if 'core_def' in kwargs:
+            for key, val in kwargs['core_def'].items():
+                config['core_def'][key] = val
+            kwargs.pop('core_def')
+        if 'device_def' in kwargs:
+            for key, val in kwargs['device_def'].items():
+                config['device_def'][key] = val
+            kwargs.pop('device_def')
+        if 'group' in kwargs.keys():
+            for key, val in kwargs['group'].items():
+                logging.info('Updating group key:{0} and value:{1}'.format(
+                    key, val))
+                config['group'][key] = val
+            kwargs.pop('group')
+
+        if len(kwargs) > 0:
+            # treat the rest of the kwargs as simple property value assignments
+            for key in kwargs.keys():
+                logging.info("Update config key:{0}".format(key))
+                config[key] = kwargs[key]
+        self.write_config(config)
+
+    def write_config(self, config):
+        try:
+            with open(self.config_file, "w") as out_file:
+                json.dump(config, out_file, indent=2,
+                          separators=(',', ': '), sort_keys=True)
+            logging.debug(
+                'Config file:{0} updated.'.format(self.config_file))
+        except OSError as ose:
+            logging.error(
+                'OSError while writing config file. 
{0}'.format(ose)) + + def is_fresh(self): + cfg = self.get_config() + if cfg is not None: + if all(x == '' for x in ( + cfg['group']['id'], cfg['func_def']['id'], + cfg['core_def']['id'], cfg['device_def']['id'], + cfg['logger_def']['id'] + )): + return True + + return False + + def make_fresh(self): + config = self.get_config() + config['group']['id'] = '' + config['group']['version'] = '' + config['group']['version_arn'] = '' + config['core_def']['id'] = '' + config['core_def']['version_arn'] = '' + config['device_def']['id'] = '' + config['device_def']['version_arn'] = '' + config['func_def']['id'] = '' + config['func_def']['version_arn'] = '' + config['logger_def']['id'] = '' + config['logger_def']['version_arn'] = '' + config['subscription_def']['id'] = '' + config['subscription_def']['version_arn'] = '' + self.write_config(config=config) + + def read(self, prop): + return self.get_config()[prop] + + def __getitem__(self, prop): + return self.read(prop) + + def __setitem__(self, key, val): + cfg = self.get_config() + cfg[key] = val + self.write_config(cfg) + + +class GroupType(object): + MOCK_TYPE = 'mock' + + def __init__(self, type_name='mock', config=None, region='us-west-2'): + super(GroupType, self).__init__() + self.type_name = type_name + self.config = config + self.region = region + + # TODO revise the thing policy with min privileges required + # TODO revise the thing policy with min resources required + def create_and_attach_thing_policy(self): + if self.config['core']['thing_name'] is '': + raise ValueError("Config file values seem to be mis-configured.") + + # Create and attach to the principal/certificate the minimal action + # privileges Thing policy that allows publish and subscribe + thing_policy = { + "Version": "2012-10-17", + "Statement": [{ + "Effect": "Allow", + "Action": [ + "iot:*", + "greengrass:*" + # "iot:Connect", + # "iot:Publish", + # "iot:Receive", + # "iot:Subscribe" + ], + "Resource": [ + # "arn:aws:iot:{0}:*:*".format(region) + "*" + ] + }] + } + + iot = Session(region_name=self.region).client('iot') + policy_name = '{0}-{1}'.format(self.type_name, + self.config['core']['thing_name']) + policy = json.dumps(thing_policy) + logging.debug('[create_and_attach_thing_policy] policy:{0}'.format(policy)) + try: + p = iot.create_policy( + policyName=policy_name, + policyDocument=policy + ) + logging.debug( + "[create_and_attach_thing_policy] Created Policy: {0}".format( + p['policyName'])) + + cert_arn = self.config['core']['cert_arn'] + iot.attach_principal_policy(policyName=policy_name, + principal=cert_arn) + logging.debug( + "[create_and_attach_thing_policy] Attached {0} to {1}".format( + policy_name, cert_arn)) + return p['policyName'], p['policyArn'] + + except ClientError as ce: + if ce.response['Error']['Code'] == 'ResourceAlreadyExistsException': + logging.warning( + "[create_and_attach_thing_policy] {0}".format( + ce.response['Error']['Message'])) + # since policy already exists return nothing, assuming previous success + + def create_and_attach_iam_role(self): + logging.info("[begin] [create_and_attach_iam_role]") + iam = Session(region_name=self.region).client('iam') + iam_res = Session(region_name=self.region).resource('iam') + gg_client = boto3.client('greengrass', region_name=self.region) + role_name = '{0}_service_role'.format(self.type_name) + aws_lambda_ro_access_arn = "arn:aws:iam::aws:policy/AWSLambdaReadOnlyAccess" + aws_iot_full_access_arn = "arn:aws:iam::aws:policy/AWSIoTFullAccess" + + assume_role_policy = { + "Version": "2012-10-17", + 
"Statement": [ + { + "Effect": "Allow", + "Principal": { + "Service": "greengrass.amazonaws.com" + }, + "Action": "sts:AssumeRole" + } + ] + } + gg_inline_policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "g3s20170630", + "Effect": "Allow", + "Action": [ + "greengrass:*" + ], + "Resource": [ + "*" + ] + } + ] + } + try: + resp = iam.create_role( + RoleName=role_name, + AssumeRolePolicyDocument=json.dumps(assume_role_policy) + ) + logging.debug( + "[create_and_attach_iam_role] create_role {0}".format(resp)) + resp = iam.attach_role_policy( + RoleName=role_name, + PolicyArn=aws_lambda_ro_access_arn + ) + logging.debug( + "[create_and_attach_iam_role] attach_policy 1 {0}".format(resp)) + resp = iam.attach_role_policy( + RoleName=role_name, + PolicyArn=aws_iot_full_access_arn + ) + logging.debug( + "[create_and_attach_iam_role] attach_policy 2 {0}".format(resp)) + resp = iam.put_role_policy( + RoleName=role_name, + PolicyName='g3s_inline_policy', + PolicyDocument=json.dumps(gg_inline_policy) + ) + logging.debug( + "[create_and_attach_iam_role] put_policy {0}".format(resp)) + role = iam_res.Role(role_name) + gg_client.attach_service_role_to_account(RoleArn=role.arn) + logging.info( + "[end] [create_and_attach_iam_role] attached service role") + + except ClientError as ce: + if ce.response['Error']['Code'] == 'ResourceAlreadyExistsException': + logging.warning( + "[create_and_attach_iam_role] {0}".format( + ce.response['Error']['Message'])) + else: + logging.error("[create_and_attach_iam_role] {0}".format( + ce.response['Error']['Message'])) + # since role already exists return nothing, assuming previous success + + def get_core_definition(self, config): + if GroupType.MOCK_TYPE is self.type_name: + return [{ + "ThingArn": config['core']['thing_arn'], + "CertificateArn": config['core']['cert_arn'], + "Id": "{0}_00".format(self.type_name), + "SyncShadow": True + }] + else: + raise NotImplementedError( + 'Override get_core_definition for group type {0}.'.format( + self.type_name + )) + + def get_device_definition(self, config): + if GroupType.MOCK_TYPE is self.type_name: + return [{ + "Id": "{0}_10".format(self.type_name), + "ThingArn": config['devices']['GGD_example']['thing_arn'], + "CertificateArn": + config['devices']['GGD_example']['cert_arn'], + "SyncShadow": False + }] + else: + raise NotImplementedError( + 'Override get_device_definition for group type {0}.'.format( + self.type_name + )) + + def get_subscription_definition(self, config): + d = config['devices'] + l = config['lambda_functions'] + s = config['subscriptions'] + + if GroupType.MOCK_TYPE is self.type_name: + return [ + { + "Id": "1", + "Source": d['GGD_example']['thing_arn'], + "Subject": s['telemetry'], + "Target": l['MockDevice']['arn'] + }, + { + "Id": "4", + "Source": d['GGD_example']['thing_arn'], + "Subject": s['telemetry'], + "Target": "cloud" + }, + { + "Id": "14", + "Source": l['MockDevice']['arn'], + "Subject": s['errors'], + "Target": "cloud" + } + ] + else: + raise NotImplementedError( + 'Override get_subscription_definition for group type {0}.'.format( + self.type_name + )) diff --git a/group_setup/cmd.py b/group_setup/cmd.py new file mode 100755 index 0000000..4e12dc6 --- /dev/null +++ b/group_setup/cmd.py @@ -0,0 +1,327 @@ +#!/usr/bin/env python + +import fire +import boto3 +import logging +from botocore.exceptions import ClientError +from group_setup import GroupConfigFile, GroupType + +logging.basicConfig(format='%(asctime)s|%(name)-8s|%(levelname)s: %(message)s', + level=logging.INFO) + +# 
add your own additional group types to this dict for use by the commands +group_types = { + GroupType.MOCK_TYPE: GroupType() +} + + +def create(config_file, group_type=GroupType.MOCK_TYPE, + group_name=None, region='us-west-2'): + """ + Create a Greengrass group in the given region. + + config_file: config file of the group to create + group_type: either the default or an overridden group type + group_name: the name of the group. If no name is given then group_type + will be used. + region: the region in which to create the new group. + """ + config = GroupConfigFile(config_file=config_file) + if config.is_fresh() is False: + raise ValueError( + "Config file already tracking previously created group") + + group_types[GroupType.MOCK_TYPE] = GroupType(config=config, region=region) + if group_type not in group_types.keys(): + raise ValueError("Can only create {0} groups.".format(group_types)) + + gt = group_types[group_type] + + # Create a Group + logging.info("[begin] Creating a Greengrass Group") + if group_name is None: + group_name = group_type + + gg_client = boto3.client("greengrass", region_name=region) + + group_info = gg_client.create_group(Name="{0}_group".format(group_name)) + config['group'] = {"id": group_info['Id']} + gt.create_and_attach_thing_policy() + gt.create_and_attach_iam_role() + + cl_arn = _create_core_definition(gg_client=gg_client, group_type=gt, + config=config, group_name=group_name) + dl_arn = _create_device_definition(gg_client=gg_client, group_type=gt, + config=config, group_name=group_name) + lv_arn = _create_function_definition(gg_client=gg_client, group_type=gt, + config=config) + log_arn = _create_logger_definition(gg_client=gg_client, group_type=gt, + config=config) + sub_arn = _create_subscription_definition(gg_client=gg_client, + group_type=gt, config=config) + + # Add all the constituent parts to the Greengrass Group + grp = gg_client.create_group_version( + GroupId=group_info['Id'], + CoreDefinitionVersionArn=cl_arn, + DeviceDefinitionVersionArn=dl_arn, + FunctionDefinitionVersionArn=lv_arn, + LoggerDefinitionVersionArn=log_arn, + SubscriptionDefinitionVersionArn=sub_arn + ) + config['group'] = { + "id": group_info['Id'], + "version_arn": grp['Arn'], + "version": grp['Version'] + } + logging.info("[end] Created Greengrass Group {0}".format(group_info['Id'])) + + +def _create_core_definition(gg_client, group_type, config, group_name): + core_def = group_type.get_core_definition(config=config) + core_def_id = config['core_def']['id'] + if core_def_id is None or len(core_def_id) == 0: + cd = gg_client.create_core_definition( + Name="{0}_core_def".format(group_name) + ) + core_def_id = cd['Id'] + cdv = gg_client.create_core_definition_version( + CoreDefinitionId=core_def_id, + Cores=core_def + ) + cd_arn = cdv['Arn'] + logging.info("Created Core definition ARN:{0}".format(cd_arn)) + config['core_def'] = {'id': core_def_id, 'version_arn': cd_arn} + logging.info("CoreDefinitionId: {0}".format(core_def_id)) + return cd_arn + else: + logging.info("CoreDefinition already exists:{0}".format(core_def_id)) + return + + +def _create_device_definition(gg_client, group_type, config, group_name): + device_def = group_type.get_device_definition(config=config) + device_def_id = config['device_def']['id'] + if device_def_id is None or len(device_def_id) == 0: + dl = gg_client.create_device_definition( + Name="{0}_device_def".format(group_name)) + device_def_id = dl['Id'] + dlv = gg_client.create_device_definition_version( + DeviceDefinitionId=device_def_id, + 
Devices=device_def + ) + dl_arn = dlv['Arn'] + logging.info("Created Device definition ARN:{0}".format(dl_arn)) + config['device_def'] = {'id': dl['Id'], 'version_arn': dl_arn} + logging.info("DeviceDefinitionId: {0}".format(device_def_id)) + return dl_arn + else: + logging.info("DeviceDefinition already exists:{0}".format( + device_def_id) + ) + return + + +def _create_function_definition(gg_client, group_type, config): + # Add latest version of Lambda functions to a Function definition + aws = boto3.client('lambda') + latest_funcs = dict() + func_definition = [] + # first determine the latest versions of configured Lambda functions + for key in config['lambda_functions']: + lambda_name = key + a = aws.list_aliases(FunctionName=lambda_name) + # assume only one Alias associated with the Lambda function + alias_arn = a['Aliases'][0]['AliasArn'] + logging.info("function {0}, found aliases: {1}".format( + lambda_name, a) + ) + + # get the function pointed to by the alias + q = config['lambda_functions'][lambda_name]['arn_qualifier'] + f = aws.get_function(FunctionName=lambda_name, Qualifier=q) + logging.info("retrieved func config: {0}".format(f['Configuration'])) + latest_funcs[lambda_name] = { + "arn": alias_arn, + "arn_qualifier": q + } + func_definition.append({ + "Id": "{0}".format(lambda_name.lower()), + "FunctionArn": alias_arn, + "FunctionConfiguration": { + "Executable": f['Configuration']['Handler'], + "MemorySize": int(f['Configuration']['MemorySize']) * 1000, + "Timeout": int(f['Configuration']['Timeout']) + } + }) # function definition + + # if we found one or more configured functions, create a func definition + if len(func_definition) > 0: + ll = gg_client.create_function_definition( + Name="{0}_func_def".format(group_type.type_name) + ) + lmbv = gg_client.create_function_definition_version( + FunctionDefinitionId=ll['Id'], + Functions=func_definition + ) + config['lambda_functions'] = latest_funcs # update config with versions + ll_arn = lmbv['Arn'] + logging.info("Created Function definition ARN:{0}".format(ll_arn)) + config['func_def'] = {'id': ll['Id'], 'version_arn': ll_arn} + return ll_arn + else: + return '' + + +def _create_logger_definition(gg_client, group_type, config): + log_info = gg_client.create_logger_definition( + Name="{0}_logger_def".format(group_type.type_name) + ) + logv = gg_client.create_logger_definition_version( + LoggerDefinitionId=log_info['Id'], + Loggers=[{ + "Id": "gg-logging", + "Component": "GreengrassSystem", "Level": "INFO", + "Space": 5000, # size in KB + "Type": "FileSystem" + }, { + "Id": "func-logging", + "Component": "Lambda", "Level": "DEBUG", + "Space": 5000, # size in KB + "Type": "FileSystem" + }] + ) + log_arn = logv['Arn'] + logging.info("Created Lambda definition ARN:{0}".format(log_arn)) + config['logger_def'] = { + "id": log_info['Id'], + "version_arn": log_arn + } + + return log_arn + + +def _create_subscription_definition(gg_client, group_type, config): + """ + Configure routing subscriptions for a Greengrass group. 
+
+    group_type: either default or an overridden group type
+    config: GroupConfigFile object used for routing subscriptions
+    """
+    logging.info('[begin] Configuring routing subscriptions')
+    sub_info = gg_client.create_subscription_definition(
+        Name="{0}_routing".format(group_type.type_name)
+    )
+    logging.info('Created subscription definition: {0}'.format(sub_info))
+
+    subs = group_type.get_subscription_definition(config=config)
+    subv = gg_client.create_subscription_definition_version(
+        SubscriptionDefinitionId=sub_info['Id'],
+        Subscriptions=subs
+    )
+    sub_arn = subv['Arn']
+    config['subscription_def'] = {
+        "id": sub_info['Id'],
+        "version_arn": sub_arn
+    }
+    logging.info('[end] Configured routing subscriptions')
+    return sub_arn
+
+
+def delete(config_file, region='us-west-2'):
+    logging.info('[begin] Deleting Group')
+    config = GroupConfigFile(config_file=config_file)
+
+    gg_client = boto3.client("greengrass", region_name=region)
+
+    logger_def_id = config['logger_def']['id']
+    logging.info('Deleting logger_def_id:{0}'.format(logger_def_id))
+    try:
+        gg_client.delete_logger_definition(LoggerDefinitionId=logger_def_id)
+    except ClientError as ce:
+        logging.error(ce.response['Error']['Message'])
+
+    func_def_id = config['func_def']['id']
+    logging.info('Deleting func_def_id:{0}'.format(func_def_id))
+    try:
+        gg_client.delete_function_definition(FunctionDefinitionId=func_def_id)
+    except ClientError as ce:
+        logging.error(ce.response['Error']['Message'])
+
+    device_def_id = config['device_def']['id']
+    logging.info('Deleting device_def_id:{0}'.format(device_def_id))
+    try:
+        gg_client.delete_device_definition(DeviceDefinitionId=device_def_id)
+    except ClientError as ce:
+        logging.error(ce.response['Error']['Message'])
+
+    core_def_id = config['core_def']['id']
+    logging.info('Deleting core_def_id:{0}'.format(core_def_id))
+    try:
+        gg_client.delete_core_definition(CoreDefinitionId=core_def_id)
+    except ClientError as ce:
+        logging.error(ce.response['Error']['Message'])
+
+    group_id = config['group']['id']
+    logging.info('Deleting group_id:{0}'.format(group_id))
+    try:
+        gg_client.delete_group(GroupId=group_id)
+    except ClientError as ce:
+        logging.error(ce.response['Error']['Message'])
+        return
+
+    logging.info('[end] Deleted group')
+
+
+def clean_file(config_file):
+    logging.info('[begin] Cleaning config file')
+    config = GroupConfigFile(config_file=config_file)
+
+    if config.is_fresh():
+        raise ValueError("Config is already clean.")
+    config.make_fresh()
+    logging.info('[end] Cleaned config file:{0}'.format(config_file))
+
+
+def clean_all(config_file, region='us-west-2'):
+    logging.info('[begin] Cleaning all provisioned artifacts')
+    config = GroupConfigFile(config_file=config_file)
+    if config.is_fresh():
+        raise ValueError("Config is already clean.")
+
+    delete(config_file, region=region)
+    clean_file(config_file)
+
+    logging.info('[end] Cleaned all provisioned artifacts')
+
+
+def deploy(group_type, config_file, region='us-west-2'):
+    if group_type not in group_types:
+        raise ValueError("Can only deploy {0} groups.".format(group_types))
+
+    config = GroupConfigFile(config_file=config_file)
+    if config.is_fresh():
+        raise ValueError("Config not yet tracking a group. 
Cannot deploy.")
+
+    gg_client = boto3.client("greengrass", region_name=region)
+    dep_req = gg_client.create_deployment(
+        GroupId=config['group']['id'],
+        GroupVersionId=config['group']['version'],
+        DeploymentType="NewDeployment"
+    )
+    print("Group deploy requested for deployment_id:{0}".format(
+        dep_req['DeploymentId'],
+    ))
+
+
+def main():
+    fire.Fire({
+        'create': create,
+        'deploy': deploy,
+        'clean_all': clean_all,
+        'clean_file': clean_file
+    })
+
+
+if __name__ == '__main__':
+    main()
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..3480374
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,2 @@
+[bdist_wheel]
+universal=1
\ No newline at end of file
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..d5d5d85
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,45 @@
+"""
+greengrass-group-setup
+----------------------
+"""
+import os
+from setuptools import setup
+from group_setup import __version__
+
+
+def open_file(fname):
+    return open(os.path.join(os.path.dirname(__file__), fname))
+
+
+setup(
+    name='greengrass-group-setup',
+    version=__version__,
+    url='https://github.com/awslabs/aws-greengrass-group-setup',
+    license=open("LICENSE.md").read(),
+    author='Brett Francis',
+    author_email='brettf@amazon.com',
+    description='A file driven approach to the creation of an entire AWS Greengrass group',
+    long_description=open_file("README.md").read(),
+    zip_safe=False,
+    include_package_data=True,
+    install_requires=['boto3>=1.4.4', 'fire>=0.1.1'],
+    packages=["group_setup"],
+    keywords='greengrass group aws iot',
+    entry_points='''
+        [console_scripts]
+        group_setup=group_setup.cmd:main
+    ''',
+    classifiers=[
+        'Intended Audience :: Developers',
+        'Natural Language :: English',
+        'Environment :: Web Environment',
+        'License :: OSI Approved :: MIT License',
+        'Operating System :: OS Independent',
+        'Development Status :: 4 - Beta',
+        'Programming Language :: Python',
+        'Programming Language :: Python :: 2.7',
+        'Topic :: Software Development :: Libraries :: Python Modules',
+        'Topic :: Utilities'
+    ]
+)
\ No newline at end of file
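
The change registers group_setup.cmd:main as a group_setup console script via python-fire, so the commands can be driven from the shell or called directly from Python. A minimal end-to-end driver, as a sketch only: it assumes the package is installed, that example/cfg.json has been filled in with real certificate, thing, and Lambda ARNs, and that us-west-2 is the target region; the group name is illustrative. The shell equivalent would be along the lines of "group_setup create example/cfg.json" once the entry point is on PATH.

# Sketch: create the group, deploy it, then tear everything down again.
from group_setup import cmd

cmd.create(config_file='example/cfg.json', group_type='mock',
           group_name='mock_demo', region='us-west-2')
cmd.deploy(group_type='mock', config_file='example/cfg.json',
           region='us-west-2')

# Later, remove the cloud-side definitions and reset the config file so the
# same cfg.json can be reused for a fresh create.
cmd.clean_all(config_file='example/cfg.json', region='us-west-2')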
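The GroupConfigFile helper added in group_setup/__init__.py is also usable on its own. A small sketch of the accessors the commands rely on, assuming example/cfg.json exists in the working directory; note that item assignment writes straight back to the file:

from group_setup import GroupConfigFile

cfg = GroupConfigFile(config_file='example/cfg.json')
print(cfg.is_fresh())              # True while the group/*_def ids are all ''
print(cfg['core']['thing_name'])   # reads go through get_config()
cfg['group'] = {'id': 'example-group-id'}  # persists immediately via write_config()
cfg.make_fresh()                   # blank the ids and version ARNs again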
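Finally, the NotImplementedError messages in GroupType point at the intended extension mechanism: subclass it, override the three get_*_definition methods, and register the new type in the group_types dict in group_setup/cmd.py, per the comment there. A rough sketch with hypothetical names (SensorGroupType, 'sensor'); note that create() currently injects the loaded config only into the mock entry, so a custom type would need its config supplied when it is constructed, or the create command extended accordingly.

from group_setup import GroupType

class SensorGroupType(GroupType):
    SENSOR = 'sensor'

    def __init__(self, config=None, region='us-west-2'):
        super(SensorGroupType, self).__init__(
            type_name=SensorGroupType.SENSOR, config=config, region=region)

    def get_core_definition(self, config):
        # same shape the mock type uses for its core
        return [{
            "ThingArn": config['core']['thing_arn'],
            "CertificateArn": config['core']['cert_arn'],
            "Id": "{0}_00".format(self.type_name),
            "SyncShadow": True
        }]

    # get_device_definition() and get_subscription_definition() would be
    # overridden the same way to describe this type's devices and routes.

Registration would then look like group_types[SensorGroupType.SENSOR] = SensorGroupType(...) alongside the existing mock entry in cmd.py.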