source: Instantiation pattern should be consistent with data flow #46

Closed
pdxjohnny opened this issue Apr 10, 2019 · 0 comments · Fixed by #71
Labels
bug Something isn't working

Comments

@pdxjohnny
Member

We want the Source class and its children to follow the same design pattern as the data flow classes in df/base.py.

class BaseSourceContext(abc.ABC):
    '''
    Abstract Base Class for context managing a Source
    '''

    async def __aenter__(self) -> 'BaseSourceContext':
        return self

    async def __aexit__(self, exc_type, exc_value, traceback):
        pass

class BaseSource(Entrypoint):
    '''
    Abstract Base Class for a Source
    '''

    ENTRY_POINT = 'dffml.source'

    def __init__(self, config: BaseConfig) -> None:
        self.config = config
        self.logger = LOGGER.getChild(self.__class__.__qualname__)

    @classmethod 
    @abc.abstractmethod 
    def args(cls) -> Dict[str, Arg]: 
        pass 
  
    @classmethod 
    @abc.abstractmethod 
    def config(cls, cmd: CMD): 
        pass 

    @abc.abstractmethod
    def __call__(self) -> 'BaseSourceContext':
        return BaseSourceContext(self)
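
For illustration, here is a minimal sketch of a source written against this pattern. MemorySource, MemorySourceContext, and repos() are hypothetical names used only to show the instantiation flow, and the classes above (plus BaseConfig) are assumed to be in scope:

class MemorySourceContext(BaseSourceContext):

    def __init__(self, parent: 'MemorySource') -> None:
        self.parent = parent

    async def repos(self):
        # Hypothetical data access method, yields whatever the parent holds
        for repo in self.parent.mem:
            yield repo

class MemorySource(BaseSource):

    def __init__(self, config: BaseConfig) -> None:
        super().__init__(config)
        self.mem = ['repo_a', 'repo_b']

    @classmethod
    def args(cls) -> Dict[str, Arg]:
        return {}

    @classmethod
    def config(cls, cmd: CMD):
        return BaseConfig()

    def __call__(self) -> 'MemorySourceContext':
        return MemorySourceContext(self)

async def main():
    # Construction binds the config; calling the instance yields a context,
    # and all data access happens within that context
    source = MemorySource(BaseConfig())
    async with source() as sctx:
        async for repo in sctx.repos():
            print(repo)

This mirrors the data flow pattern: construction binds configuration, while calling the instance opens a context in which the work is done.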

CMD will need to change to resemble DataFlowFacilitatorCMD as well.

dffml/dffml/util/cli/cmd.py (lines 130 to 193 in bf3493e):

class BaseDataFlowFacilitatorCMD(CMD):
    '''
    Set timeout for features
    '''
    arg_ops = Arg('-ops', required=True, nargs='+',
                  action=ParseOperationAction)
    arg_input_network = Arg('-input-network',
                            action=ParseInputNetworkAction, default=MemoryInputNetwork)
    arg_operation_network = Arg('-operation-network',
                                action=ParseOperationNetworkAction, default=MemoryOperationNetwork)
    arg_lock_network = Arg('-lock-network',
                           action=ParseLockNetworkAction, default=MemoryLockNetwork)
    arg_rchecker = Arg('-rchecker',
                       action=ParseRedundancyCheckerAction,
                       default=MemoryRedundancyChecker)
    # TODO We should be able to specify multiple operation implementation
    # networks. This would enable operations to live in different places,
    # accessed via the orchestrator transparently.
    arg_opimpn = Arg('-opimpn',
                     action=ParseOperationImplementationNetworkAction,
                     default=MemoryOperationImplementationNetwork)
    arg_orchestrator = Arg('-orchestrator',
                           action=ParseOrchestratorAction, default=MemoryOrchestrator)
    arg_output_specs = Arg('-output-specs', required=True, nargs='+',
                           action=ParseOutputSpecsAction)
    arg_inputs = Arg('-inputs', nargs='+',
                     action=ParseInputsAction, default=[],
                     help='Other inputs to add under each ctx (repo\'s src_url will ' + \
                          'be used as the context)')
    arg_repo_def = Arg('-repo-def', default=False, type=str,
                       help='Definition to be used for repo.src_url.' + \
                            'If set, repo.src_url will be added to the set of inputs ' + \
                            'under each context (which is also the repo\'s src_url)')
    arg_remap = Arg('-remap', nargs='+', required=True,
                    action=ParseRemapAction,
                    help='For each repo, -remap output_operation_name.sub=feature_name')

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.dff = DataFlowFacilitator()
        self.linker = Linker()
        self.exported = self.linker.export(*self.ops)
        self.definitions, self.operations, _outputs = \
            self.linker.resolve(self.exported)

    # Load all entrypoints which may possibly be selected. Then have them add
    # their arguments to the DataFlowFacilitator-tots command.
    @classmethod
    def add_bases(cls):
        class LoadedDataFlowFacilitator(cls):
            pass
        for base in [BaseInputNetwork,
                     BaseOperationNetwork,
                     BaseLockNetwork,
                     BaseRedundancyChecker,
                     BaseOperationImplementationNetwork,
                     BaseOrchestrator]:
            for loaded in base.load():
                for arg_name, arg in loaded.args().items():
                    setattr(LoadedDataFlowFacilitator, arg_name, arg)
        return LoadedDataFlowFacilitator

DataFlowFacilitatorCMD = BaseDataFlowFacilitatorCMD.add_bases()
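
As a rough sketch, the corresponding CMD for sources could mirror add_bases() above, loading every source entrypoint which may possibly be selected and having each add its arguments to the command. BaseSourcesCMD, SourcesCMD, and ParseSourcesAction are hypothetical names, not existing DFFML API:

class BaseSourcesCMD(CMD):
    arg_sources = Arg('-sources', nargs='+',
                      action=ParseSourcesAction, default=[])

    # Mirror add_bases() above, but for sources: load every BaseSource
    # entrypoint and add its arguments to the command
    @classmethod
    def add_bases(cls):
        class LoadedSourcesCMD(cls):
            pass
        for loaded in BaseSource.load():
            for arg_name, arg in loaded.args().items():
                setattr(LoadedSourcesCMD, arg_name, arg)
        return LoadedSourcesCMD

SourcesCMD = BaseSourcesCMD.add_bases()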

Related:

    async def __aenter__(self):
        await self.open()
        # TODO Context management
        return self
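
One way that TODO might be resolved, assuming open() has a matching close() counterpart (a sketch, not the final implementation): pair __aenter__ with an __aexit__ so the whole open/close lifecycle is context managed.

    async def __aenter__(self):
        await self.open()
        return self

    async def __aexit__(self, exc_type, exc_value, traceback):
        # Release whatever open() acquired so the lifecycle is fully
        # context managed (assumes a close() counterpart to open())
        await self.close()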
