More work on Hand and all over.
Simplified API calls and removed a lot of logging.
Added untested AbstractGrouping.insert_transform_buffer as a way to insert a buffer node from the context of a Grouping.
Made the API property a true dictionary so individual APIs are accessible by name, alongside the new APIProxy.DEFAULT_API property.

Added a fileop function to dcc.scene.
Completely removed digit, since it makes more sense to add it as a method than as a class.
Added a test suite for Hand but haven't filled it in yet.
AndresMWeber committed Feb 3, 2018
1 parent 7f0490e commit ab6b6f0
Showing 15 changed files with 269 additions and 143 deletions.
2 changes: 1 addition & 1 deletion anvil/__init__.py
@@ -12,8 +12,8 @@ class AnvilLog(log.LogMixin):

LOG = AnvilLog

import runtime
import plugins
import runtime
import utils
import objects
import grouping
27 changes: 9 additions & 18 deletions anvil/grouping/base.py
@@ -5,6 +5,7 @@
import anvil.runtime as rt
import anvil.config as cfg
import anvil.objects.attribute as at
import anvil.objects as ot
from anvil.meta_data import MetaData
from anvil.utils.generic import merge_dicts

@@ -36,9 +37,6 @@ def __init__(self, layout_joints=None, parent=None, top_node=None, name_tokens=N
self.name_tokens = self.BUILT_IN_NAME_TOKENS.merge(name_tokens, new=True)
self.meta_data = self.BUILT_IN_META_DATA.merge(meta_data, new=True)

self.info('Processed: %r.__init__(top_node=%s, parent=%s, name_tokens=%s, meta_data=%s, kwargs=%s)',
self, top_node, parent, self.name_tokens, self.meta_data, kwargs)

self._nomenclate = nomenclate.Nom(self.name_tokens.data)
self.chain_nomenclate = nomenclate.Nom()

@@ -58,17 +56,13 @@ def build(self, joints=None, meta_data=None, name_tokens=None, **kwargs):
self.name_tokens.merge(name_tokens)
self.build_joints = joints or self.layout_joints

self.info('Building sub-rig %s(joints=%s, meta_data=%s, kwargs=%s',
self.__class__.__name__, self.meta_data, self.build_kwargs, self.layout_joints)

def build_layout(self):
raise NotImplementedError

def connect_rendering_delegate(self, assignee=None):
# TODO: API Attribute dependent...dangerous.
assignee = anvil.factory(assignee) if assignee is not None else self.root

self.info('Assigning/Connecting display attributes to %s', assignee)
for attr, attr_kwargs in iteritems(self.RENDERING_ATTRIBUTES):
attr_name = '%s_rendering' % attr
group_name = 'group_%s' % attr
@@ -80,28 +74,24 @@ def connect_rendering_delegate(self, assignee=None):
target_group.overrideEnabled.set(1)
rendering_attribute.connect(target_group.visibility, force=True)
assignee.buffer_connect(attr_name, target_group.overrideDisplayType, -1, force=True)
self.info('Display attributes connected to %s', assignee)

def initialize_sub_rig_attributes(self, controller=None, attr_dict=None):
attr_dict = self.BUILT_IN_ATTRIBUTES if attr_dict is None else attr_dict
if attr_dict:
controller = self.root if controller is None else anvil.factory(controller)
self.info('Assigning %s with sub-rig attributes %s', controller, attr_dict)
for attr, attr_kwargs in iteritems(attr_dict):
controller.add_attr(attr, **attr_kwargs)

def parent(self, new_parent):
nodes_exist = [rt.dcc.scene.exists(node) if node != None else False for node in [self.root, new_parent]]
if all(nodes_exist or [False]):
self.info('Parenting root of %r to %s', self, new_parent)
rt.dcc.scene.parent(self.root, new_parent)
return True
else:
self.warning('Parent(%s) -> %r does not exist.', new_parent, self.root)
return False

def rename_chain(self, objects, use_end_naming=False, **name_tokens):
self.info('Renaming chain %s for parent %s', objects, self)
self.chain_nomenclate.merge_dict(self.name_tokens.merge(name_tokens))

for index, object in enumerate(objects):
@@ -114,7 +104,6 @@ def rename(self, *input_dicts, **kwargs):
new_tokens = MetaData(*input_dicts, **kwargs)
self.name_tokens.merge(new_tokens)
self._nomenclate.merge_dict(**self.name_tokens.data)
self.debug('Renaming %r...with name tokens %s and new tokens %s', self, self.name_tokens, new_tokens)
self._cascading_function(lambda n:
n.rename(self._nomenclate.get(**n.name_tokens.update(new_tokens))),
lambda n:
@@ -123,12 +112,18 @@ def rename(self, *input_dicts, **kwargs):
def build_node(self, node_class, node_key, build_fn='build', *args, **kwargs):
kwargs[cfg.NAME_TOKENS] = self.name_tokens.merge(kwargs.get(cfg.NAME_TOKENS, {}), new=True)
kwargs[cfg.META_DATA] = self.meta_data.merge(kwargs.get(cfg.META_DATA, {}), new=True)
self.info('Grouping %r is building node: %s = %s(%s)...parent name tokens: %s',
self, node_key, node_class, kwargs, self.name_tokens)
dag_node = getattr(node_class, build_fn)(*args, **kwargs)
self.register_node(node_key, dag_node)
return dag_node

def insert_transform_buffer(self, node_to_buffer, **kwargs):
name_tokens = kwargs.get(cfg.NAME_TOKENS) or node_to_buffer.name_tokens
buffer = ot.Transform.build(parent=node_to_buffer, name_tokens=name_tokens, **kwargs)
buffer.transform.set((0,0,0))
buffer.parent(node_to_buffer.get_parent())
return buffer


def register_node(self, node_key, dag_node, overwrite=True, name_tokens=None, meta_data=None):
if dag_node is None:
self.warning('Attempted register node %s with key %s but it does not exist', dag_node, node_key)
@@ -159,13 +154,9 @@ def find_node(self, node_key):
def _cascading_function(self, object_function, grouping_function):
for sub_key, sub_node in iteritems(self.hierarchy):
if anvil.is_agrouping(sub_node):
self.debug('Renaming sub_grouping %s:%r based on tokens %s', sub_key, sub_node, sub_node.name_tokens)
grouping_function(sub_node)
elif anvil.is_aobject(sub_node):
self.debug('Renaming sub_node %s:%r based on tokens %s', sub_key, sub_node, sub_node.name_tokens)
object_function(sub_node)
self.debug('Renamed sub_node %r based on tokens %s with parent tokens %s',
sub_node, sub_node.name_tokens, self.name_tokens)

def __getattr__(self, item):
try:
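Note: the new AbstractGrouping.insert_transform_buffer shown above is untested in this commit. The sketch below is a hypothetical usage example rather than code from the repository; the grouping instance and node key are illustrative.

# Hypothetical usage sketch for AbstractGrouping.insert_transform_buffer;
# 'sub_rig' stands in for any built AbstractGrouping subclass and
# 'control_wrist' is an illustrative node key.
wrist_control = sub_rig.find_node('control_wrist')

# Insert a zeroed transform between the control and its current parent so the
# control's own channel values start clean; without an explicit name_tokens
# argument the buffer inherits the control's name tokens.
wrist_buffer = sub_rig.insert_transform_buffer(wrist_control)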
9 changes: 0 additions & 9 deletions anvil/plugins/base/api_proxy.py
@@ -33,34 +33,25 @@ def _validate_function(cls, schema, api, function_name):
def to_validate(function):
@wraps(function)
def validator(*args, **kwargs):
cls.LOG.debug('Validating call for %s.%s(args=%s, kwargs=%s) against schema %s' % (
api.__name__, function_name, ', '.join([repr(a) for a in args]),
', '.join(['%s=%s' % (k, v) for k, v in iteritems(kwargs)]),
list(schema['properties'])))
validate(kwargs, schema)
kwargs = cls._initialize_and_filter_flags(kwargs, schema)
return cls._log_and_run_api_call(api, function_name, *args, **kwargs)

return validator

return to_validate

@classmethod
def _initialize_and_filter_flags(cls, flags, schema):
new_flags = {} if flags is None else flags.copy()

schema_properties = list(schema.get('properties'))
cls.LOG.debug('Filtering flags %s for the schema properties %s' % (new_flags, schema_properties))

for flag_key in list(new_flags):
if flag_key not in schema_properties:
cls.LOG.debug('Flag %s not in schema...removing from flags' % (flag_key))
new_flags.pop(flag_key)

for schema_property in schema_properties:
default = schema['properties'][schema_property].get(cfg.DEFAULT)
if default is not None and new_flags.get(schema_property) is None:
cls.LOG.debug('Setting flag %s from default value %s in schema ' % (schema_property, default))
new_flags[schema_property] = default

return new_flags
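Even with its logging stripped out, _initialize_and_filter_flags keeps the same behaviour: flags the schema does not declare are dropped, and schema defaults are filled in for declared flags the caller left unset. A minimal standalone sketch of that idea in plain Python (not the anvil implementation itself):

def filter_flags(flags, schema):
    """Drop flags the schema does not declare and apply schema defaults (sketch)."""
    new_flags = dict(flags or {})
    properties = schema.get('properties', {})

    # Remove any flag that is not a declared schema property.
    for key in list(new_flags):
        if key not in properties:
            new_flags.pop(key)

    # Fill in defaults for declared properties the caller did not set.
    for key, spec in properties.items():
        if spec.get('default') is not None and new_flags.get(key) is None:
            new_flags[key] = spec['default']

    return new_flags

print(filter_flags({'name': 'root', 'bogus': 1},
                   {'properties': {'name': {'type': 'string'},
                                   'force': {'type': 'boolean', 'default': False}}}))
# -> {'name': 'root', 'force': False}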
3 changes: 2 additions & 1 deletion anvil/plugins/dcc_plugin.py
@@ -7,7 +7,8 @@ class DCCPlugin(object):

def __init__(self, dcc_module):
self.ENGINE = dcc_module.__name__
self.ENGINE_API = dcc_module.dependencies.API or {}
self.APIs = dcc_module.dependencies.APIs or {}
self.ENGINE_API = dcc_module.dependencies.DEFAULT_API

self.scene = dcc_module.scene
self.create = dcc_module.create
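The updated constructor expects each DCC module's dependencies object to expose an APIs mapping plus a DEFAULT_API entry. A hypothetical Maya-flavoured dependencies module consistent with that shape might look like the following; the module contents are illustrative and not taken from this commit.

# Hypothetical shape of a dcc_module.dependencies module as implied by
# DCCPlugin.__init__; the real module in the repository may differ.
import maya.cmds as cmds
import maya.mel as mel

APIs = {'cmds': cmds, 'mel': mel}   # every engine API, reachable by name
DEFAULT_API = APIs['cmds']          # stored on the plugin as ENGINE_API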
2 changes: 1 addition & 1 deletion anvil/plugins/maya/animation.py
@@ -13,6 +13,6 @@
'insertBlend': api_proxy.BOOL_TYPE, 'minimizeRotation': api_proxy.BOOL_TYPE,
'outTangentType': api_proxy.STR_TYPE, 'shape': api_proxy.BOOL_TYPE,
'time': api_proxy.NUM_TYPE, 'value': api_proxy.NUM_TYPE}}),
API, 'setKeyframe')
DEFAULT_API, 'setKeyframe')
def set_keyframe(*objects, **kwargs):
pass
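With the module-level constant renamed from API to DEFAULT_API, the proxied stub still validates its flags against the schema and then forwards the call to the engine's setKeyframe command. An illustrative call (node name and values are hypothetical):

# Illustrative call through the proxy; flags are validated against the schema,
# defaults are applied, and the call is forwarded to Maya's setKeyframe command.
from anvil.plugins.maya import animation

animation.set_keyframe('hand_wrist_ctrl', time=10, value=0.0)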
50 changes: 25 additions & 25 deletions anvil/plugins/maya/connections.py
@@ -33,7 +33,7 @@

@api_proxy.APIProxy._validate_function(
api_proxy.merge_dicts(api_proxy.DEFAULT_SCHEMA, {'properties': {'ignoreUnitConversion': api_proxy.BOOL_TYPE}}),
API, 'mute')
DEFAULT_API, 'mute')
def connected_attr(attribute_dag_path_1, attribute_dag_path_2, **kwargs):
pass

@@ -43,7 +43,7 @@ def connected_attr(attribute_dag_path_1, attribute_dag_path_2, **kwargs):
{'properties':
{'disable': api_proxy.BOOL_TYPE,
'force': api_proxy.BOOL_TYPE}}),
API, 'mute')
DEFAULT_API, 'mute')
def mute(attribute_dag_path, **kwargs):
pass

@@ -53,7 +53,7 @@ def mute(attribute_dag_path, **kwargs):
{'properties':
{'connection': api_proxy.BOOL_TYPE,
'datablock': api_proxy.BOOL_TYPE}}),
API, 'isDirty')
DEFAULT_API, 'isDirty')
def dirty_attr(attribute_dag_path, **kwargs):
pass

@@ -67,7 +67,7 @@ def dirty_attr(attribute_dag_path, **kwargs):
'isExactDestination': api_proxy.BOOL_TYPE, 'isExactSource': api_proxy.BOOL_TYPE,
'isLocked': api_proxy.BOOL_TYPE, 'isSource': api_proxy.BOOL_TYPE,
'sourceFromDestination': api_proxy.BOOL_TYPE}}),
API, 'connectionInfo')
DEFAULT_API, 'connectionInfo')
def connection_info(attribute_dag_path, **kwargs):
pass

@@ -81,7 +81,7 @@ def connection_info(attribute_dag_path, **kwargs):
'logicalAnd': api_proxy.BOOL_TYPE, 'multi': api_proxy.BOOL_TYPE,
'short': api_proxy.BOOL_TYPE, 'type': api_proxy.STR_TYPE,
'userInterface': api_proxy.BOOL_TYPE, 'writable': api_proxy.BOOL_TYPE}}),
API, 'attributeInfo')
DEFAULT_API, 'attributeInfo')
def info_attr(attribute_dag_path, **kwargs):
pass

@@ -95,7 +95,7 @@ def info_attr(attribute_dag_path, **kwargs):
'groupLevels': api_proxy.BOOL_TYPE, 'historyAttr': api_proxy.BOOL_TYPE,
'interestLevel': api_proxy.INT_TYPE, 'leaf': api_proxy.BOOL_TYPE,
'levels': api_proxy.INT_TYPE, 'pruneDagObjects': api_proxy.BOOL_TYPE}}),
API, 'listHistory')
DEFAULT_API, 'listHistory')
def list_history(dag_path, **kwargs):
pass

@@ -107,7 +107,7 @@ def list_history(dag_path, **kwargs):
'shapes': api_proxy.BOOL_TYPE, 'skipConversionNodes': api_proxy.BOOL_TYPE,
'source': api_proxy.BOOL_TYPE, 'type': api_proxy.STR_TYPE}
}),
API, 'listConnections')
DEFAULT_API, 'listConnections')
def list_connections(attribute_dag_path, **kwargs):
pass

@@ -117,7 +117,7 @@ def list_connections(attribute_dag_path, **kwargs):
{'properties': {
'remove': api_proxy.BOOL_TYPE,
'query': api_proxy.BOOL_TYPE}}),
API, 'aliasAttr')
DEFAULT_API, 'aliasAttr')
def alias_attr(attribute_dag_path, **kwargs):
pass

@@ -150,7 +150,7 @@ def alias_attr(attribute_dag_path, **kwargs):
'typeExact': api_proxy.STR_TYPE, 'usedAsColor': api_proxy.BOOL_TYPE,
'usedAsFilename': api_proxy.BOOL_TYPE, 'usesMultiBuilder': api_proxy.BOOL_TYPE,
'worldspace': api_proxy.BOOL_TYPE, 'writable': api_proxy.BOOL_TYPE}}),
API, 'attributeQuery')
DEFAULT_API, 'attributeQuery')
def query_attr(attribute, **kwargs):
pass

@@ -186,7 +186,7 @@ def query_attr(attribute, **kwargs):
'userDefined': api_proxy.BOOL_TYPE,
'visible': api_proxy.BOOL_TYPE,
'write': api_proxy.BOOL_TYPE}}),
API, 'listAttr')
DEFAULT_API, 'listAttr')
def list_attr(node, *args, **kwargs):
pass

@@ -196,7 +196,7 @@ def list_attr(node, *args, **kwargs):
{'properties': {
'attribute': api_proxy.STR_TYPE,
'name': api_proxy.STR_TYPE}}),
API, 'deleteAttr')
DEFAULT_API, 'deleteAttr')
def delete_attr(attribute_dag_path_or_node, **kwargs):
pass

@@ -216,7 +216,7 @@ def delete_attr(attribute_dag_path_or_node, **kwargs):
'size': api_proxy.BOOL_TYPE,
'time': api_proxy.NUM_TYPE,
'type': api_proxy.BOOL_TYPE}}),
API, 'getAttr')
DEFAULT_API, 'getAttr')
def get_attr(attribute_dag_path, *args, **kwargs):
pass

@@ -233,7 +233,7 @@ def get_attr(attribute_dag_path, *args, **kwargs):
'lock': api_proxy.BOOL_TYPE,
'size': api_proxy.NUM_TYPE,
'type': api_proxy.STR_TYPE}}),
API, 'setAttr')
DEFAULT_API, 'setAttr')
def set_attr(attribute, **kwargs):
pass

@@ -260,7 +260,7 @@ def set_attr(attribute, **kwargs):
'usedAsFilename': api_proxy.BOOL_TYPE, 'usedAsProxy': api_proxy.BOOL_TYPE,
'writable': api_proxy.BOOL_TYPE
}}),
API, 'addAttr')
DEFAULT_API, 'addAttr')
def add_attr(attribute, **kwargs):
pass

@@ -272,38 +272,38 @@ def add_attr(attribute, **kwargs):
'lock': api_proxy.BOOL_TYPE,
'nextAvailable': api_proxy.STR_TYPE,
'referenceDest': api_proxy.STR_TYPE}}),
API, 'connectAttr')
DEFAULT_API, 'connectAttr')
def connect_attr(source_attribute, destination_attribute, **kwargs):
pass


@api_proxy.APIProxy._validate_function(
api_proxy.merge_dicts(api_proxy.DEFAULT_SCHEMA, {'properties': {'nextAvailable': api_proxy.BOOL_TYPE}}),
API, 'disconnectAttr')
DEFAULT_API, 'disconnectAttr')
def disconnect_attr(*attributes, **kwargs):
pass


@api_proxy.APIProxy._validate_function(
api_proxy.merge_dicts(api_proxy.DEFAULT_SCHEMA,
{'properties': api_proxy.merge_dicts(default_properties, offset_schema)}),
API, 'pointConstraint')
DEFAULT_API, 'pointConstraint')
def translate(source, targets, **flags):
pass


@api_proxy.APIProxy._validate_function(
api_proxy.merge_dicts(api_proxy.DEFAULT_SCHEMA,
{'properties': api_proxy.merge_dicts(default_properties, offset_schema, cacheable_schema)}),
API, 'orientConstraint')
DEFAULT_API, 'orientConstraint')
def rotate(source, targets, **flags):
pass


@api_proxy.APIProxy._validate_function(
api_proxy.merge_dicts(api_proxy.DEFAULT_SCHEMA,
{'properties': api_proxy.merge_dicts(default_properties, offset_schema, aim_schema)}),
API, 'aimConstraint')
DEFAULT_API, 'aimConstraint')
def aim(source, targets, **flags):
pass

@@ -314,7 +314,7 @@ def aim(source, targets, **flags):
{"scaleCompensate": api_proxy.BOOL_TYPE,
"targetList": api_proxy.BOOL_TYPE},
offset_schema)}),
API, 'scaleConstraint')
DEFAULT_API, 'scaleConstraint')
def scale(source, targets, **flags):
pass

@@ -328,36 +328,36 @@ def scale(source, targets, **flags):
},
offset_schema,
cacheable_schema)}),
API, 'parentConstraint')
DEFAULT_API, 'parentConstraint')
def parent(source, targets, **flags):
pass


@api_proxy.APIProxy._validate_function(
api_proxy.merge_dicts(api_proxy.DEFAULT_SCHEMA,
{'properties': api_proxy.merge_dicts(default_properties, aim_schema)}),
API, 'tangentConstraint')
DEFAULT_API, 'tangentConstraint')
def tangent(source, targets, **flags):
pass


@api_proxy.APIProxy._validate_function(
api_proxy.merge_dicts(api_proxy.DEFAULT_SCHEMA, {'properties': api_proxy.merge_dicts(default_properties)}),
API, 'geometryConstraint')
DEFAULT_API, 'geometryConstraint')
def geometry_point(source, targets, **flags):
pass


@api_proxy.APIProxy._validate_function(
api_proxy.merge_dicts(api_proxy.DEFAULT_SCHEMA,
{'properties': api_proxy.merge_dicts(default_properties, aim_schema)}),
API, 'normalConstraint')
DEFAULT_API, 'normalConstraint')
def geometry_normal(source, targets, **flags):
pass


@api_proxy.APIProxy._validate_function(
api_proxy.merge_dicts(api_proxy.DEFAULT_SCHEMA, {'properties': api_proxy.merge_dicts(default_properties)}),
API, 'poleVectorConstraint')
DEFAULT_API, 'poleVectorConstraint')
def pole_vector(source, targets, **flags):
pass
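Every constraint wrapper above now resolves against DEFAULT_API instead of the old module-level API constant; the call pattern itself is unchanged. An illustrative use of the parent wrapper (node names are hypothetical, and any flags passed would have to match the merged schema):

# Hypothetical usage; after validation the call is forwarded to the engine's
# parentConstraint command.
from anvil.plugins.maya import connections

connections.parent('driver_ctrl', ['driven_grp'])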
