Port #1476 ('dict' option for weights_initializer and bias_initializer) to tf_legacy #1599

Merged 2 commits on Dec 16, 2021
16 changes: 8 additions & 8 deletions ludwig/combiners/combiners.py
@@ -60,8 +60,8 @@ class ConcatCombinerConfig:
     num_fc_layers: int = schema.NonNegativeInteger(default=0)
     fc_size: int = schema.PositiveInteger(default=256)
     use_bias: bool = True
-    weights_initializer: str = schema.InitializerOptions(default='glorot_uniform')
-    bias_initializer: str = schema.InitializerOptions(default='zeros')
+    weights_initializer: Union[str, Dict] = schema.InitializerOrDict(default='glorot_uniform')
+    bias_initializer: Union[str, Dict] = schema.InitializerOrDict(default='zeros')
Collaborator:
Out of curiosity, what's an example of a bias initializer configured using a dict?

Collaborator (Author):
I don't have a concrete example for you 😅, but the dict form is explicitly supported by the developer guide, which traces back to the TF documentation for initializer parameters.
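For illustration, a minimal sketch based on the tests added in this PR (the stddev value is arbitrary): the dict form names the initializer under 'type', and any remaining keys are passed through as parameters to the underlying TF initializer.

    # Plain-string form (unchanged behavior):
    config['combiner']['bias_initializer'] = 'zeros'
    # Dict form enabled by this PR: 'type' selects the initializer,
    # extra keys (e.g. 'stddev') become its parameters.
    config['combiner']['weights_initializer'] = {'type': 'normal', 'stddev': 0.05}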

     weights_regularizer: Optional[str] = schema.RegularizerOptions()
     bias_regularizer: Optional[str] = schema.RegularizerOptions()
     activity_regularizer: Optional[str] = schema.RegularizerOptions()
@@ -488,8 +488,8 @@ class TransformerCombinerConfig:
     num_fc_layers: int = schema.NonNegativeInteger(default=0)
     fc_size: int = schema.PositiveInteger(default=256)
     use_bias: bool = True
-    weights_initializer: str = schema.InitializerOptions(default='glorot_uniform')
-    bias_initializer: str = schema.InitializerOptions(default='zeros')
+    weights_initializer: Union[str, Dict] = schema.InitializerOrDict(default='glorot_uniform')
+    bias_initializer: Union[str, Dict] = schema.InitializerOrDict(default='zeros')
     weights_regularizer: Optional[str] = schema.RegularizerOptions()
     bias_regularizer: Optional[str] = schema.RegularizerOptions()
     activity_regularizer: Optional[str] = schema.RegularizerOptions()
@@ -619,8 +619,8 @@ class TabTransformerCombinerConfig:
     num_fc_layers: int = schema.NonNegativeInteger(default=0)
     fc_size: int = schema.PositiveInteger(default=256)
     use_bias: bool = True
-    weights_initializer: str = schema.InitializerOptions(default='glorot_uniform')
-    bias_initializer: str = schema.InitializerOptions(default='zeros')
+    weights_initializer: Union[str, Dict] = schema.InitializerOrDict(default='glorot_uniform')
+    bias_initializer: Union[str, Dict] = schema.InitializerOrDict(default='zeros')
     weights_regularizer: Optional[str] = schema.RegularizerOptions()
     bias_regularizer: Optional[str] = schema.RegularizerOptions()
     activity_regularizer: Optional[str] = schema.RegularizerOptions()
@@ -809,8 +809,8 @@ class ComparatorCombinerConfig:
     num_fc_layers: int = schema.NonNegativeInteger(default=1)
     fc_size: int = schema.PositiveInteger(default=256)
     use_bias: bool = True
-    weights_initializer: str = schema.InitializerOptions(default='glorot_uniform')
-    bias_initializer: str = schema.InitializerOptions(default='zeros')
+    weights_initializer: Union[str, Dict] = schema.InitializerOrDict(default='glorot_uniform')
+    bias_initializer: Union[str, Dict] = schema.InitializerOrDict(default='zeros')
     weights_regularizer: Optional[str] = schema.RegularizerOptions()
     bias_regularizer: Optional[str] = schema.RegularizerOptions()
     activity_regularizer: Optional[str] = schema.RegularizerOptions()
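With this change, every combiner config accepts either a plain string or a dict for its initializer fields. A minimal sketch of loading such a config (assuming the load_config helper shown in the schema_utils.py diff below; the stddev value is illustrative):

    from ludwig.combiners.combiners import ConcatCombinerConfig
    from ludwig.utils.schema_utils import load_config

    # String form, as before:
    cfg = load_config(ConcatCombinerConfig, weights_initializer='glorot_uniform')

    # Dict form introduced by this PR: validation requires a known 'type'
    # and passes any additional keys through untouched.
    cfg = load_config(
        ConcatCombinerConfig,
        weights_initializer={'type': 'normal', 'stddev': 0.05},
    )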
46 changes: 46 additions & 0 deletions ludwig/utils/schema_utils.py
@@ -90,6 +90,12 @@ def Embed():
 _embed_options = ['add']


+def InitializerOrDict(default='xavier_uniform'):
+    return field(metadata={
+        'marshmallow_field': InitializerOptionsOrCustomDictField(allow_none=False)
+    }, default=default)
+
+
 class EmbedInputFeatureNameField(fields.Field):
     def _deserialize(self, value, attr, data, **kwargs):
         if value is None:
@@ -117,6 +123,46 @@ def _jsonschema_type_mapping(self):
         }


+class InitializerOptionsOrCustomDictField(fields.Field):
+    def _deserialize(self, value, attr, data, **kwargs):
+        initializers = list(initializer_registry.keys())
+        if isinstance(value, str):
+            if value not in initializers:
+                raise ValidationError(
+                    f"Expected one of: {initializers}, found: {value}"
+                )
+            return value
+
+        if isinstance(value, dict):
+            if 'type' not in value:
+                raise ValidationError(
+                    f"Dict must contain 'type'"
+                )
+            if value['type'] not in initializers:
+                raise ValidationError(
+                    f"Dict expected key 'type' to be one of: {initializers}, found: {value}"
+                )
+            return value
+
+        raise ValidationError('Field should be str or dict')
+
+    def _jsonschema_type_mapping(self):
+        initializers = list(initializer_registry.keys())
+        return {
+            'oneOf': [
+                {'type': 'string', 'enum': initializers},
+                {
+                    "type": "object",
+                    "properties": {
+                        "type": {"type": "string", 'enum': initializers},
+                    },
+                    "required": ["type"],
+                    "additionalProperties": True,
+                },
+            ]
+        }
+
+
 def load_config(cls, **kwargs):
     schema = marshmallow_dataclass.class_schema(cls)()
     return schema.load(kwargs)
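To make the new field's behavior concrete, a sketch of exercising it directly (not part of the PR; marshmallow's public Field.deserialize calls the _deserialize hook shown above):

    from marshmallow import ValidationError
    from ludwig.utils.schema_utils import InitializerOptionsOrCustomDictField

    field = InitializerOptionsOrCustomDictField(allow_none=False)
    field.deserialize('zeros')                             # valid: known registry name
    field.deserialize({'type': 'normal', 'stddev': 0.05})  # valid: dict with known 'type'
    try:
        field.deserialize({'stddev': 0.05})                # rejected: missing 'type' key
    except ValidationError as err:
        print(err.messages)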
20 changes: 18 additions & 2 deletions tests/ludwig/utils/test_schema.py
@@ -263,15 +263,31 @@ def test_config_bad_combiner_types_enums():
     config['combiner']['weights_initializer'] = 'fail'
     with pytest.raises(ValidationError, match=r"'fail' is not of*"):
         validate_config(config)
+
+    config['combiner']['weights_initializer'] = {}
+    with pytest.raises(ValidationError, match=r"Failed validating 'type'"):
+        validate_config(config)
+    config['combiner']['weights_initializer'] = {'type': 'fail'}
+    with pytest.raises(ValidationError, match=r"'fail' is not one of*"):
+        validate_config(config)
+    config['combiner']['weights_initializer'] = {'type': 'normal', 'stddev': 0}
+    validate_config(config)

     # Test bias initializer:
     del config['combiner']['weights_initializer']
     config['combiner']['bias_initializer'] = 'variance_scaling'
     validate_config(config)
     config['combiner']['bias_initializer'] = 'fail'
     with pytest.raises(ValidationError, match=r"'fail' is not of*"):
         validate_config(config)
+
+    config['combiner']['bias_initializer'] = {}
+    with pytest.raises(ValidationError, match=r"Failed validating 'type'"):
+        validate_config(config)
+    config['combiner']['bias_initializer'] = {'type': 'fail'}
+    with pytest.raises(ValidationError, match=r"'fail' is not one of*"):
+        validate_config(config)
+    config['combiner']['bias_initializer'] = {'type': 'zeros', 'stddev': 0}
+    validate_config(config)

     # Test weights regularizer:
     del config['combiner']['bias_initializer']
     config['combiner']['weights_regularizer'] = 'l1'