
feat: added support for conditional parameters in hyperparameter tuning (#1544)

* feat: added support for conditional parameters in hyperparameter tuning

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* fixing unit tests

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* fixed all failing tests

* addressed PR comments

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
rosiezou and gcf-owl-bot[bot] committed Aug 2, 2022
1 parent 3526b3e commit 744cc38
Showing 2 changed files with 151 additions and 17 deletions.
84 changes: 75 additions & 9 deletions google/cloud/aiplatform/hyperparameter_tuning.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-

# Copyright 2021 Google LLC
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -29,6 +29,10 @@
"unspecified": gca_study_compat.StudySpec.ParameterSpec.ScaleType.SCALE_TYPE_UNSPECIFIED,
}

_INT_VALUE_SPEC = "integer_value_spec"
_DISCRETE_VALUE_SPEC = "discrete_value_spec"
_CATEGORICAL_VALUE_SPEC = "categorical_value_spec"


class _ParameterSpec(metaclass=abc.ABCMeta):
"""Base class represents a single parameter to optimize."""
@@ -77,10 +81,30 @@ def _to_parameter_spec(
self, parameter_id: str
) -> gca_study_compat.StudySpec.ParameterSpec:
"""Converts this parameter to ParameterSpec."""
# TODO: Conditional parameters
conditions = []
if self.conditional_parameter_spec is not None:
for (conditional_param_id, spec) in self.conditional_parameter_spec.items():
condition = (
gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec()
)
if self._parameter_spec_value_key == _INT_VALUE_SPEC:
condition.parent_int_values = gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec.IntValueCondition(
values=spec.parent_values
)
elif self._parameter_spec_value_key == _CATEGORICAL_VALUE_SPEC:
condition.parent_categorical_values = gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec.CategoricalValueCondition(
values=spec.parent_values
)
elif self._parameter_spec_value_key == _DISCRETE_VALUE_SPEC:
condition.parent_discrete_values = gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec.DiscreteValueCondition(
values=spec.parent_values
)
condition.parameter_spec = spec._to_parameter_spec(conditional_param_id)
conditions.append(condition)
parameter_spec = gca_study_compat.StudySpec.ParameterSpec(
parameter_id=parameter_id,
scale_type=_SCALE_TYPE_MAP.get(getattr(self, "scale", "unspecified")),
conditional_parameter_specs=conditions,
)

setattr(
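
For reference, a minimal sketch of the proto this conversion produces for a parent with a discrete value spec, mirroring the test fixtures later in this diff (the compat import path is an assumption of the sketch, not shown in this commit):

from google.cloud.aiplatform.compat.types import study as gca_study_compat

ParameterSpec = gca_study_compat.StudySpec.ParameterSpec

# The parent's _parameter_spec_value_key selects which *ValueCondition
# oneof is populated: int, categorical, or discrete.
condition = ParameterSpec.ConditionalParameterSpec(
    parent_discrete_values=ParameterSpec.ConditionalParameterSpec.DiscreteValueCondition(
        values=[32, 64]
    ),
    parameter_spec=ParameterSpec(
        parameter_id="decay",
        double_value_spec=ParameterSpec.DoubleValueSpec(min_value=1e-07, max_value=1),
        scale_type=ParameterSpec.ScaleType.UNIT_LINEAR_SCALE,
    ),
)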
@@ -105,6 +129,8 @@ def __init__(
min: float,
max: float,
scale: str,
conditional_parameter_spec: Optional[Dict[str, "_ParameterSpec"]] = None,
parent_values: Optional[Sequence[Union[int, float, str]]] = None,
):
"""
Value specification for a parameter in ``DOUBLE`` type.
@@ -120,9 +146,16 @@
Required. The type of scaling that should be applied to this parameter.
Accepts: 'linear', 'log', 'reverse_log'
conditional_parameter_spec (Dict[str, _ParameterSpec]):
Optional. The conditional parameters associated with the object. The dictionary key
is the ID of the conditional parameter and the dictionary value is one of
`IntegerParameterSpec`, `CategoricalParameterSpec`, or `DiscreteParameterSpec`
parent_values (Sequence[Union[int, float, str]]):
Optional. This argument is only needed when the object is a conditional parameter
and specifies the parent parameter's values for which the condition applies.
"""

super().__init__()
super().__init__(conditional_parameter_spec, parent_values)

self.min = min
self.max = max
@@ -142,6 +175,8 @@ def __init__(
min: int,
max: int,
scale: str,
conditional_parameter_spec: Optional[Dict[str, "_ParameterSpec"]] = None,
parent_values: Optional[Sequence[Union[int, float, str]]] = None,
):
"""
Value specification for a parameter in ``INTEGER`` type.
@@ -157,9 +192,18 @@
Required. The type of scaling that should be applied to this parameter.
Accepts: 'linear', 'log', 'reverse_log'
conditional_parameter_spec (Dict[str, _ParameterSpec]):
Optional. The conditional parameters associated with the object. The dictionary key
is the ID of the conditional parameter and the dictionary value is one of
`IntegerParameterSpec`, `CategoricalParameterSpec`, or `DiscreteParameterSpec`
parent_values (Sequence[int]):
Optional. This argument is only needed when the object is a conditional parameter
and specifies the parent parameter's values for which the condition applies.
"""

super().__init__()
super().__init__(
conditional_parameter_spec=conditional_parameter_spec,
parent_values=parent_values,
)

self.min = min
self.max = max
@@ -177,15 +221,26 @@ class CategoricalParameterSpec(_ParameterSpec):
def __init__(
self,
values: Sequence[str],
conditional_parameter_spec: Optional[Dict[str, "_ParameterSpec"]] = None,
parent_values: Optional[Sequence[Union[int, float, str]]] = None,
):
"""Value specification for a parameter in ``CATEGORICAL`` type.
Args:
values (Sequence[str]):
Required. The list of possible categories.
conditional_parameter_spec (Dict[str, _ParameterSpec]):
Optional. The conditional parameters associated with the object. The dictionary key
is the ID of the conditional parameter and the dictionary value is one of
`IntegerParameterSpec`, `CategoricalParameterSpec`, or `DiscreteParameterSpec`
parent_values (Sequence[str]):
Optional. This argument is only needed when the object is a conditional parameter
and specifies the parent parameter's values for which the condition applies.
"""

super().__init__()
super().__init__(
conditional_parameter_spec=conditional_parameter_spec,
parent_values=parent_values,
)

self.values = values

@@ -202,6 +257,8 @@ def __init__(
self,
values: Sequence[float],
scale: str,
conditional_parameter_spec: Optional[Dict[str, "_ParameterSpec"]] = None,
parent_values: Optional[Sequence[Union[int, float, str]]] = None,
):
"""Value specification for a parameter in ``DISCRETE`` type.
@@ -216,9 +273,18 @@
Required. The type of scaling that should be applied to this parameter.
Accepts: 'linear', 'log', 'reverse_log'
conditional_parameter_spec (Dict[str, _ParameterSpec]):
Optional. The conditional parameters associated with the object. The dictionary key
is the ID of the conditional parameter and the dictionary value is one of
`IntegerParameterSpec`, `CategoricalParameterSpec`, or `DiscreteParameterSpec`
parent_values (Sequence[float]):
Optional. This argument is only needed when the object is a conditional parameter
and specifies the parent parameter's values for which the condition applies.
"""

super().__init__()
super().__init__(
conditional_parameter_spec=conditional_parameter_spec,
parent_values=parent_values,
)

self.values = values
self.scale = scale
84 changes: 76 additions & 8 deletions tests/unit/aiplatform/test_hyperparameter_tuning_job.py
@@ -76,6 +76,13 @@

_TEST_LABELS = {"my_hp_key": "my_hp_value"}

_TEST_CONDITIONAL_PARAMETER_DECAY = hpt.DoubleParameterSpec(
min=1e-07, max=1, scale="linear", parent_values=[32, 64]
)
_TEST_CONDITIONAL_PARAMETER_LR = hpt.DoubleParameterSpec(
min=1e-07, max=1, scale="linear", parent_values=[4, 8, 16]
)

_TEST_BASE_HYPERPARAMETER_TUNING_JOB_PROTO = gca_hyperparameter_tuning_job_compat.HyperparameterTuningJob(
display_name=_TEST_DISPLAY_NAME,
study_spec=gca_study_compat.StudySpec(
@@ -109,8 +116,34 @@
parameter_id="batch_size",
scale_type=gca_study_compat.StudySpec.ParameterSpec.ScaleType.UNIT_LINEAR_SCALE,
discrete_value_spec=gca_study_compat.StudySpec.ParameterSpec.DiscreteValueSpec(
values=[16, 32]
values=[4, 8, 16, 32, 64]
),
conditional_parameter_specs=[
gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec(
parent_discrete_values=gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec.DiscreteValueCondition(
values=[32, 64]
),
parameter_spec=gca_study_compat.StudySpec.ParameterSpec(
double_value_spec=gca_study_compat.StudySpec.ParameterSpec.DoubleValueSpec(
min_value=1e-07, max_value=1
),
scale_type=gca_study_compat.StudySpec.ParameterSpec.ScaleType.UNIT_LINEAR_SCALE,
parameter_id="decay",
),
),
gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec(
parent_discrete_values=gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec.DiscreteValueCondition(
values=[4, 8, 16]
),
parameter_spec=gca_study_compat.StudySpec.ParameterSpec(
double_value_spec=gca_study_compat.StudySpec.ParameterSpec.DoubleValueSpec(
min_value=1e-07, max_value=1
),
scale_type=gca_study_compat.StudySpec.ParameterSpec.ScaleType.UNIT_LINEAR_SCALE,
parameter_id="learning_rate",
),
),
],
),
],
algorithm=gca_study_compat.StudySpec.Algorithm.RANDOM_SEARCH,
@@ -388,7 +421,12 @@ def test_create_hyperparameter_tuning_job(
values=["relu", "sigmoid", "elu", "selu", "tanh"]
),
"batch_size": hpt.DiscreteParameterSpec(
values=[16, 32], scale="linear"
values=[4, 8, 16, 32, 64],
scale="linear",
conditional_parameter_spec={
"decay": _TEST_CONDITIONAL_PARAMETER_DECAY,
"learning_rate": _TEST_CONDITIONAL_PARAMETER_LR,
},
),
},
parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,
@@ -454,7 +492,12 @@ def test_create_hyperparameter_tuning_job_with_timeout(
values=["relu", "sigmoid", "elu", "selu", "tanh"]
),
"batch_size": hpt.DiscreteParameterSpec(
values=[16, 32], scale="linear"
values=[4, 8, 16, 32, 64],
scale="linear",
conditional_parameter_spec={
"decay": _TEST_CONDITIONAL_PARAMETER_DECAY,
"learning_rate": _TEST_CONDITIONAL_PARAMETER_LR,
},
),
},
parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,
@@ -515,7 +558,12 @@ def test_run_hyperparameter_tuning_job_with_fail_raises(
values=["relu", "sigmoid", "elu", "selu", "tanh"]
),
"batch_size": hpt.DiscreteParameterSpec(
values=[16, 32], scale="linear"
values=[4, 8, 16, 32, 64],
scale="linear",
conditional_parameter_spec={
"decay": _TEST_CONDITIONAL_PARAMETER_DECAY,
"learning_rate": _TEST_CONDITIONAL_PARAMETER_LR,
},
),
},
parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,
@@ -574,7 +622,12 @@ def test_run_hyperparameter_tuning_job_with_fail_at_creation(self):
values=["relu", "sigmoid", "elu", "selu", "tanh"]
),
"batch_size": hpt.DiscreteParameterSpec(
values=[16, 32], scale="linear"
values=[4, 8, 16, 32, 64],
scale="linear",
conditional_parameter_spec={
"decay": _TEST_CONDITIONAL_PARAMETER_DECAY,
"learning_rate": _TEST_CONDITIONAL_PARAMETER_LR,
},
),
},
parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,
@@ -639,7 +692,12 @@ def test_hyperparameter_tuning_job_get_state_raises_without_run(self):
values=["relu", "sigmoid", "elu", "selu", "tanh"]
),
"batch_size": hpt.DiscreteParameterSpec(
values=[16, 32, 64], scale="linear"
values=[4, 8, 16, 32, 64],
scale="linear",
conditional_parameter_spec={
"decay": _TEST_CONDITIONAL_PARAMETER_DECAY,
"learning_rate": _TEST_CONDITIONAL_PARAMETER_LR,
},
),
},
parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,
@@ -697,7 +755,12 @@ def test_create_hyperparameter_tuning_job_with_tensorboard(
values=["relu", "sigmoid", "elu", "selu", "tanh"]
),
"batch_size": hpt.DiscreteParameterSpec(
values=[16, 32], scale="linear"
values=[4, 8, 16, 32, 64],
scale="linear",
conditional_parameter_spec={
"decay": _TEST_CONDITIONAL_PARAMETER_DECAY,
"learning_rate": _TEST_CONDITIONAL_PARAMETER_LR,
},
),
},
parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,
@@ -769,7 +832,12 @@ def test_create_hyperparameter_tuning_job_with_enable_web_access(
values=["relu", "sigmoid", "elu", "selu", "tanh"]
),
"batch_size": hpt.DiscreteParameterSpec(
values=[16, 32], scale="linear"
values=[4, 8, 16, 32, 64],
scale="linear",
conditional_parameter_spec={
"decay": _TEST_CONDITIONAL_PARAMETER_DECAY,
"learning_rate": _TEST_CONDITIONAL_PARAMETER_LR,
},
),
},
parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,

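For context, a hedged end-to-end sketch of feeding such a spec to a tuning job. The HyperparameterTuningJob keyword arguments match the calls exercised in the tests above; the project, container image, display names, metric key, machine type, and trial counts are placeholders, not values from this commit:

from google.cloud import aiplatform
from google.cloud.aiplatform import hyperparameter_tuning as hpt

aiplatform.init(project="my-project", location="us-central1")  # placeholders

# Placeholder training container; each trial runs one instance of this job.
custom_job = aiplatform.CustomJob(
    display_name="trial-job",
    worker_pool_specs=[
        {
            "machine_spec": {"machine_type": "n1-standard-4"},
            "replica_count": 1,
            "container_spec": {"image_uri": "gcr.io/my-project/trainer:latest"},
        }
    ],
)

job = aiplatform.HyperparameterTuningJob(
    display_name="conditional-hp-tuning",
    custom_job=custom_job,
    metric_spec={"loss": "minimize"},  # placeholder metric
    parameter_spec={
        "batch_size": hpt.DiscreteParameterSpec(
            values=[4, 8, 16, 32, 64],
            scale="linear",
            conditional_parameter_spec={
                # Only searched when batch_size lands on 32 or 64.
                "decay": hpt.DoubleParameterSpec(
                    min=1e-07, max=1, scale="linear", parent_values=[32, 64]
                )
            },
        )
    },
    max_trial_count=32,
    parallel_trial_count=8,
)
job.run()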