
Commit 40726d5

Merge pull request #1762 from PrincetonUniversity/devel
Devel
dillontsmith authored Sep 29, 2020
2 parents 46a5489 + 98e5c2c commit 40726d5
Showing 34 changed files with 434 additions and 383 deletions.
9 changes: 5 additions & 4 deletions psyneulink/core/components/component.py
@@ -1652,6 +1652,7 @@ def _deferred_init(self, context=None):
# Complete initialization
# MODIFIED 10/27/18 OLD:
super(self.__class__,self).__init__(**self._init_args)

# MODIFIED 10/27/18 NEW: FOLLOWING IS NEEDED TO HANDLE FUNCTION DEFERRED INIT (JDC)
# try:
# super(self.__class__,self).__init__(**self._init_args)
@@ -1786,7 +1787,7 @@ def generate_error(param_name):
for param_name in runtime_params:
if not isinstance(param_name, str):
generate_error(param_name)
-elif hasattr(self, param_name):
+elif param_name in self.parameters:
if param_name in {FUNCTION, INPUT_PORTS, OUTPUT_PORTS}:
generate_error(param_name)
if context.execution_id not in self._runtime_params_reset:
@@ -1797,7 +1798,7 @@ def generate_error(param_name):
# Any remaining params should either belong to the Component's function
# or, if the Component is a Function, to it or its owner
elif ( # If Component is not a function, and its function doesn't have the parameter or
-(not is_function_type(self) and not hasattr(self.function, param_name))
+(not is_function_type(self) and param_name not in self.function.parameters)
# the Component is a standalone function:
or (is_function_type(self) and not self.owner)):
generate_error(param_name)
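
Both hunks above swap `hasattr(...)` for a membership test against the component's declared parameter collection. A toy sketch (these classes are illustrative stand-ins, not PsyNeuLink's) of why the membership test is stricter:

```python
# Illustrative stand-ins, not PsyNeuLink classes: a membership test against a
# declared-parameters collection rejects incidental attributes that hasattr()
# would happily accept as runtime params.
class Parameters:
    _names = {"rate", "noise"}

    def __contains__(self, name):
        return name in self._names

class Component:
    parameters = Parameters()
    _cache = None  # incidental attribute, not a declared parameter

c = Component()
print(hasattr(c, "_cache"))      # True: any attribute passes hasattr
print("_cache" in c.parameters)  # False: only declared parameters pass
print("rate" in c.parameters)    # True
```
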
@@ -2856,8 +2857,8 @@ def _instantiate_function(self, function, function_params=None, context=None):
# KAM added 6/14/18 for functions that do not pass their has_initializers status up to their owner via property
# FIX: need comprehensive solution for has_initializers; need to determine whether ports affect mechanism's
# has_initializers status
-if self.function.has_initializers:
-    self.has_initializers = True
+if self.function.parameters.has_initializers._get(context):
+    self.parameters.has_initializers._set(True, context)

self._parse_param_port_sources()

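The `_instantiate_function` hunk above also switches from plain attribute access to `parameters.has_initializers._get(context)` / `._set(..., context)`. A minimal sketch of the idea behind context-keyed parameter state (toy class with assumed semantics, not the actual `Parameter` implementation):

```python
# Toy sketch of context-keyed parameter state (assumed semantics, not the
# actual psyneulink Parameter class): each execution context gets its own
# value instead of one attribute shared across all executions.
class StatefulParam:
    def __init__(self, default):
        self.default = default
        self._values = {}  # execution_id -> value

    def _get(self, context):
        return self._values.get(context, self.default)

    def _set(self, value, context):
        self._values[context] = value

has_initializers = StatefulParam(False)
has_initializers._set(True, "exec-1")
print(has_initializers._get("exec-1"))  # True
print(has_initializers._get("exec-2"))  # False: other contexts keep the default
```
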
4 changes: 2 additions & 2 deletions psyneulink/core/components/functions/objectivefunctions.py
@@ -66,7 +66,7 @@ class Parameters(Function_Base.Parameters):
:default value: False
:type: ``bool``
"""
-normalize = False
+normalize = Parameter(False, stateful=False)
metric = Parameter(None, stateful=False)


@@ -205,7 +205,7 @@ class Parameters(ObjectiveFunction.Parameters):
metric = Parameter(ENERGY, stateful=False)
metric_fct = Parameter(None, stateful=False, loggable=False)
transfer_fct = Parameter(None, stateful=False, loggable=False)
-normalize = False
+normalize = Parameter(False, stateful=False)

@tc.typecheck
def __init__(self,
13 changes: 9 additions & 4 deletions psyneulink/core/components/functions/optimizationfunctions.py
@@ -405,11 +405,11 @@ def _validate_params(self, request_set, target_set=None, context=None):

if SEARCH_SPACE in request_set and request_set[SEARCH_SPACE] is not None:
search_space = request_set[SEARCH_SPACE]
-if not all(isinstance(s, (SampleIterator, type(None))) for s in search_space):
+if not all(isinstance(s, (SampleIterator, type(None), list, tuple, np.ndarray)) for s in search_space):
raise OptimizationFunctionError("All entries in list specified for {} arg of {} must be a {}".
format(repr(SEARCH_SPACE),
self.__class__.__name__,
-SampleIterator.__name__))
+"SampleIterator, list, tuple, or ndarray"))

if SEARCH_TERMINATION_FUNCTION in request_set and request_set[SEARCH_TERMINATION_FUNCTION] is not None:
if not is_function_type(request_set[SEARCH_TERMINATION_FUNCTION]):
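
The loosened check now admits raw sequences alongside `SampleIterator`s, presumably to be wrapped into iterators downstream. A standalone sketch of the same validation, with a stub in place of the real `SampleIterator`:

```python
# Standalone sketch of the broadened validation; SampleIterator is stubbed
# here, and raw sequences are assumed to be wrapped into iterators later.
import numpy as np

class SampleIterator:  # stub standing in for psyneulink's SampleIterator
    pass

def validate_search_space(search_space):
    allowed = (SampleIterator, type(None), list, tuple, np.ndarray)
    if not all(isinstance(s, allowed) for s in search_space):
        raise ValueError(
            "All entries in search_space must be a "
            "SampleIterator, list, tuple, or ndarray"
        )

validate_search_space([SampleIterator(), [0, 1], (0.5,), np.arange(3), None])  # ok
```
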
@@ -495,7 +495,11 @@ def _function(self,
self._unspecified_args = []

current_sample = self._check_args(variable=variable, context=context, params=params)
-current_value = self.owner.objective_mechanism.parameters.value._get(context) if self.owner else 0.
+
+try:
+    current_value = self.owner.objective_mechanism.parameters.value._get(context)
+except AttributeError:
+    current_value = 0

samples = []
values = []
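
The replaced one-liner only guarded against a missing owner; an owner without an `objective_mechanism` still raised. The try/except covers both cases in one step. A hedged sketch of the pattern (helper name is illustrative):

```python
# Sketch of the EAFP pattern: one except clause covers both "owner is None"
# and "owner exists but lacks an objective_mechanism", since both raise
# AttributeError on the attribute chain.
def initial_value(owner, context):
    try:
        return owner.objective_mechanism.parameters.value._get(context)
    except AttributeError:
        return 0
```
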
@@ -766,6 +770,7 @@ class GradientOptimization(OptimizationFunction):
"""

componentName = GRADIENT_OPTIMIZATION_FUNCTION
+bounds = None

class Parameters(OptimizationFunction.Parameters):
"""
@@ -924,7 +929,7 @@ def reset(self, *args, context=None):
super().reset(*args)

# Differentiate objective_function using autograd.grad()
-if OBJECTIVE_FUNCTION in args[0]:
+if OBJECTIVE_FUNCTION in args[0] and not self.gradient_function:
try:
from autograd import grad
self.gradient_function = grad(self.objective_function)
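
Adding `and not self.gradient_function` means autograd differentiation only runs when no gradient function is already set, so a user-supplied gradient is not overwritten on reset. A minimal sketch of the guard (helper name is illustrative):

```python
# Minimal sketch of the guard (helper name is illustrative): only derive a
# gradient with autograd when one has not already been supplied.
def ensure_gradient(objective_function, gradient_function=None):
    if not gradient_function:
        from autograd import grad  # autograd's grad(f) returns df/dx
        gradient_function = grad(objective_function)
    return gradient_function
```
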
2 changes: 0 additions & 2 deletions psyneulink/core/components/functions/selectionfunctions.py
@@ -162,8 +162,6 @@ class OneHot(SelectionFunction):

componentName = ONE_HOT_FUNCTION

-bounds = None
-
classPreferences = {
PREFERENCE_SET_NAME: 'OneHotClassPreferences',
REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE),
(next changed file; file name not captured on this page)
@@ -245,8 +245,6 @@ def __init__(self,
**kwargs
)

-self.has_initializers = True
-
# FIX CONSIDER MOVING THIS TO THE LEVEL OF Function_Base OR EVEN Component
def _validate_params(self, request_set, target_set=None, context=None):
"""Check inner dimension (length) of all parameters used for the function
@@ -574,8 +572,6 @@ def __init__(self,
prefs=prefs,
)

-self.has_initializers = True
-
def _accumulator_check_args(self, variable=None, context=None, params=None, target_set=None):
"""validate params and assign any runtime params.
@@ -611,7 +607,7 @@ def _accumulator_check_args(self, variable=None, context=None, params=None, target_set=None):
runtime_params = params
if runtime_params:
for param_name in runtime_params:
-if hasattr(self, param_name):
+if param_name in self.parameters:
if param_name in {FUNCTION, INPUT_PORTS, OUTPUT_PORTS}:
continue
if context.execution_id not in self._runtime_params_reset:
@@ -825,8 +821,6 @@ def __init__(self,
prefs=prefs,
)

-self.has_initializers = True
-
def _function(self,
variable=None,
context=None,
@@ -1060,8 +1054,6 @@ def __init__(self,
prefs=prefs,
)

-self.has_initializers = True
-
def _validate_params(self, request_set, target_set=None, context=None):
super()._validate_params(
request_set=request_set,
@@ -1594,8 +1586,6 @@ def __init__(self,
prefs=prefs,
)

-self.has_initializers = True
-
def _validate_params(self, request_set, target_set=None, context=None):

# Handle list or array for rate specification
@@ -2047,8 +2037,6 @@ def __init__(self,
prefs=prefs,
)

-self.has_initializers = True
-
def _validate_params(self, request_set, target_set=None, context=None):

super()._validate_params(request_set=request_set, target_set=target_set,context=context)
@@ -2440,8 +2428,6 @@ def __init__(self,
prefs=prefs,
)

-self.has_initializers = True
-
def _validate_noise(self, noise):
if noise is not None and not isinstance(noise, float) and not(isinstance(noise, np.ndarray) and np.issubdtype(noise.dtype, np.floating)):
raise FunctionError(
@@ -2858,8 +2844,6 @@ def __init__(self,
prefs=prefs,
)

-self.has_initializers = True
-
def _validate_noise(self, noise):
if noise is not None and not isinstance(noise, float):
raise FunctionError(
@@ -3140,8 +3124,6 @@ def __init__(self,
prefs=prefs
)

-self.has_initializers = True
-
def _function(self,
variable=None,
context=None,
(next changed file; file name not captured on this page)
@@ -232,8 +232,6 @@ def __init__(self,
prefs=prefs,
)

-self.has_initializers = True
-
def _initialize_previous_value(self, initializer, context=None):
initializer = initializer or []
previous_value = deque(initializer, maxlen=self.parameters.history.get(context))
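
`deque(initializer, maxlen=history)` makes the buffer self-truncating. A quick standalone illustration (values are made up):

```python
# Standalone illustration (made-up values): a deque with maxlen keeps only
# the most recent `history` entries, evicting the oldest on append.
from collections import deque

history = 3
previous_value = deque([1, 2], maxlen=history)
previous_value.append(3)
previous_value.append(4)      # evicts 1
print(list(previous_value))   # [2, 3, 4]
```
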
@@ -731,8 +729,6 @@ def __init__(self,
self.parameters.key_size._set(len(self.previous_value[KEYS][0]), Context())
self.parameters.val_size._set(len(self.previous_value[VALS][0]), Context())

-self.has_initializers = True
-
def _get_state_ids(self):
return super()._get_state_ids() + ["ring_memory"]

@@ -1000,8 +996,6 @@ def _instantiate_attributes_before_function(self, function=None, context=None):
context
)

-self.has_initializers = True
-
if isinstance(self.distance_function, type):
self.distance_function = self.distance_function(context=context)

(next changed file; file name not captured on this page)
@@ -24,7 +24,7 @@
import numpy as np

from psyneulink.core import llvm as pnlvm
-from psyneulink.core.components.component import DefaultsFlexibility
+from psyneulink.core.components.component import DefaultsFlexibility, _has_initializers_setter
from psyneulink.core.components.functions.function import Function_Base, FunctionError
from psyneulink.core.components.functions.distributionfunctions import DistributionFunction
from psyneulink.core.globals.keywords import INITIALIZER, STATEFUL_FUNCTION_TYPE, STATEFUL_FUNCTION, NOISE, RATE
@@ -194,7 +194,7 @@ class Parameters(Function_Base.Parameters):
rate = Parameter(1.0, modulable=True)
previous_value = Parameter(np.array([0]), pnl_internal=True)
initializer = Parameter(np.array([0]), pnl_internal=True)

+has_initializers = Parameter(True, setter=_has_initializers_setter, pnl_internal=True)

@handle_external_context()
@tc.typecheck
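
This declaration is the counterpart of all the `self.has_initializers = True` lines deleted above: the flag becomes a class-level `Parameter` whose setter hook (`_has_initializers_setter`, now imported from component.py) runs on every assignment. A toy sketch of the mechanism (not the actual implementation):

```python
# Toy sketch (not the actual psyneulink implementation): a Parameter whose
# optional setter hook runs on every _set, replacing ad hoc per-__init__
# assignments with one declarative, class-level definition.
class Parameter:
    def __init__(self, default, setter=None):
        self.value = default
        self.setter = setter

    def _get(self, context=None):
        return self.value

    def _set(self, value, context=None):
        if self.setter is not None:
            value = self.setter(value)  # hook can normalize or propagate
        self.value = value

def _has_initializers_setter(value):
    # hypothetical hook body; the real one could also update the owner's flag
    return bool(value)

has_initializers = Parameter(True, setter=_has_initializers_setter)
has_initializers._set(1)
print(has_initializers._get())  # True
```
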
@@ -238,8 +238,6 @@ def __init__(self,
**kwargs
)

-self.has_initializers = True
-
def _validate(self, context=None):
self._validate_rate(self.defaults.rate)
self._validate_initializers(self.defaults.variable, context=context)
@@ -444,15 +442,20 @@ def _instantiate_attributes_before_function(self, function=None, context=None):
# use np.broadcast_to to guarantee that all initializer type attributes take on the same shape as variable
if not np.isscalar(self.defaults.variable):
for attr in self.initializers:
-setattr(self, attr, np.broadcast_to(getattr(self, attr), self.defaults.variable.shape).copy())
+param = getattr(self.parameters, attr)
+param._set(
+    np.broadcast_to(
+        param._get(context),
+        self.defaults.variable.shape
+    ).copy(),
+    context
+)

# create all stateful attributes and initialize their values to the current values of their
# corresponding initializer attributes
for i, attr_name in enumerate(self.stateful_attributes):
-initializer_value = getattr(self, self.initializers[i]).copy()
-setattr(self, attr_name, initializer_value)
-
-self.has_initializers = True
+initializer_value = getattr(self.parameters, self.initializers[i])._get(context).copy()
+getattr(self.parameters, attr_name)._set(initializer_value, context)

super()._instantiate_attributes_before_function(function=function, context=context)
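
`np.broadcast_to` returns a read-only view, so the `.copy()` above is what makes the broadcast initializer writable. A quick standalone check (shapes are made up):

```python
# Standalone check (made-up shapes): broadcast_to yields a read-only view;
# .copy() produces a writable array of the target shape.
import numpy as np

initializer = np.array([0.0])
value = np.broadcast_to(initializer, (3, 2)).copy()
value[0, 0] = 1.0            # writable only because of .copy()
print(value.shape)           # (3, 2)
```
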

@@ -555,7 +558,7 @@ def reset(self, *args, context=None):
setattr(self, attr, reinitialization_values[i])
getattr(self.parameters, attr).set(reinitialization_values[i],
context, override=True)
-value.append(getattr(self, self.stateful_attributes[i]))
+value.append(getattr(self.parameters, self.stateful_attributes[i])._get(context))

self.parameters.value.set(value, context, override=True)
return value