Skip to content

Commit

Permalink
Merge pull request #1757 from PrincetonUniversity/devel
Browse files Browse the repository at this point in the history
Devel
  • Loading branch information
dillontsmith committed Sep 21, 2020
2 parents e053667 + 510f6cc commit 46a5489
Show file tree
Hide file tree
Showing 51 changed files with 2,635 additions and 673 deletions.
4 changes: 2 additions & 2 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -130,9 +130,9 @@ switch to your preferred python3 environment, then run

Dependencies that are automatically installed (except those noted as optional) include:

* numpy (version <1.16)
* numpy
* matplotlib
* toposort (version 1.4)
* toposort
* typecheck-decorator (version 1.2)
* pillow
* llvmlite
Expand Down
1 change: 0 additions & 1 deletion dev_requirements.txt
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
ipykernel
ipython
jupyter
nbconvert<6
psyneulink-sphinx-theme
pytest
pytest-benchmark
Expand Down
8 changes: 5 additions & 3 deletions docs/source/BasicsAndPrimer.rst
Original file line number Diff line number Diff line change
Expand Up @@ -723,7 +723,8 @@ a `ModulatorySignal <ModulatorySignal>` belonging to a `ModulatoryMechanism <Mod
of a `Mechanism <Mechanism>` or a `Projection <Projection>` is modulable, it is assigned a `ParameterPort` -- this is a
Component that belongs to the Mechanism or Projection and can receive a Projection from a ModulatorySignal, allowing
another component to modulate the value of the parameter. ParameterPorts are created for every modulable parameter of
a Mechanism or its `function <Mechanism_Base.function>`, and similarly for Projections. These determine the value
a Mechanism, its `function <Mechanism_Base.function>`, any of its
secondary functions, and similarly for Projections. These determine the value
of the parameter that is actually used when the Component is executed, which may be different than the base value
returned by accessing the parameter directly (as in the examples above); see `ModulatorySignal_Modulation` for a more
complete description of modulation. The current *modulated* value of a parameter can be accessed from the `value
Expand All @@ -738,8 +739,9 @@ the `value <ParameterPort.value>` of the ParameterPort for the parameter::
>>> task.mod_gain
[0.62]

This works for any modulable parameters of the Mechanism or its `function <Mechanism_Base.function>`. Note that,
here, neither the ``parameters`` nor the ``function`` atributes of the Mechanism need to be included in the reference.
This works for any modulable parameters of the Mechanism, its
`function <Mechanism_Base.function>`, or secondary functions. Note that,
here, neither the ``parameters`` nor the ``function`` attributes of the Mechanism need to be included in the reference.
Note also that, as explained above, the value returned is different from the base value of the function's gain
parameter::

Expand Down
4 changes: 2 additions & 2 deletions docs/source/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -153,9 +153,9 @@ switch to your preferred python3 environment, then run

Dependencies that are automatically installed (except those noted as optional) include:

* numpy (version < 1.16)
* numpy
* matplotlib
* toposort (version 1.4)
* toposort
* typecheck-decorator (version 1.2)
* pillow
* llvmlite
Expand Down
4 changes: 2 additions & 2 deletions docs/source/index_logo_with_text.rst
Original file line number Diff line number Diff line change
Expand Up @@ -153,9 +153,9 @@ switch to your preferred python3 environment, then run

Dependencies that are automatically installed (except those noted as optional) include:

* numpy (version < 1.16)
* numpy
* matplotlib
* toposort (version 1.4)
* toposort
* typecheck-decorator (version 1.2)
* pillow
* llvmlite
Expand Down
133 changes: 109 additions & 24 deletions psyneulink/core/components/component.py
Original file line number Diff line number Diff line change
Expand Up @@ -1132,7 +1132,15 @@ def __init__(self,
isinstance(param, SharedParameter)
and not isinstance(param.source, ParameterAlias)
):
self.initial_shared_parameters[param.attribute_name][param.shared_parameter_name] = param.default_value
try:
if parameter_values[param_name] is not None:
isp_val = parameter_values[param_name]
else:
isp_val = copy.deepcopy(param.default_value)
except KeyError:
isp_val = copy.deepcopy(param.default_value)

self.initial_shared_parameters[param.attribute_name][param.shared_parameter_name] = isp_val

# we must know the final variable shape before setting up parameter
# Functions or they will mismatch
Expand Down Expand Up @@ -1311,7 +1319,7 @@ def _get_compilation_params(self):
# Reference to other components
"objective_mechanism", "agent_rep", "projections",
# Shape mismatch
"costs", "auto", "hetero",
"auto", "hetero", "cost", "costs", "combined_costs",
# autodiff specific types
"pytorch_representation", "optimizer"}
# Mechanisms need a few extra entries:
Expand Down Expand Up @@ -1345,12 +1353,12 @@ def llvm_param_ids(self):

def _is_param_modulated(self, p):
try:
if p.name in self.owner.parameter_ports:
if p in self.owner.parameter_ports:
return True
except AttributeError:
pass
try:
if p.name in self.parameter_ports:
if p in self.parameter_ports:
return True
except AttributeError:
pass
Expand Down Expand Up @@ -1676,7 +1684,8 @@ def _assign_default_name(self, **kwargs):
return

def _set_parameter_value(self, param, val, context=None):
getattr(self.parameters, param)._set(val, context)
param = getattr(self.parameters, param)
param._set(val, context)
if hasattr(self, "parameter_ports"):
if param in self.parameter_ports:
new_port_value = self.parameter_ports[param].execute(
Expand All @@ -1685,7 +1694,16 @@ def _set_parameter_value(self, param, val, context=None):
self.parameter_ports[param].parameters.value._set(new_port_value, context)
elif hasattr(self, "owner"):
if hasattr(self.owner, "parameter_ports"):
# skip Components, assume they are to be run to provide the
# value instead of given as a variable to a parameter port
if param in self.owner.parameter_ports:
try:
if any([isinstance(v, Component) for v in val]):
return
except TypeError:
if isinstance(val, Component):
return

new_port_value = self.owner.parameter_ports[param].execute(
context=Context(execution_phase=ContextFlags.EXECUTING, execution_id=context.execution_id)
)
Expand Down Expand Up @@ -1982,7 +2000,7 @@ def _is_user_specified(parameter):
else:
param_defaults[p.source.name] = param_defaults[p.name]

for p in filter(lambda x: not isinstance(x, ParameterAlias), self.parameters):
for p in filter(lambda x: not isinstance(x, (ParameterAlias, SharedParameter)), self.parameters):
p._user_specified = _is_user_specified(p)

# copy spec so it is not overwritten later
Expand Down Expand Up @@ -2816,6 +2834,11 @@ def _instantiate_function(self, function, function_params=None, context=None):
except KeyError:
pass

try:
kwargs_to_instantiate.update(self.initial_shared_parameters[FUNCTION])
except KeyError:
pass

# matrix is determined from ParameterPort based on string value in function_params
# update it here if needed
if MATRIX in kwargs_to_instantiate:
Expand All @@ -2840,9 +2863,20 @@ def _instantiate_function(self, function, function_params=None, context=None):

def _instantiate_attributes_after_function(self, context=None):
if hasattr(self, "_parameter_ports"):
shared_params = [p for p in self.parameters if isinstance(p, (ParameterAlias, SharedParameter))]
sources = [p.source for p in shared_params]

for param_port in self._parameter_ports:
setattr(self.__class__, "mod_" + param_port.name, make_property_mod(param_port.name))
setattr(self.__class__, "get_mod_" + param_port.name, make_stateful_getter_mod(param_port.name))
property_names = {param_port.name}
try:
alias_index = sources.index(param_port.source)
property_names.add(shared_params[alias_index].name)
except ValueError:
pass

for property_name in property_names:
setattr(self.__class__, "mod_" + property_name, make_property_mod(property_name, param_port.name))
setattr(self.__class__, "get_mod_" + property_name, make_stateful_getter_mod(property_name, param_port.name))

def _instantiate_value(self, context=None):
# - call self.execute to get value, since the value of a Component is defined as what is returned by its
Expand Down Expand Up @@ -3009,13 +3043,59 @@ def is_finished(self, context=None):
return self.parameters.is_finished_flag._get(context)

def _parse_param_port_sources(self):
try:
if hasattr(self, '_parameter_ports'):
for param_port in self._parameter_ports:
if param_port.source == FUNCTION:
param_port.source = self.function
except AttributeError:
try:
orig_source = param_port.source
param_port.source = param_port.source(self)
del self.parameter_ports.parameter_mapping[orig_source]
self.parameter_ports.parameter_mapping[param_port.source] = param_port
except TypeError:
pass

def _get_current_parameter_value(self, parameter, context=None):
    """Return the current (possibly modulated) value of *parameter*.

    If a ParameterPort corresponding to *parameter* can be found (on this
    Component, or on its owner), the port's value -- which reflects any
    modulation -- is returned; otherwise the parameter's own stateful value
    for *context* is returned.

    Arguments
    ---------
    parameter : str or Parameter
        the (modulable) parameter to look up; a string is resolved against
        ``self.parameters`` first.
    context : Context, optional
        execution context used for the stateful value lookup.

    Raises
    ------
    ComponentError
        if 'variable' is requested ('variable' is not a modulable parameter).
    """
    from psyneulink.core.components.ports.parameterport import ParameterPortError

    # 'variable' has no ParameterPort; direct the caller to defaults.variable
    if parameter == "variable" or parameter == self.parameters.variable:
        raise ComponentError(
            f"The method '_get_current_parameter_value' is intended for retrieving the current "
            f"value of a modulable parameter; 'variable' is not a modulable parameter. If looking "
            f"for {self.name}'s default variable, try '{self.name}.defaults.variable'."
        )

    # accept either a Parameter object or a parameter name (str);
    # a non-string, non-Parameter argument raises TypeError and is passed through
    try:
        parameter = getattr(self.parameters, parameter)
    # just fail now if string and no corresponding parameter (AttributeError)
    except TypeError:
        pass

    parameter_port_list = None
    try:
        # parameter is SharedParameter and ultimately points to
        # something with a corresponding ParameterPort
        parameter_port_list = parameter.final_source._owner._owner.parameter_ports
    except AttributeError:
        # prefer parameter ports from self over owner
        try:
            parameter_port_list = self._parameter_ports
        except AttributeError:
            try:
                parameter_port_list = self.owner._parameter_ports
            except AttributeError:
                pass

    if parameter_port_list is not None:
        try:
            # the port's value reflects any active modulation
            return parameter_port_list[parameter].parameters.value._get(context)
        # *parameter* string or Parameter didn't correspond to a parameter port
        except TypeError:
            pass
        except ParameterPortError as e:
            # an ambiguous name (multiple matching ports) is a real error
            if 'Multiple ParameterPorts' in str(e):
                raise

    # no ParameterPort found: fall back to the parameter's own stateful value
    return parameter._get(context)

def _increment_execution_count(self, count=1):
self.parameters.execution_count.set(self.execution_count + count, override=True)
return self.execution_count
Expand Down Expand Up @@ -3260,7 +3340,8 @@ def _dict_summary(self):
from psyneulink.core.compositions.composition import Composition
from psyneulink.core.components.ports.port import Port
from psyneulink.core.components.ports.outputport import OutputPort
from psyneulink.core.components.projections.pathway.mappingprojection import MappingProjection
from psyneulink.core.components.ports.parameterport import ParameterPortError
from psyneulink.core.components.functions.transferfunctions import LinearMatrix

def parse_parameter_value(value):
if isinstance(value, (list, tuple)):
Expand Down Expand Up @@ -3336,14 +3417,14 @@ def parse_parameter_value(value):
# class default
val = p.default_value
else:
# special handling because MappingProjection matrix just
# refers to its function's matrix but its default values are
# PNL-specific
# special handling because LinearMatrix default values
# can be PNL-specific keywords. In future, generalize
# this workaround
if (
isinstance(self, MappingProjection)
isinstance(self, LinearMatrix)
and p.name == 'matrix'
):
val = self.function.defaults.matrix
val = self.parameters.matrix.values[None]
elif p.spec is not None:
val = p.spec
else:
Expand All @@ -3354,7 +3435,7 @@ def parse_parameter_value(value):
try:
matching_parameter_port = self.owner.parameter_ports[p.name]

if matching_parameter_port.source is self:
if matching_parameter_port.source._owner._owner is self:
val = {
MODEL_SPEC_ID_PARAMETER_SOURCE: '{0}.{1}.{2}'.format(
self.owner.name,
Expand All @@ -3365,7 +3446,7 @@ def parse_parameter_value(value):
MODEL_SPEC_ID_TYPE: type(val)
}
# ContentAddressableList uses TypeError when key not found
except (AttributeError, TypeError):
except (AttributeError, TypeError, ParameterPortError):
pass

# split parameters designated as PsyNeuLink-specific and
Expand Down Expand Up @@ -3533,11 +3614,13 @@ def _model_spec_parameter_blacklist(self):
COMPONENT_BASE_CLASS = Component


def make_property_mod(param_name):
def make_property_mod(param_name, parameter_port_name=None):
if parameter_port_name is None:
parameter_port_name = param_name

def getter(self):
try:
return self._parameter_ports[param_name].value
return self._parameter_ports[parameter_port_name].value
except TypeError:
raise ComponentError("{} does not have a '{}' ParameterPort."
.format(self.name, param_name))
Expand All @@ -3551,11 +3634,13 @@ def setter(self, value):
return prop


def make_stateful_getter_mod(param_name):
def make_stateful_getter_mod(param_name, parameter_port_name=None):
if parameter_port_name is None:
parameter_port_name = param_name

def getter(self, context=None):
try:
return self._parameter_ports[param_name].parameters.value.get(context)
return self._parameter_ports[parameter_port_name].parameters.value.get(context)
except TypeError:
raise ComponentError("{} does not have a '{}' ParameterPort."
.format(self.name, param_name))
Expand Down
Loading

0 comments on commit 46a5489

Please sign in to comment.