Merge pull request #1748 from PrincetonUniversity/devel
Devel
dillontsmith authored Sep 14, 2020
2 parents 86a09ca + fc6dc94 commit e053667
Showing 19 changed files with 628 additions and 481 deletions.
18 changes: 18 additions & 0 deletions .github/dependabot.yml
@@ -0,0 +1,18 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:

  - package-ecosystem: "github-actions"
    directory: "/" # use top dir
    schedule:
      interval: "daily"
    target-branch: "devel"
    commit-message:
      prefix: "github-actions"
      include: "scope"
    labels:
      - "CI"
14 changes: 7 additions & 7 deletions .github/workflows/pnl-ci.yml
@@ -23,31 +23,31 @@ jobs:
        fetch-depth: 10

    - name: Linux wheels cache
-     uses: actions/cache@v1
+     uses: actions/cache@v2.1.1
      if: startsWith(runner.os, 'Linux')
      with:
        path: ~/.cache/pip/wheels
        key: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-${{ github.sha }}
        restore-keys: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels

    - name: MacOS wheels cache
-     uses: actions/cache@v1
+     uses: actions/cache@v2.1.1
      if: startsWith(runner.os, 'macOS')
      with:
        path: ~/Library/Caches/pip/wheels
        key: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-${{ github.sha }}
        restore-keys: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels

    - name: Windows wheels cache
-     uses: actions/cache@v1
+     uses: actions/cache@v2.1.1
      if: startsWith(runner.os, 'Windows')
      with:
        path: ~\AppData\Local\pip\Cache\wheels
        key: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels-${{ github.sha }}
        restore-keys: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-wheels

    - name: Set up Python ${{ matrix.python-version }}
-     uses: actions/setup-python@v1
+     uses: actions/setup-python@v2.1.2
      with:
        python-version: ${{ matrix.python-version }}
        architecture: ${{ matrix.python-architecture }}
@@ -66,7 +66,7 @@ jobs:

    - name: Windows pytorch
      run: |
-       python -m pip install --upgrade pip
+       python -m pip install --upgrade pip wheel
        pip install torch -f https://download.pytorch.org/whl/cpu/torch_stable.html
      if: startsWith(runner.os, 'Windows') && matrix.python-architecture != 'x86'
    - name: Shared dependencies
@@ -102,7 +102,7 @@ jobs:
      run: pytest --junit-xml=tests_out.xml --verbosity=0 -n auto --maxprocesses=2

    - name: Upload test results
-     uses: actions/upload-artifact@v1
+     uses: actions/upload-artifact@v2.1.4
      with:
        name: test-results-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.python-architecture }}
        path: tests_out.xml
@@ -114,7 +114,7 @@ jobs:
        python setup.py sdist bdist_wheel
      if: contains(github.ref, 'tags')
    - name: Upload dist packages
-     uses: actions/upload-artifact@v1
+     uses: actions/upload-artifact@v2.1.4
      with:
        name: dist-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.python-architecture }}
        path: dist/
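
Note on the cache steps above: each key ends in github.sha, so every commit writes its own wheel cache, while restore-keys matches on the OS/Python/architecture prefix, letting a new commit start from the most recent existing cache instead of an empty one.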
1 change: 1 addition & 0 deletions dev_requirements.txt
@@ -1,6 +1,7 @@
ipykernel
ipython
jupyter
+ nbconvert<6
psyneulink-sphinx-theme
pytest
pytest-benchmark
111 changes: 77 additions & 34 deletions psyneulink/core/components/component.py
@@ -220,9 +220,9 @@
.. _Component_Function_Params:
- * **initial_function_parameters** - the `initial_function_parameters <Component.function>` attribute contains a
-   dictionary of the parameters for the Component's `function <Component.function>` and their values, to be used to
-   instantiate the function. Each entry is the name of a parameter, and its value is the value of that parameter.
+ * **initial_shared_parameters** - the `initial_shared_parameters <Component.function>` attribute contains a
+   dictionary of any parameters for the Component's functions or attributes, to be used to
+   instantiate the corresponding object. Each entry is the name of a parameter, and its value is the value of that parameter.
The parameters for a function can be specified when the Component is created in one of the following ways:
* in an argument of the **Component's constructor** -- if all of the allowable functions for a Component's
@@ -478,6 +478,7 @@
"""
import base64
import collections
+ import copy
import dill
import functools
@@ -509,7 +510,7 @@
from psyneulink.core.scheduling.time import Time, TimeScale
from psyneulink.core.globals.sampleiterator import SampleIterator
from psyneulink.core.globals.parameters import \
-     Defaults, Parameter, ParameterAlias, ParameterError, ParametersBase, copy_parameter_value
+     Defaults, SharedParameter, Parameter, ParameterAlias, ParameterError, ParametersBase, copy_parameter_value
from psyneulink.core.globals.preferences.basepreferenceset import BasePreferenceSet, VERBOSE_PREF
from psyneulink.core.globals.preferences.preferenceset import \
PreferenceEntry, PreferenceLevel, PreferenceSet, _assign_prefs
@@ -1093,10 +1094,6 @@ def __init__(self,
**parameter_values
)

- self.initial_function_parameters = {
-     k: v for k, v in parameter_values.items() if k in self.parameters.names() and getattr(self.parameters, k).function_parameter
- }

var = call_with_pruned_args(
self._handle_default_variable,
default_variable=default_variable,
@@ -1110,9 +1107,37 @@
self.defaults.variable = default_variable
self.parameters.variable._user_specified = True

+ # ASSIGN PREFS
+ _assign_prefs(self, prefs, BasePreferenceSet)

+ # VALIDATE VARIABLE AND PARAMS, AND ASSIGN DEFAULTS

+ # TODO: the below overrides setting default values to None context,
+ # at least in stateless parameters. Possibly more. Below should be
+ # removed eventually

+ # Validate the set passed in
+ self._instantiate_defaults(variable=default_variable,
+                            request_set=parameter_values,  # requested set
+                            assign_missing=True,  # assign missing params from classPreferences to instanceDefaults
+                            target_set=self.defaults.values(),  # destination set to which params are being assigned
+                            default_set=self.class_defaults.values(),  # source set from which missing params are assigned
+                            context=context,
+                            )

+ self.initial_shared_parameters = collections.defaultdict(dict)

+ for param_name, param in self.parameters.values(show_all=True).items():
+     if (
+         isinstance(param, SharedParameter)
+         and not isinstance(param.source, ParameterAlias)
+     ):
+         self.initial_shared_parameters[param.attribute_name][param.shared_parameter_name] = param.default_value

+ # we must know the final variable shape before setting up parameter
+ # Functions or they will mismatch
self._instantiate_parameter_classes(context)
+ self._override_unspecified_shared_parameters(context)
+ self._validate_subfunctions()

if reset_stateful_function_when is not None:
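
For orientation, initial_shared_parameters maps the name of an owned attribute (such as an auxiliary function) to the default values of the parameters shared with it. A minimal standalone sketch of the same collection pattern follows; the attribute and parameter names are hypothetical, not the PsyNeuLink API:

import collections

# hypothetical (attribute_name, shared_parameter_name, default_value) triples,
# standing in for the SharedParameter objects iterated above
shared_param_specs = [
    ("integrator_function", "rate", 0.5),
    ("integrator_function", "noise", 0.0),
    ("termination_measure", "threshold", 1.0),
]

initial_shared_parameters = collections.defaultdict(dict)
for attribute_name, shared_parameter_name, default_value in shared_param_specs:
    initial_shared_parameters[attribute_name][shared_parameter_name] = default_value

# initial_shared_parameters now looks like:
# {"integrator_function": {"rate": 0.5, "noise": 0.0},
#  "termination_measure": {"threshold": 1.0}}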
@@ -1134,9 +1159,6 @@ def __init__(self,
raise ComponentError("{0} is a category class and so must implement a registry".
format(self.__class__.__bases__[0].__name__))

- # ASSIGN PREFS
- _assign_prefs(self, prefs, BasePreferenceSet)

# ASSIGN LOG
from psyneulink.core.globals.log import Log
self.log = Log(owner=self)
@@ -1157,21 +1179,6 @@ def __init__(self,
# assume function is a method on self
pass

- # VALIDATE VARIABLE AND PARAMS, AND ASSIGN DEFAULTS

- # TODO: the below overrides setting default values to None context,
- # at least in stateless parameters. Possibly more. Below should be
- # removed eventually

- # Validate the set passed in
- self._instantiate_defaults(variable=default_variable,
-                            request_set=parameter_values,  # requested set
-                            assign_missing=True,  # assign missing params from classPreferences to instanceDefaults
-                            target_set=self.defaults.values(),  # destination set to which params are being assigned
-                            default_set=self.class_defaults.values(),  # source set from which missing params are assigned
-                            context=context,
-                            )

self._runtime_params_reset = {}

# KDM 11/12/19: this exists to deal with currently unknown attribute
@@ -1990,16 +1997,14 @@ def _is_user_specified(parameter):
if isinstance(val, Function):
if val.owner is not None:
val = copy.deepcopy(val)

val.owner = self
else:
val = copy_parameter_value(
p.default_value,
shared_types=shared_types
)

-     if isinstance(val, Function):
-         val.owner = self
+ if isinstance(val, Function):
+     val.owner = self

p.set(val, context=context, skip_history=True, override=True)

@@ -2020,12 +2025,20 @@ def _instantiate_parameter_classes(self, context=None):
if (
p.name != FUNCTION
and not p.reference
+ and not isinstance(p, SharedParameter)
):
if (
inspect.isclass(val)
and issubclass(val, Function)
):
- val = val()
+ # instantiate class val with all relevant shared parameters
+ # some shared parameters may not be arguments (e.g.
+ # transfer_fct additive_param when function is Identity)
+ val = call_with_pruned_args(
+     val,
+     **self.initial_shared_parameters[p.name]
+ )

val.owner = self
p._set(val, context)

@@ -2040,6 +2053,7 @@ def _update_parameter_class_variables(self, context=None):
p.name != FUNCTION
and not p.reference
and isinstance(val, Function)
+ and not isinstance(p, SharedParameter)
):
try:
parse_variable_method = getattr(
@@ -2080,6 +2094,35 @@ def _update_parameter_class_variables(self, context=None):
context
)

+ def _override_unspecified_shared_parameters(self, context):
+     for param_name, param in self.parameters.values(show_all=True).items():
+         if (
+             isinstance(param, SharedParameter)
+             and not isinstance(param.source, ParameterAlias)
+         ):
+             try:
+                 obj = getattr(self.parameters, param.attribute_name)
+                 shared_objs = [obj.default_value, obj._get(context)]
+             except AttributeError:
+                 obj = getattr(self, param.attribute_name)
+                 shared_objs = [obj]

+             for c in shared_objs:
+                 if isinstance(c, Component):
+                     try:
+                         shared_obj_param = getattr(c.parameters, param.shared_parameter_name)
+                     except AttributeError:
+                         continue

+                     if (
+                         not shared_obj_param._user_specified
+                         and param.primary
+                         and param.default_value is not None
+                     ):
+                         shared_obj_param.default_value = copy.deepcopy(param.default_value)
+                         shared_obj_param._set(copy.deepcopy(param.default_value), context)
+                         shared_obj_param._user_specified = param._user_specified

@handle_external_context()
def reset_params(self, mode=ResetMode.INSTANCE_TO_CLASS, context=None):
"""Reset current and/or instance defaults
@@ -2127,7 +2170,7 @@ def _initialize_from_context(self, context, base_context=Context(execution_id=None),
visited.add(comp)
comp._initialize_from_context(context, base_context, override, visited=visited)

- non_alias_params = [p for p in self.stateful_parameters if not isinstance(p, ParameterAlias)]
+ non_alias_params = [p for p in self.stateful_parameters if not isinstance(p, (ParameterAlias, SharedParameter))]
for param in non_alias_params:
if param.setter is None:
param._initialize_from_context(context, base_context, override)
Expand Down Expand Up @@ -2506,7 +2549,7 @@ def _validate_subfunctions(self):
p.name != FUNCTION # has specialized validation
and isinstance(p.default_value, Function)
and not p.reference
- and not p.function_parameter
+ and not isinstance(p, SharedParameter)
):
# TODO: assert it's not stateful?
function_variable = p.default_value.defaults.variable
@@ -3284,7 +3327,7 @@ def parse_parameter_value(value):
for p in self.parameters:
if (
p.name not in self._model_spec_parameter_blacklist
- and not isinstance(p, ParameterAlias)
+ and not isinstance(p, (ParameterAlias, SharedParameter))
):
if self.initialization_status is ContextFlags.DEFERRED_INIT:
try:
@@ -1097,7 +1097,7 @@ class Parameters(DistributionFunction.Parameters):
starting_point = Parameter(0.0, modulable=True, aliases=[ADDITIVE_PARAM])
threshold = Parameter(1.0, modulable=True)
noise = Parameter(0.5, modulable=True)
- t0 = .200
+ t0 = Parameter(.200, modulable=True)
bias = Parameter(0.5, read_only=True, getter=_DriftDiffusionAnalytical_bias_getter)
# this is read only because conversion is disabled for this function
# this occurs in other places as well
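
Wrapping t0 in a Parameter with modulable=True puts it on the same footing as starting_point, threshold, and noise above; a modulable parameter can presumably be assigned a ParameterPort and modulated at runtime rather than staying fixed at its constructed value.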
23 changes: 14 additions & 9 deletions psyneulink/core/components/functions/learningfunctions.py
@@ -880,6 +880,9 @@ def _function(self,
if learning_rate is not None:
learning_rate_dim = np.array(learning_rate).ndim

+ # KDM 9/3/20: if learning_rate comes from a parameter port, it
+ # will be 1d and will be multiplied twice (variable ->
+ # activities -> distances)
# If learning_rate is a 1d array, multiply it by variable
if learning_rate_dim == 1:
variable = variable * learning_rate
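
The dimensionality check above matters because a learning_rate delivered through a parameter port arrives wrapped in an array even when it is conceptually a scalar. A quick numpy illustration with made-up values:

import numpy as np

variable = np.array([1.0, 2.0, 3.0])
np.array(0.05).ndim          # 0: a plain float learning rate, handled later as a scalar
np.array([0.05]).ndim        # 1: the same value after a parameter port, takes this branch
variable * np.array([0.05])  # broadcasting still gives [0.05, 0.1, 0.15]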
@@ -1020,7 +1023,7 @@ class Parameters(LearningFunction.Parameters):
:type: ``float``
"""
variable = Parameter(np.array([0, 0]), read_only=True, pnl_internal=True, constructor_argument='default_variable')
- learning_rate = 0.05
+ learning_rate = Parameter(0.05, modulable=True)
default_learning_rate = 0.05

def __init__(self,
@@ -1115,17 +1118,19 @@ def _function(self,
variable = np.squeeze(variable)
# MODIFIED 9/21/17 END

+ # Generate the column array from the variable
+ # col = variable.reshape(len(variable),1)
+ col = np.atleast_2d(variable).transpose()

# If learning_rate is a 1d array, multiply it by variable
# KDM 11/21/19: if learning_rate comes from a parameter_port, it will
# be 1 dimensional even if it "should" be a float. This causes test
# failures
+ # KDM 8/17/20: fix by determining col first. learning_rate otherwise
+ # would be multiplied twice
if learning_rate_dim == 1:
variable = variable * learning_rate

- # Generate the column array from the variable
- # col = variable.reshape(len(variable),1)
- col = np.atleast_2d(variable).transpose()

# Calculate weight change matrix
weight_change_matrix = variable * col
# Zero diagonals (i.e., don't allow correlation of a unit with itself to be included)
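
The reordering can be checked numerically: in the old order, variable was scaled by learning_rate before col was built from it, so the weight change matrix carried learning_rate twice; building col first applies it once. A toy check with illustrative values (not PsyNeuLink code):

import numpy as np

learning_rate = np.array([0.1])  # 1d, as if delivered by a parameter port
variable = np.array([1.0, 2.0])

# old order: scale first, then build the column -> rate applied twice
v = variable * learning_rate
wcm_old = v * np.atleast_2d(v).transpose()   # entries scale with 0.1 ** 2

# new order: build the column first, then scale -> rate applied once
col = np.atleast_2d(variable).transpose()
wcm_new = (variable * learning_rate) * col   # entries scale with 0.1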
@@ -1343,16 +1348,16 @@ def _function(self,
variable = np.squeeze(variable)
# MODIFIED 9/21/17 END

- # If learning_rate is a 1d array, multiply it by variable
- if learning_rate_dim == 1:
-     variable = variable * learning_rate

# IMPLEMENTATION NOTE: THE FOLLOWING NEEDS TO BE REPLACED BY THE CONTRASTIVE HEBBIAN LEARNING RULE:

# Generate the column array from the variable
# col = variable.reshape(len(variable),1)
col = convert_to_np_array(variable, 2).transpose()

+ # If learning_rate is a 1d array, multiply it by variable
+ if learning_rate_dim == 1:
+     variable = variable * learning_rate

# Calculate weight change matrix
weight_change_matrix = variable * col
# Zero diagonals (i.e., don't allow correlation of a unit with itself to be included)
