diff --git a/.github/actions/install-pnl/action.yml b/.github/actions/install-pnl/action.yml
index 8571a3293b0..cd4dca7dbe9 100644
--- a/.github/actions/install-pnl/action.yml
+++ b/.github/actions/install-pnl/action.yml
@@ -48,6 +48,17 @@ runs:
[[ ${{ runner.os }} = Windows* ]] && pip install "pywinpty<1" "terminado<0.10"
fi
+ - name: Install updated package
+ if: ${{ startsWith(github.head_ref, 'dependabot/pip') && matrix.pnl-version != 'base' }}
+ shell: bash
+ id: new_package
+ run: |
+ python -m pip install --upgrade pip wheel
+ export NEW_PACKAGE=`echo '${{ github.head_ref }}' | cut -f 4 -d/ | sed 's/-gt.*//' | sed 's/-lt.*//'`
+ echo "::set-output name=new_package::$NEW_PACKAGE"
+ pip install "`echo $NEW_PACKAGE | sed 's/[-_]/./g' | xargs grep *requirements.txt -h -e | head -n1`"
+ pip show "$NEW_PACKAGE" | grep 'Version' | tee new_version.deps
+
- name: Python dependencies
shell: bash
run: |
@@ -66,3 +77,11 @@ runs:
pip cache remove -v $P || true
fi
done
+
+ - name: Check updated package
+ if: ${{ startsWith(github.head_ref, 'dependabot/pip') && matrix.pnl-version != 'base' }}
+ shell: bash
+ run: |
+ pip show ${{ steps.new_package.outputs.new_package }} | grep 'Version' | tee installed_version.deps
+ cmp -s new_version.deps installed_version.deps || echo "::error::Package version restricted by dependencies: ${{ steps.new_package.outputs.new_package }}"
+ diff new_version.deps installed_version.deps
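The two steps above derive the updated package's name from the Dependabot branch name (4th `/`-separated field, with any `-gt…`/`-lt…` constraint suffix stripped), then grep the requirements files for the matching pin to install. A minimal Python sketch of the same parsing, assuming a branch layout like `dependabot/pip/<dir>/<package>-<constraint>` (the exact layout depends on the repo's dependabot config; the branch name below is hypothetical):

```python
def new_package_from_branch(head_ref: str) -> str:
    # 4th '/'-separated field, mirroring `cut -f 4 -d/`
    name = head_ref.split('/')[3]
    # strip the first '-gt'/'-lt' suffix, mirroring sed 's/-gt.*//' etc.
    for marker in ('-gt', '-lt'):
        head, sep, _ = name.partition(marker)
        if sep:
            name = head
    return name

assert new_package_from_branch('dependabot/pip/docs/sphinx-lt-4.2.1') == 'sphinx'
```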
diff --git a/.github/workflows/compare-comment.yml b/.github/workflows/compare-comment.yml
index 61bf6896a5d..15f5e85cf6d 100644
--- a/.github/workflows/compare-comment.yml
+++ b/.github/workflows/compare-comment.yml
@@ -18,7 +18,7 @@ jobs:
steps:
- name: 'Download docs artifacts'
id: docs-artifacts
- uses: actions/github-script@v5
+ uses: actions/github-script@v6
with:
script: |
var artifacts = await github.rest.actions.listWorkflowRunArtifacts({
@@ -70,7 +70,7 @@ jobs:
(diff -r docs-base docs-head && echo 'No differences!' || true) | tee ./result.diff
- name: Post comment with docs diff
- uses: actions/github-script@v5
+ uses: actions/github-script@v6
with:
script: |
var fs = require('fs');
diff --git a/.github/workflows/pnl-ci-docs.yml b/.github/workflows/pnl-ci-docs.yml
index f2396ef7a04..a37c9e7a250 100644
--- a/.github/workflows/pnl-ci-docs.yml
+++ b/.github/workflows/pnl-ci-docs.yml
@@ -65,7 +65,7 @@ jobs:
branch: master
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v3
+ uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
architecture: ${{ matrix.python-architecture }}
@@ -94,17 +94,21 @@ jobs:
- name: Add git tag
# The generated docs include PNL version,
# set it to a fixed value to prevent polluting the diff
+        # This needs to be done after installing PNL
+        # so that it does not interfere with dependency resolution
+ id: add_zero_tag
if: github.event_name == 'pull_request'
- run: git tag --force 'v999.999.999.999'
+ run: git tag --force 'v0.0.0.0'
- name: Build Documentation
run: make -C docs/ html -e SPHINXOPTS="-aE -j auto"
- name: Remove git tag
# The generated docs include PNL version,
- # This was set to a fixed value to prevent polluting the diff
- if: github.event_name == 'pull_request' && always()
- run: git tag -d 'v999.999.999.999'
+ # A special tag was set to a fixed value
+ # to prevent polluting the diff
+ if: steps.add_zero_tag.outcome != 'skipped'
+ run: git tag -d 'v0.0.0.0'
- name: Upload Documentation
uses: actions/upload-artifact@v3
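A minimal sketch of the tag dance above, assuming (as the step comments suggest) that the docs build derives the PNL version from git tags (e.g. via `git describe`); names and flow here are illustrative, not the workflow's exact mechanism:

```python
import subprocess

def git(*args):
    return subprocess.check_output(['git', *args], text=True).strip()

git('tag', '--force', 'v0.0.0.0')   # pin the version string the docs will embed
print(git('describe', '--tags'))    # -> 'v0.0.0.0' on the tagged commit
# ... build docs here; base and head now embed the same version string ...
git('tag', '-d', 'v0.0.0.0')        # clean up so the tag never leaks
```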
@@ -151,7 +155,7 @@ jobs:
ref: gh-pages
- name: Download branch docs
- uses: actions/download-artifact@v2
+ uses: actions/download-artifact@v3
with:
name: Documentation-head-${{ matrix.os }}-${{ matrix.python-version }}-x64
path: _built_docs/${{ github.ref }}
@@ -168,7 +172,7 @@ jobs:
if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/devel' || github.ref == 'refs/heads/docs'
- name: Download main docs
- uses: actions/download-artifact@v2
+ uses: actions/download-artifact@v3
with:
name: Documentation-head-${{ matrix.os }}-${{ matrix.python-version }}-x64
# This overwrites files in current directory
diff --git a/.github/workflows/pnl-ci.yml b/.github/workflows/pnl-ci.yml
index 25227973e1d..b97eaa55ecf 100644
--- a/.github/workflows/pnl-ci.yml
+++ b/.github/workflows/pnl-ci.yml
@@ -22,10 +22,6 @@ jobs:
extra-args: ['']
os: [ubuntu-latest, macos-latest, windows-latest]
include:
- # 3.7 is broken on macos-11, https://github.com/actions/virtual-environments/issues/4230
- - python-version: 3.7
- python-architecture: 'x64'
- os: macos-10.15
# add 32-bit build on windows
- python-version: 3.8
python-architecture: 'x86'
@@ -54,7 +50,7 @@ jobs:
run: git fetch --tags origin master
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v3
+ uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
architecture: ${{ matrix.python-architecture }}
diff --git a/.github/workflows/test-release.yml b/.github/workflows/test-release.yml
index 32b6467d85e..0b7887ea5ee 100644
--- a/.github/workflows/test-release.yml
+++ b/.github/workflows/test-release.yml
@@ -21,7 +21,7 @@ jobs:
uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v3
+ uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
@@ -78,13 +78,13 @@ jobs:
steps:
- name: Download dist files
- uses: actions/download-artifact@v2
+ uses: actions/download-artifact@v3
with:
name: Python-dist-files
path: dist/
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v3
+ uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
@@ -141,7 +141,7 @@ jobs:
steps:
- name: Download dist files
- uses: actions/download-artifact@v2
+ uses: actions/download-artifact@v3
with:
name: Python-dist-files
path: dist/
@@ -175,13 +175,13 @@ jobs:
steps:
- name: Download dist files
- uses: actions/download-artifact@v2
+ uses: actions/download-artifact@v3
with:
name: Python-dist-files
path: dist/
- name: Upload dist files to release
- uses: actions/github-script@v5
+ uses: actions/github-script@v6
with:
script: |
const fs = require('fs')
diff --git a/.gitignore b/.gitignore
index 0b0f973f543..84bfbe22d2a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,9 +1,9 @@
# Created by https://www.gitignore.io/api/osx,python,pycharm
-# Ignore JSON files created in tests/json/
+# Ignore JSON files created in tests/mdf/
# Maybe these should be generated in tmpdir instead
-tests/json/*.json
+tests/mdf/*.json
# Log files created by SLURM jobs in this directory
Scripts/Debug/predator_prey_opt/logs/
diff --git a/conftest.py b/conftest.py
index 8521ae6c014..94a4de81cc4 100644
--- a/conftest.py
+++ b/conftest.py
@@ -33,7 +33,22 @@
def pytest_addoption(parser):
parser.addoption('--{0}'.format(mark_stress_tests), action='store_true', default=False, help='Run {0} tests (long)'.format(mark_stress_tests))
+ parser.addoption('--fp-precision', action='store', default='fp64', choices=['fp32', 'fp64'],
+ help='Set default fp precision for the runtime compiler. Default: fp64')
+
+def pytest_sessionstart(session):
+ precision = session.config.getvalue("--fp-precision")
+ if precision == 'fp64':
+ pnlvm.LLVMBuilderContext.default_float_ty = pnlvm.ir.DoubleType()
+ elif precision == 'fp32':
+ pnlvm.LLVMBuilderContext.default_float_ty = pnlvm.ir.FloatType()
+ else:
+ assert False, "Unsupported precision parameter: {}".format(precision)
+
def pytest_runtest_setup(item):
+ # Check that all 'cuda' tests are also marked 'llvm'
+ assert 'llvm' in item.keywords or 'cuda' not in item.keywords
+
for m in marks_default_skip:
if m in item.keywords and not item.config.getvalue(m):
pytest.skip('{0} tests not requested'.format(m))
@@ -97,6 +112,16 @@ def comp_mode_no_llvm():
# dummy fixture to allow 'comp_mode' filtering
pass
+@pytest.helpers.register
+def llvm_current_fp_precision():
+ float_ty = pnlvm.LLVMBuilderContext.get_current().float_ty
+ if float_ty == pnlvm.ir.DoubleType():
+ return 'fp64'
+ elif float_ty == pnlvm.ir.FloatType():
+ return 'fp32'
+ else:
+ assert False, "Unknown floating point type: {}".format(float_ty)
+
@pytest.helpers.register
def get_comp_execution_modes():
return [pytest.param(pnlvm.ExecutionMode.Python),
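With the new `--fp-precision` option (e.g. `pytest --fp-precision=fp32`), the registered helper lets tests scale numeric tolerances to the compiler's float width. A hypothetical test using it (the helper is exposed through `pytest-helpers-namespace`, already in dev_requirements.txt; the test body is illustrative):

```python
import numpy as np
import pytest

@pytest.mark.llvm   # also required for any 'cuda' test, per the new setup check
def test_results_match_within_precision():
    atol = 1e-10 if pytest.helpers.llvm_current_fp_precision() == 'fp64' else 1e-5
    assert np.isclose(np.exp(np.log(0.3)), 0.3, atol=atol)
```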
diff --git a/cuda_requirements.txt b/cuda_requirements.txt
index 9a6d83d22c4..63e22850e71 100644
--- a/cuda_requirements.txt
+++ b/cuda_requirements.txt
@@ -1 +1 @@
-pycuda >2018, <2022
+pycuda >2018, <2023
diff --git a/dev_requirements.txt b/dev_requirements.txt
index 95b05810996..ad283dfc78d 100644
--- a/dev_requirements.txt
+++ b/dev_requirements.txt
@@ -1,5 +1,5 @@
jupyter<=1.0.0
-pytest<7.1.2
+pytest<7.1.3
pytest-benchmark<3.4.2
pytest-cov<3.0.1
pytest-helpers-namespace<2021.12.30
diff --git a/doc_requirements.txt b/doc_requirements.txt
index 043ea79e043..f4c95bd01e8 100644
--- a/doc_requirements.txt
+++ b/doc_requirements.txt
@@ -1,3 +1,3 @@
-psyneulink-sphinx-theme<1.2.3.1
+psyneulink-sphinx-theme<1.2.4.1
sphinx<4.2.1
sphinx_autodoc_typehints<1.16.0
diff --git a/docs/source/_static/Pathways_fig.svg b/docs/source/_static/Pathways_fig.svg
new file mode 100644
index 00000000000..a13eea7854f
--- /dev/null
+++ b/docs/source/_static/Pathways_fig.svg
@@ -0,0 +1,2489 @@
+[... 2,489 lines of SVG markup elided ...]
diff --git a/psyneulink/core/components/component.py b/psyneulink/core/components/component.py
index bbe4c760afe..e62540ece0f 100644
--- a/psyneulink/core/components/component.py
+++ b/psyneulink/core/components/component.py
@@ -513,7 +513,7 @@
from psyneulink.core import llvm as pnlvm
from psyneulink.core.globals.context import \
Context, ContextError, ContextFlags, INITIALIZATION_STATUS_FLAGS, _get_time, handle_external_context
-from psyneulink.core.globals.json import JSONDumpable
+from psyneulink.core.globals.mdf import MDFSerializable
from psyneulink.core.globals.keywords import \
CONTEXT, CONTROL_PROJECTION, DEFERRED_INITIALIZATION, EXECUTE_UNTIL_FINISHED, \
FUNCTION, FUNCTION_PARAMS, INIT_FULL_EXECUTE_METHOD, INPUT_PORTS, \
@@ -525,7 +525,7 @@
RESET_STATEFUL_FUNCTION_WHEN, VALUE, VARIABLE
from psyneulink.core.globals.log import LogCondition
from psyneulink.core.globals.parameters import \
- Defaults, SharedParameter, Parameter, ParameterAlias, ParameterError, ParametersBase, copy_parameter_value
+ Defaults, SharedParameter, Parameter, ParameterAlias, ParameterError, ParametersBase, check_user_specified, copy_parameter_value
from psyneulink.core.globals.preferences.basepreferenceset import BasePreferenceSet, VERBOSE_PREF
from psyneulink.core.globals.preferences.preferenceset import \
PreferenceLevel, PreferenceSet, _assign_prefs
@@ -724,7 +724,7 @@ def class_defaults(self):
return self.defaults
-class Component(JSONDumpable, metaclass=ComponentsMeta):
+class Component(MDFSerializable, metaclass=ComponentsMeta):
"""
Component( \
default_variable=None, \
@@ -909,7 +909,7 @@ class Component(JSONDumpable, metaclass=ComponentsMeta):
standard_constructor_args = [RESET_STATEFUL_FUNCTION_WHEN, EXECUTE_UNTIL_FINISHED, MAX_EXECUTIONS_BEFORE_FINISHED]
- # helper attributes for JSON model spec
+ # helper attributes for MDF model spec
_model_spec_id_parameters = 'parameters'
_model_spec_id_stateful_parameters = 'stateful_parameters'
@@ -1084,10 +1084,9 @@ def _parse_modulable(self, param_name, param_value):
    # ensuring that assignment by one instance will not affect the value of others.
name = None
- _deepcopy_shared_keys = frozenset([
- '_init_args',
- ])
+ _deepcopy_shared_keys = frozenset([])
+ @check_user_specified
def __init__(self,
default_variable,
param_defaults,
@@ -1303,6 +1302,9 @@ def __deepcopy__(self, memo):
newone.parameters._owner = newone
newone.defaults._owner = newone
+ for p in newone.parameters:
+ p._owner = newone.parameters
+
# by copying, this instance is no longer "inherent" to a single
# 'import psyneulink' call
newone._is_pnl_inherent = False
@@ -1331,6 +1333,10 @@ def _get_compilation_state(self):
if hasattr(self, 'nodes'):
whitelist.add("num_executions")
+ # Drop combination function params from RTM if not needed
+ if getattr(self.parameters, 'has_recurrent_input_port', False):
+ blacklist.update(['combination_function'])
+
def _is_compilation_state(p):
#FIXME: This should use defaults instead of 'p.get'
return p.name not in blacklist and \
@@ -1362,7 +1368,7 @@ def _convert(p):
state['buffer'], state['uinteger'], state['buffer_pos'],
state['has_uint32'], x.used_seed[0]))
elif isinstance(x, Time):
- val = tuple(getattr(x, graph_scheduler.time._time_scale_to_attr_str(t)) for t in TimeScale)
+ val = tuple(x._get_by_time_scale(t) for t in TimeScale)
elif isinstance(x, Component):
return x._get_state_initializer(context)
elif isinstance(x, ContentAddressableList):
@@ -1423,6 +1429,10 @@ def _get_compilation_params(self):
# "has_initializers" is only used by RTM
blacklist.update(["has_initializers"])
+ # Drop combination function params from RTM if not needed
+ if getattr(self.parameters, 'has_recurrent_input_port', False):
+ blacklist.update(['combination_function'])
+
def _is_compilation_param(p):
if p.name not in blacklist and not isinstance(p, (ParameterAlias, SharedParameter)):
#FIXME: this should use defaults
@@ -2015,32 +2025,30 @@ def _initialize_parameters(self, context=None, **param_defaults):
}
if param_defaults is not None:
- # Exclude any function_params from the items to set on this Component
- # because these should just be pointers to the parameters of the same
- # name on this Component's function
- # Exclude any pass parameters whose value is None (assume this means "use the normal default")
- d = {
- k: v for (k, v) in param_defaults.items()
- if (
- (
- k not in defaults
- and k not in alias_names
- )
- or v is not None
- )
- }
- for p in d:
+ for name, value in copy.copy(param_defaults).items():
try:
- parameter_obj = getattr(self.parameters, p)
+ parameter_obj = getattr(self.parameters, name)
except AttributeError:
- # p in param_defaults does not correspond to a Parameter
+ # name in param_defaults does not correspond to a Parameter
continue
- if d[p] is not None:
+ if (
+ name not in self._user_specified_args
+ and parameter_obj.constructor_argument not in self._user_specified_args
+ ):
+ continue
+
+ if (
+ (
+ name in self._user_specified_args
+ or parameter_obj.constructor_argument in self._user_specified_args
+ )
+ and (value is not None or parameter_obj.specify_none)
+ ):
parameter_obj._user_specified = True
if parameter_obj.structural:
- parameter_obj.spec = d[p]
+ parameter_obj.spec = value
if parameter_obj.modulable:
# later, validate this
@@ -2049,17 +2057,18 @@ def _initialize_parameters(self, context=None, **param_defaults):
parse=True,
modulable=True
)
- parsed = modulable_param_parser(p, d[p])
+ parsed = modulable_param_parser(name, value)
- if parsed is not d[p]:
+ if parsed is not value:
# we have a modulable param spec
- parameter_obj.spec = d[p]
- d[p] = parsed
- param_defaults[p] = parsed
+ parameter_obj.spec = value
+ value = parsed
+ param_defaults[name] = parsed
except AttributeError:
pass
- defaults.update(d)
+ if value is not None or parameter_obj.specify_none:
+ defaults[name] = value
for k in defaults:
defaults[k] = copy_parameter_value(
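The rewritten `_initialize_parameters` consults `self._user_specified_args`, which is populated by the `check_user_specified` decorator added throughout this diff. A minimal sketch of what such a decorator might record, as assumed behavior rather than PNL's actual implementation:

```python
import functools
import inspect

def check_user_specified(func):
    """Record which constructor arguments the caller passed explicitly (sketch)."""
    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        bound = inspect.signature(func).bind(self, *args, **kwargs)
        specified = set()
        for name, value in bound.arguments.items():
            param = bound.signature.parameters[name]
            if param.kind is inspect.Parameter.VAR_KEYWORD:
                specified.update(value)   # flatten anything passed via **kwargs
            elif name != 'self':
                specified.add(name)
        self._user_specified_args = specified
        return func(self, *args, **kwargs)
    return wrapper
```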
@@ -3712,7 +3721,9 @@ def parse_parameter_value(value, no_expand_components=False, functions_as_dill=F
else:
try:
value = value.as_mdf_model(simple_edge_format=False)
- except TypeError:
+ except TypeError as e:
+ if "got an unexpected keyword argument 'simple_edge_format'" not in str(e):
+ raise
value = value.as_mdf_model()
elif isinstance(value, ComponentsMeta):
value = value.__name__
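The narrowed `except` above only swallows the one expected failure (a callee that doesn't accept `simple_edge_format`) and re-raises `TypeError`s raised inside `as_mdf_model` itself. The general form of that pattern, as a small standalone sketch:

```python
# Only swallow the TypeError that signals "callee doesn't accept this keyword";
# re-raise anything else so real bugs aren't silently retried.
def call_with_optional_kwarg(fn, **optional):
    try:
        return fn(**optional)
    except TypeError as e:
        if "got an unexpected keyword argument" not in str(e):
            raise
        return fn()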
diff --git a/psyneulink/core/components/functions/function.py b/psyneulink/core/components/functions/function.py
index cda41d037bf..968cd52a77c 100644
--- a/psyneulink/core/components/functions/function.py
+++ b/psyneulink/core/components/functions/function.py
@@ -159,13 +159,13 @@
IDENTITY_MATRIX, INVERSE_HOLLOW_MATRIX, NAME, PREFERENCE_SET_NAME, RANDOM_CONNECTIVITY_MATRIX, VALUE, VARIABLE,
MODEL_SPEC_ID_METADATA, MODEL_SPEC_ID_MDF_VARIABLE
)
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import REPORT_OUTPUT_PREF, is_pref_set
from psyneulink.core.globals.preferences.preferenceset import PreferenceEntry, PreferenceLevel
from psyneulink.core.globals.registry import register_category
from psyneulink.core.globals.utilities import (
convert_to_np_array, get_global_seed, is_instance_or_subclass, object_has_single_value, parameter_spec, parse_valid_identifier, safe_len,
- SeededRandomState, contains_type
+ SeededRandomState, contains_type, is_numeric
)
__all__ = [
@@ -605,6 +605,7 @@ def _validate_changes_shape(self, param):
# Note: the following enforce encoding as 1D np.ndarrays (one array per variable)
variableEncodingDim = 1
+ @check_user_specified
@abc.abstractmethod
def __init__(
self,
@@ -897,7 +898,7 @@ def as_mdf_model(self):
if typ not in mdf_functions.mdf_functions:
warnings.warn(f'{typ} is not an MDF standard function, this is likely to produce an incompatible model.')
- model.function = {typ: parameters[self._model_spec_id_parameters]}
+ model.function = typ
return model
@@ -995,6 +996,7 @@ class Manner(Enum):
# These are used both to type-cast the params, and as defaults if none are assigned
    # in the initialization call or later (either via _instantiate_defaults or during a function call)
+ @check_user_specified
def __init__(self,
default_variable=None,
propensity=10.0,
@@ -1145,6 +1147,7 @@ class Parameters(Function_Base.Parameters):
REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE),
}
+ @check_user_specified
@tc.typecheck
def __init__(self,
function,
@@ -1185,7 +1188,14 @@ def get_matrix(specification, rows=1, cols=1, context=None):
# Matrix provided (and validated in _validate_params); convert to array
if isinstance(specification, (list, np.matrix)):
- return convert_to_np_array(specification)
+ # # MODIFIED 4/9/22 OLD:
+ # return convert_to_np_array(specification)
+ # MODIFIED 4/9/22 NEW:
+ if is_numeric(specification):
+ return convert_to_np_array(specification)
+ else:
+ return
+ # MODIFIED 4/9/22 END
if isinstance(specification, np.ndarray):
if specification.ndim == 2:
diff --git a/psyneulink/core/components/functions/nonstateful/combinationfunctions.py b/psyneulink/core/components/functions/nonstateful/combinationfunctions.py
index e91fd02a118..be28b1d62eb 100644
--- a/psyneulink/core/components/functions/nonstateful/combinationfunctions.py
+++ b/psyneulink/core/components/functions/nonstateful/combinationfunctions.py
@@ -45,7 +45,7 @@
PREFERENCE_SET_NAME, VARIABLE
from psyneulink.core.globals.utilities import convert_to_np_array, is_numeric, np_array_less_than_2d, parameter_spec
from psyneulink.core.globals.context import ContextFlags
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import \
REPORT_OUTPUT_PREF, is_pref_set, PreferenceEntry, PreferenceLevel
@@ -201,6 +201,7 @@ class Parameters(CombinationFunction.Parameters):
offset = Parameter(0.0, modulable=True, aliases=[ADDITIVE_PARAM])
changes_shape = Parameter(True, stateful=False, loggable=False, pnl_internal=True)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -420,6 +421,7 @@ class Parameters(CombinationFunction.Parameters):
scale = Parameter(1.0, modulable=True, aliases=[MULTIPLICATIVE_PARAM])
offset = Parameter(0.0, modulable=True, aliases=[ADDITIVE_PARAM])
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -723,6 +725,7 @@ class Parameters(CombinationFunction.Parameters):
offset = Parameter(0.0, modulable=True, aliases=[ADDITIVE_PARAM])
changes_shape = Parameter(True, stateful=False, loggable=False, pnl_internal=True)
+ @check_user_specified
@tc.typecheck
def __init__(self,
# weights: tc.optional(parameter_spec)=None,
@@ -1165,6 +1168,7 @@ class Parameters(CombinationFunction.Parameters):
scale = Parameter(1.0, modulable=True, aliases=[MULTIPLICATIVE_PARAM])
offset = Parameter(0.0, modulable=True, aliases=[ADDITIVE_PARAM])
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -1689,6 +1693,7 @@ class Parameters(CombinationFunction.Parameters):
scale = Parameter(1.0, modulable=True, aliases=[MULTIPLICATIVE_PARAM])
offset = Parameter(0.0, modulable=True, aliases=[ADDITIVE_PARAM])
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -1948,6 +1953,7 @@ class Parameters(CombinationFunction.Parameters):
variable = Parameter(np.array([[1], [1]]), pnl_internal=True, constructor_argument='default_variable')
gamma = Parameter(1.0, modulable=True)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
diff --git a/psyneulink/core/components/functions/nonstateful/distributionfunctions.py b/psyneulink/core/components/functions/nonstateful/distributionfunctions.py
index 91b255b14d4..b8a64bc1510 100644
--- a/psyneulink/core/components/functions/nonstateful/distributionfunctions.py
+++ b/psyneulink/core/components/functions/nonstateful/distributionfunctions.py
@@ -39,7 +39,7 @@
from psyneulink.core.globals.utilities import convert_to_np_array, parameter_spec
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
__all__ = [
'DistributionFunction', 'DRIFT_RATE', 'DRIFT_RATE_VARIABILITY', 'DriftDiffusionAnalytical', 'ExponentialDist',
@@ -159,6 +159,7 @@ class Parameters(DistributionFunction.Parameters):
random_state = Parameter(None, loggable=False, getter=_random_state_getter, dependencies='seed')
seed = Parameter(DEFAULT_SEED, modulable=True, fallback_default=True, setter=_seed_setter)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -341,6 +342,7 @@ class Parameters(DistributionFunction.Parameters):
mean = Parameter(0.0, modulable=True, aliases=[ADDITIVE_PARAM])
standard_deviation = Parameter(1.0, modulable=True, aliases=[MULTIPLICATIVE_PARAM])
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -467,6 +469,7 @@ class Parameters(DistributionFunction.Parameters):
random_state = Parameter(None, loggable=False, getter=_random_state_getter, dependencies='seed')
seed = Parameter(DEFAULT_SEED, modulable=True, fallback_default=True, setter=_seed_setter)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -593,6 +596,7 @@ class Parameters(DistributionFunction.Parameters):
random_state = Parameter(None, loggable=False, getter=_random_state_getter, dependencies='seed')
seed = Parameter(DEFAULT_SEED, modulable=True, fallback_default=True, setter=_seed_setter)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -750,6 +754,7 @@ class Parameters(DistributionFunction.Parameters):
scale = Parameter(1.0, modulable=True, aliases=[MULTIPLICATIVE_PARAM])
dist_shape = Parameter(1.0, modulable=True, aliases=[ADDITIVE_PARAM])
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -884,6 +889,7 @@ class Parameters(DistributionFunction.Parameters):
scale = Parameter(1.0, modulable=True, aliases=[MULTIPLICATIVE_PARAM])
mean = Parameter(1.0, modulable=True, aliases=[ADDITIVE_PARAM])
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -1120,6 +1126,7 @@ class Parameters(DistributionFunction.Parameters):
read_only=True
)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
diff --git a/psyneulink/core/components/functions/nonstateful/learningfunctions.py b/psyneulink/core/components/functions/nonstateful/learningfunctions.py
index c00959d8f6b..c4727f52628 100644
--- a/psyneulink/core/components/functions/nonstateful/learningfunctions.py
+++ b/psyneulink/core/components/functions/nonstateful/learningfunctions.py
@@ -39,7 +39,7 @@
CONTRASTIVE_HEBBIAN_FUNCTION, TDLEARNING_FUNCTION, LEARNING_FUNCTION_TYPE, LEARNING_RATE, \
KOHONEN_FUNCTION, GAUSSIAN, LINEAR, EXPONENTIAL, HEBBIAN_FUNCTION, RL_FUNCTION, BACKPROPAGATION_FUNCTION, MATRIX, \
MSE, SSE
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.utilities import is_numeric, scalar_distance, convert_to_np_array
@@ -448,6 +448,7 @@ class Parameters(LearningFunction.Parameters):
gamma_size_n = 1
gamma_size_prior = 1
+ @check_user_specified
def __init__(self,
default_variable=None,
mu_0=None,
@@ -774,6 +775,7 @@ def _validate_distance_function(self, distance_function):
default_learning_rate = 0.05
+ @check_user_specified
def __init__(self,
default_variable=None,
# learning_rate: tc.optional(tc.optional(parameter_spec)) = None,
@@ -1045,6 +1047,7 @@ class Parameters(LearningFunction.Parameters):
modulable=True)
default_learning_rate = 0.05
+ @check_user_specified
def __init__(self,
default_variable=None,
learning_rate=None,
@@ -1278,6 +1281,7 @@ class Parameters(LearningFunction.Parameters):
default_learning_rate = 0.05
+ @check_user_specified
def __init__(self,
default_variable=None,
# learning_rate: tc.optional(tc.optional(parameter_spec)) = None,
@@ -1585,6 +1589,7 @@ class Parameters(LearningFunction.Parameters):
read_only=True
)
+ @check_user_specified
def __init__(self,
default_variable=None,
# learning_rate: tc.optional(tc.optional(parameter_spec)) = None,
@@ -1934,6 +1939,7 @@ class Parameters(LearningFunction.Parameters):
default_learning_rate = 1.0
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -2175,6 +2181,7 @@ class TDLearning(Reinforcement):
"""
componentName = TDLEARNING_FUNCTION
+ @check_user_specified
def __init__(self,
default_variable=None,
learning_rate=None,
diff --git a/psyneulink/core/components/functions/nonstateful/objectivefunctions.py b/psyneulink/core/components/functions/nonstateful/objectivefunctions.py
index 286cf63a86e..1e8ac37f370 100644
--- a/psyneulink/core/components/functions/nonstateful/objectivefunctions.py
+++ b/psyneulink/core/components/functions/nonstateful/objectivefunctions.py
@@ -33,7 +33,7 @@
DEFAULT_VARIABLE, DIFFERENCE, DISTANCE_FUNCTION, DISTANCE_METRICS, DistanceMetrics, \
ENERGY, ENTROPY, EUCLIDEAN, HOLLOW_MATRIX, MATRIX, MAX_ABS_DIFF, \
NORMED_L0_SIMILARITY, OBJECTIVE_FUNCTION_TYPE, SIZE, STABILITY_FUNCTION
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.utilities import is_distance_metric, safe_len, convert_to_np_array
from psyneulink.core.globals.utilities import is_iterable
@@ -206,6 +206,7 @@ class Parameters(ObjectiveFunction.Parameters):
transfer_fct = Parameter(None, stateful=False, loggable=False)
normalize = Parameter(False, stateful=False)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -558,6 +559,7 @@ class Energy(Stability):
specifies the `PreferenceSet` for the Function (see `prefs ` for details).
"""
+ @check_user_specified
def __init__(self,
default_variable=None,
size=None,
@@ -667,6 +669,7 @@ class Entropy(Stability):
specifies the `PreferenceSet` for the Function (see `prefs ` for details).
"""
+ @check_user_specified
def __init__(self,
default_variable=None,
normalize:bool=None,
@@ -779,6 +782,7 @@ class Parameters(ObjectiveFunction.Parameters):
variable = Parameter(np.array([[0], [0]]), read_only=True, pnl_internal=True, constructor_argument='default_variable')
metric = Parameter(DIFFERENCE, stateful=False)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
diff --git a/psyneulink/core/components/functions/nonstateful/optimizationfunctions.py b/psyneulink/core/components/functions/nonstateful/optimizationfunctions.py
index 1f70a337c64..df8d182577c 100644
--- a/psyneulink/core/components/functions/nonstateful/optimizationfunctions.py
+++ b/psyneulink/core/components/functions/nonstateful/optimizationfunctions.py
@@ -48,7 +48,7 @@
from psyneulink.core.globals.keywords import \
BOUNDS, GRADIENT_OPTIMIZATION_FUNCTION, GRID_SEARCH_FUNCTION, GAUSSIAN_PROCESS_FUNCTION, \
OPTIMIZATION_FUNCTION_TYPE, OWNER, VALUE, VARIABLE
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.sampleiterator import SampleIterator
from psyneulink.core.globals.utilities import call_with_pruned_args
@@ -404,6 +404,7 @@ class Parameters(Function_Base.Parameters):
saved_samples = Parameter([], read_only=True, pnl_internal=True)
saved_values = Parameter([], read_only=True, pnl_internal=True)
+ @check_user_specified
@tc.typecheck
def __init__(
self,
@@ -623,26 +624,26 @@ def _evaluate(self, variable=None, context=None, params=None):
assert all([not getattr(self.parameters, x)._user_specified for x in self._unspecified_args])
self._unspecified_args = []
- # Get initial sample in case it is needed by _search_space_evaluate (e.g., for gradient initialization)
- initial_sample = self._check_args(variable=variable, context=context, params=params)
- try:
- initial_value = self.owner.objective_mechanism.parameters.value._get(context)
- except AttributeError:
- initial_value = 0
-
# EVALUATE ALL SAMPLES IN SEARCH SPACE
# Evaluate all estimates of all samples in search_space
- # If execution mode is not Python and search_space is static, use parallelized evaluation:
- if (self.owner and self.owner.parameters.comp_execution_mode._get(context) != 'Python' and
- all(isinstance(sample_iterator.start, Number) and isinstance(sample_iterator.stop, Number)
- for sample_iterator in self.search_space)):
- # FIX: NEED TO FIX THIS ONCE _grid_evaluate RETURNS all_samples
- all_samples = []
+ # Run compiled mode if requested by parameter and everything is initialized
+ if self.owner and self.owner.parameters.comp_execution_mode._get(context) != 'Python' and \
+ ContextFlags.PROCESSING in context.flags:
+ all_samples = [s for s in itertools.product(*self.search_space)]
all_values, num_evals = self._grid_evaluate(self.owner, context)
+ assert len(all_values) == num_evals
+ assert len(all_samples) == num_evals
last_sample = last_value = None
# Otherwise, default sequential sampling
else:
+ # Get initial sample in case it is needed by _search_space_evaluate (e.g., for gradient initialization)
+ initial_sample = self._check_args(variable=variable, context=context, params=params)
+ try:
+ initial_value = self.owner.objective_mechanism.parameters.value._get(context)
+ except AttributeError:
+ initial_value = 0
+
last_sample, last_value, all_samples, all_values = self._sequential_evaluate(initial_sample,
initial_value,
context)
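In the compiled branch above, each `SampleIterator` enumerates one search dimension, and their Cartesian product is the full grid; the asserts check that `_grid_evaluate` returned exactly one value per grid point. The alignment in miniature (plain Python stand-ins, not PNL types):

```python
import itertools

search_space = [(0.0, 0.5, 1.0), (1, 2)]      # stand-ins for SampleIterators
all_samples = list(itertools.product(*search_space))
assert len(all_samples) == 6                  # one compiled evaluation per sample
```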
@@ -654,6 +655,11 @@ def _evaluate(self, variable=None, context=None, params=None):
self.parameters.randomization_dimension._get(context) and \
self.parameters.num_estimates._get(context) is not None:
+ # FIXME: This is easy to support in hybrid mode. We just need to convert ctype results
+ # returned from _grid_evaluate to numpy
+ assert not self.owner or self.owner.parameters.comp_execution_mode._get(context) == 'Python', \
+ "Aggregation function not supported in compiled mode!"
+
            # Reshape all the values we encountered so that those corresponding
            # to the same parameter values are grouped together and can be aggregated.
all_values = np.reshape(all_values, (-1, self.parameters.num_estimates._get(context)))
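With `num_estimates` randomized estimates per candidate sample, the reshape puts each sample's estimates on one row so an aggregation function (e.g. a mean) can reduce across them. A small illustration, assuming estimates for a given sample are contiguous in `all_values`:

```python
import numpy as np

all_values = np.arange(6.0)                    # 3 samples x 2 estimates, flattened
per_sample = np.reshape(all_values, (-1, 2))   # rows: samples, cols: estimates
assert per_sample.mean(axis=1).shape == (3,)   # one aggregated value per sample
```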
@@ -752,6 +758,17 @@ def _sequential_evaluate(self, initial_sample, initial_value, context):
def _grid_evaluate(self, ocm, context):
"""Helper method for evaluation of a grid of samples from search space via LLVM backends."""
+ # If execution mode is not Python, the search space has to be static
+ def _is_static(it:SampleIterator):
+ if isinstance(it.start, Number) and isinstance(it.stop, Number):
+ return True
+
+ if isinstance(it.generator, list):
+ return True
+
+ return False
+
+ assert all(_is_static(sample_iterator) for sample_iterator in self.search_space)
assert ocm is ocm.agent_rep.controller
# Compiled evaluate expects the same variable as mech function
variable = [input_port.parameters.value.get(context) for input_port in ocm.input_ports]
@@ -767,7 +784,6 @@ def _grid_evaluate(self, ocm, context):
else:
assert False, f"Unknown execution mode for {ocm.name}: {execution_mode}."
- # FIX: RETURN SHOULD BE: outcomes, all_samples (THEN FIX CALL IN _function)
return outcomes, num_evals
def _report_value(self, new_value):
@@ -1084,6 +1100,7 @@ def _parse_direction(self, direction):
else:
return -1
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -1486,6 +1503,7 @@ class Parameters(OptimizationFunction.Parameters):
# TODO: should save_values be in the constructor if it's ignored?
# is False or True the correct value?
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -1805,30 +1823,6 @@ def _gen_llvm_function_body(self, ctx, builder, params, state_features, arg_in,
builder.store(builder.load(min_value_ptr), out_value_ptr)
return builder
- def _run_grid(self, ocm, variable, context):
-
- # "ct" => c-type variables
- ct_values, num_evals = self._grid_evaluate(ocm, context)
-
- assert len(ct_values) == num_evals
- # Reduce array of values to min/max
- # select_min params are:
- # params, state, min_sample_ptr, sample_ptr, min_value_ptr, value_ptr, opt_count_ptr, count
- bin_func = pnlvm.LLVMBinaryFunction.from_obj(self, tags=frozenset({"select_min"}))
- ct_param = bin_func.byref_arg_types[0](*self._get_param_initializer(context))
- ct_state = bin_func.byref_arg_types[1](*self._get_state_initializer(context))
- ct_opt_sample = bin_func.byref_arg_types[2](float("NaN"))
- ct_alloc = None # NULL for samples
- ct_opt_value = bin_func.byref_arg_types[4]()
- ct_opt_count = bin_func.byref_arg_types[6](0)
- ct_start = bin_func.c_func.argtypes[7](0)
- ct_stop = bin_func.c_func.argtypes[8](len(ct_values))
-
- bin_func(ct_param, ct_state, ct_opt_sample, ct_alloc, ct_opt_value,
- ct_values, ct_opt_count, ct_start, ct_stop)
-
- return np.ctypeslib.as_array(ct_opt_sample), ct_opt_value.value, np.ctypeslib.as_array(ct_values)
-
def _function(self,
variable=None,
context=None,
@@ -1953,15 +1947,37 @@ def _function(self,
"PROGRAM ERROR: bad value for {} arg of {}: {}, {}". \
format(repr(DIRECTION), self.name, direction)
- ocm = self._get_optimized_controller()
+ # Evaluate objective_function for each sample
+ last_sample, last_value, all_samples, all_values = self._evaluate(
+ variable=variable,
+ context=context,
+ params=params,
+ )
# Compiled version
+ ocm = self._get_optimized_controller()
if ocm is not None and ocm.parameters.comp_execution_mode._get(context) in {"PTX", "LLVM"}:
- opt_sample, opt_value, all_values = self._run_grid(ocm, variable, context)
- # This should not be evaluated unless needed
- all_samples = [s for s in itertools.product(*self.search_space)]
- value_optimal = opt_value
- sample_optimal = opt_sample
+
+ # Reduce array of values to min/max
+ # select_min params are:
+ # params, state, min_sample_ptr, sample_ptr, min_value_ptr, value_ptr, opt_count_ptr, count
+ bin_func = pnlvm.LLVMBinaryFunction.from_obj(self, tags=frozenset({"select_min"}))
+ ct_param = bin_func.byref_arg_types[0](*self._get_param_initializer(context))
+ ct_state = bin_func.byref_arg_types[1](*self._get_state_initializer(context))
+ ct_opt_sample = bin_func.byref_arg_types[2](float("NaN"))
+ ct_alloc = None # NULL for samples
+ ct_values = all_values
+ ct_opt_value = bin_func.byref_arg_types[4]()
+ ct_opt_count = bin_func.byref_arg_types[6](0)
+ ct_start = bin_func.c_func.argtypes[7](0)
+ ct_stop = bin_func.c_func.argtypes[8](len(ct_values))
+
+ bin_func(ct_param, ct_state, ct_opt_sample, ct_alloc, ct_opt_value,
+ ct_values, ct_opt_count, ct_start, ct_stop)
+
+ value_optimal = ct_opt_value.value
+ sample_optimal = np.ctypeslib.as_array(ct_opt_sample)
+ all_values = np.ctypeslib.as_array(ct_values)
# These are normally stored in the parent function (OptimizationFunction).
# Since we didn't call super()._function like the python path,
@@ -1974,12 +1990,6 @@ def _function(self,
# Python version
else:
- # Evaluate objective_function for each sample
- last_sample, last_value, all_samples, all_values = self._evaluate(
- variable=variable,
- context=context,
- params=params,
- )
if all_values.size != all_samples.shape[-1]:
raise ValueError(f"OptimizationFunction Error: {self}._evaluate returned mismatched sizes for "
@@ -2198,6 +2208,7 @@ class Parameters(OptimizationFunction.Parameters):
# TODO: should save_values be in the constructor if it's ignored?
# is False or True the correct value?
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -2452,12 +2463,13 @@ class Parameters(OptimizationFunction.Parameters):
:default value: True
:type: ``bool``
"""
- variable = Parameter([[0], [0]], read_only=True)
+ variable = Parameter([[0], [0]], read_only=True, constructor_argument='default_variable')
random_state = Parameter(None, loggable=False, getter=_random_state_getter, dependencies='seed')
seed = Parameter(DEFAULT_SEED, modulable=True, fallback_default=True, setter=_seed_setter)
save_samples = True
save_values = True
+ @check_user_specified
@tc.typecheck
def __init__(self,
priors,
diff --git a/psyneulink/core/components/functions/nonstateful/selectionfunctions.py b/psyneulink/core/components/functions/nonstateful/selectionfunctions.py
index aff4dc5764f..626f1ade454 100644
--- a/psyneulink/core/components/functions/nonstateful/selectionfunctions.py
+++ b/psyneulink/core/components/functions/nonstateful/selectionfunctions.py
@@ -36,7 +36,7 @@
from psyneulink.core.globals.keywords import \
MAX_VAL, MAX_ABS_VAL, MAX_INDICATOR, MAX_ABS_INDICATOR, MIN_VAL, MIN_ABS_VAL, MIN_INDICATOR, MIN_ABS_INDICATOR, \
MODE, ONE_HOT_FUNCTION, PROB, PROB_INDICATOR, SELECTION_FUNCTION_TYPE, PREFERENCE_SET_NAME
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import \
REPORT_OUTPUT_PREF, PreferenceEntry, PreferenceLevel, is_pref_set
@@ -201,6 +201,7 @@ def _validate_mode(self, mode):
# returns error message
return 'not one of {0}'.format(options)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
diff --git a/psyneulink/core/components/functions/nonstateful/transferfunctions.py b/psyneulink/core/components/functions/nonstateful/transferfunctions.py
index 5bce6445bba..eef16ca3f36 100644
--- a/psyneulink/core/components/functions/nonstateful/transferfunctions.py
+++ b/psyneulink/core/components/functions/nonstateful/transferfunctions.py
@@ -70,7 +70,7 @@
RATE, RECEIVER, RELU_FUNCTION, SCALE, SLOPE, SOFTMAX_FUNCTION, STANDARD_DEVIATION, SUM, \
TRANSFER_FUNCTION_TYPE, TRANSFER_WITH_COSTS_FUNCTION, VARIANCE, VARIABLE, X_0, PREFERENCE_SET_NAME
from psyneulink.core.globals.parameters import \
- FunctionParameter, Parameter, get_validator_by_function
+ FunctionParameter, Parameter, get_validator_by_function, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import \
REPORT_OUTPUT_PREF, PreferenceEntry, PreferenceLevel, is_pref_set
from psyneulink.core.globals.utilities import parameter_spec, safe_len
@@ -197,6 +197,7 @@ class Identity(TransferFunction): # -------------------------------------------
REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE),
}
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -364,6 +365,7 @@ class Parameters(TransferFunction.Parameters):
slope = Parameter(1.0, modulable=True, aliases=[MULTIPLICATIVE_PARAM])
intercept = Parameter(0.0, modulable=True, aliases=[ADDITIVE_PARAM])
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -625,6 +627,7 @@ class Parameters(TransferFunction.Parameters):
offset = Parameter(0.0, modulable=True)
bounds = (0, None)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -915,6 +918,7 @@ class Parameters(TransferFunction.Parameters):
scale = Parameter(1.0, modulable=True)
bounds = (0, 1)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -1233,6 +1237,7 @@ class Parameters(TransferFunction.Parameters):
scale = Parameter(1.0, modulable=True)
bounds = (0, 1)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -1497,6 +1502,7 @@ class Parameters(TransferFunction.Parameters):
leak = Parameter(0.0, modulable=True)
bounds = (None, None)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -1705,6 +1711,7 @@ def _validate_variable(self, variable):
if variable.ndim != 1 or len(variable) < 2:
return f"must be list or 1d array of length 2 or greater."
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -1970,6 +1977,7 @@ class Parameters(TransferFunction.Parameters):
offset = Parameter(0.0, modulable=True)
bounds = (None, None)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -2243,6 +2251,7 @@ class Parameters(TransferFunction.Parameters):
seed = Parameter(DEFAULT_SEED, modulable=True, fallback_default=True, setter=_seed_setter)
bounds = (None, None)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -2523,6 +2532,7 @@ def _validate_output(self, output):
else:
return 'not one of {0}'.format(options)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -2578,7 +2588,7 @@ def _validate_variable(self, variable, context=None):
return np.asarray(variable)
- def __gen_llvm_exp_sum_max(self, builder, index, ctx, vi, gain, max_ptr, exp_sum_ptr, max_ind_ptr):
+ def __gen_llvm_exp_sum(self, builder, index, ctx, vi, gain, exp_sum_ptr):
ptri = builder.gep(vi, [ctx.int32_ty(0), index])
exp_f = ctx.get_builtin("exp", [ctx.float_ty])
@@ -2590,17 +2600,7 @@ def __gen_llvm_exp_sum_max(self, builder, index, ctx, vi, gain, max_ptr, exp_sum
new_exp_sum = builder.fadd(exp_sum, exp_val)
builder.store(new_exp_sum, exp_sum_ptr)
- old_max = builder.load(max_ptr)
- gt = builder.fcmp_ordered(">", exp_val, old_max)
- new_max = builder.select(gt, exp_val, old_max)
- builder.store(new_max, max_ptr)
-
- old_index = builder.load(max_ind_ptr)
- new_index = builder.select(gt, index, old_index)
- builder.store(new_index, max_ind_ptr)
-
def __gen_llvm_exp_div(self, builder, index, ctx, vi, vo, gain, exp_sum):
- assert self.output == ALL
ptro = builder.gep(vo, [ctx.int32_ty(0), index])
ptri = builder.gep(vi, [ctx.int32_ty(0), index])
exp_f = ctx.get_builtin("exp", [ctx.float_ty])
@@ -2611,65 +2611,70 @@ def __gen_llvm_exp_div(self, builder, index, ctx, vi, vo, gain, exp_sum):
builder.store(val, ptro)
- def __gen_llvm_apply(self, ctx, builder, params, _, arg_in, arg_out):
+ def __gen_llvm_apply(self, ctx, builder, params, state, arg_in, arg_out, tags:frozenset):
exp_sum_ptr = builder.alloca(ctx.float_ty)
builder.store(exp_sum_ptr.type.pointee(0), exp_sum_ptr)
- max_ptr = builder.alloca(ctx.float_ty)
- builder.store(max_ptr.type.pointee(float('-inf')), max_ptr)
-
- max_ind_ptr = builder.alloca(ctx.int32_ty)
- builder.store(max_ind_ptr.type.pointee(-1), max_ind_ptr)
-
gain_ptr = pnlvm.helpers.get_param_ptr(builder, self, params, GAIN)
gain = pnlvm.helpers.load_extract_scalar_array_one(builder, gain_ptr)
with pnlvm.helpers.array_ptr_loop(builder, arg_in, "exp_sum_max") as args:
- self.__gen_llvm_exp_sum_max(*args, ctx=ctx, vi=arg_in,
- max_ptr=max_ptr, gain=gain,
- max_ind_ptr=max_ind_ptr,
- exp_sum_ptr=exp_sum_ptr)
+ self.__gen_llvm_exp_sum(*args, ctx=ctx, vi=arg_in, gain=gain,
+ exp_sum_ptr=exp_sum_ptr)
- output_type = self.output
exp_sum = builder.load(exp_sum_ptr)
- index = builder.load(max_ind_ptr)
- ptro = builder.gep(arg_out, [ctx.int32_ty(0), index])
- if output_type == ALL:
+ if self.output == ALL:
with pnlvm.helpers.array_ptr_loop(builder, arg_in, "exp_div") as args:
self.__gen_llvm_exp_div(ctx=ctx, vi=arg_in, vo=arg_out,
gain=gain, exp_sum=exp_sum, *args)
- elif output_type == MAX_VAL:
- # zero out the output array
- with pnlvm.helpers.array_ptr_loop(builder, arg_in, "zero_output") as (b,i):
- b.store(ctx.float_ty(0), b.gep(arg_out, [ctx.int32_ty(0), i]))
-
- ptri = builder.gep(arg_in, [ctx.int32_ty(0), index])
- exp_f = ctx.get_builtin("exp", [ctx.float_ty])
- orig_val = builder.load(ptri)
- val = builder.fmul(orig_val, gain)
- val = builder.call(exp_f, [val])
- val = builder.fdiv(val, exp_sum)
- builder.store(val, ptro)
- elif output_type == MAX_INDICATOR:
- # zero out the output array
- with pnlvm.helpers.array_ptr_loop(builder, arg_in, "zero_output") as (b,i):
- b.store(ctx.float_ty(0), b.gep(arg_out, [ctx.int32_ty(0), i]))
- builder.store(ctx.float_ty(1), ptro)
+ return builder
+
+ one_hot_f = ctx.import_llvm_function(self.one_hot_function, tags=tags)
+ one_hot_p = pnlvm.helpers.get_param_ptr(builder, self, params, 'one_hot_function')
+ one_hot_s = pnlvm.helpers.get_state_ptr(builder, self, state, 'one_hot_function')
+
+ assert one_hot_f.args[3].type == arg_out.type
+ one_hot_out = arg_out
+ one_hot_in = builder.alloca(one_hot_f.args[2].type.pointee)
+
+ if self.output in {MAX_VAL, MAX_INDICATOR}:
+ with pnlvm.helpers.array_ptr_loop(builder, arg_in, "exp_div") as (b, i):
+ self.__gen_llvm_exp_div(ctx=ctx, vi=arg_in, vo=one_hot_in,
+ gain=gain, exp_sum=exp_sum, builder=b, index=i)
+
+ builder.call(one_hot_f, [one_hot_p, one_hot_s, one_hot_in, one_hot_out])
+
+ elif self.output == PROB:
+ one_hot_in_data = builder.gep(one_hot_in, [ctx.int32_ty(0), ctx.int32_ty(0)])
+ one_hot_in_dist = builder.gep(one_hot_in, [ctx.int32_ty(0), ctx.int32_ty(1)])
+
+ with pnlvm.helpers.array_ptr_loop(builder, arg_in, "exp_div") as (b, i):
+ self.__gen_llvm_exp_div(ctx=ctx, vi=arg_in, vo=one_hot_in_dist,
+ gain=gain, exp_sum=exp_sum, builder=b, index=i)
+
+ dist_in = b.gep(arg_in, [ctx.int32_ty(0), i])
+ dist_out = b.gep(one_hot_in_data, [ctx.int32_ty(0), i])
+ b.store(b.load(dist_in), dist_out)
+
+
+ builder.call(one_hot_f, [one_hot_p, one_hot_s, one_hot_in, one_hot_out])
+ else:
+ assert False, "Unsupported output in {}: {}".format(self, self.output)
return builder
- def _gen_llvm_function_body(self, ctx, builder, params, _, arg_in, arg_out, *, tags:frozenset):
+ def _gen_llvm_function_body(self, ctx, builder, params, state, arg_in, arg_out, *, tags:frozenset):
if self.parameters.per_item.get():
assert isinstance(arg_in.type.pointee.element, pnlvm.ir.ArrayType)
assert isinstance(arg_out.type.pointee.element, pnlvm.ir.ArrayType)
for i in range(arg_in.type.pointee.count):
inner_in = builder.gep(arg_in, [ctx.int32_ty(0), ctx.int32_ty(i)])
inner_out = builder.gep(arg_out, [ctx.int32_ty(0), ctx.int32_ty(i)])
- builder = self.__gen_llvm_apply(ctx, builder, params, _, inner_in, inner_out)
+ builder = self.__gen_llvm_apply(ctx, builder, params, state, inner_in, inner_out, tags=tags)
return builder
else:
- return self.__gen_llvm_apply(ctx, builder, params, _, arg_in, arg_out)
+ return self.__gen_llvm_apply(ctx, builder, params, state, arg_in, arg_out, tags=tags)
def apply_softmax(self, input_value, gain, output_type):
# Modulate input_value by gain
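The refactored LLVM path now computes the full softmax into a temporary buffer and delegates `MAX_VAL`/`MAX_INDICATOR`/`PROB` selection to the already-compiled `one_hot_function`, instead of tracking the max inline. A NumPy analogue of that composition (illustrative only; this is not the compiled code's exact numerics):

```python
import numpy as np

def softmax(x, gain=1.0):
    e = np.exp(gain * np.asarray(x, dtype=float))
    return e / e.sum()

s = softmax([0.1, 0.5, 0.2])
max_val = np.where(s == s.max(), s, 0.0)        # MAX_VAL: keep only the max entry
max_indicator = (s == s.max()).astype(float)    # MAX_INDICATOR: 1.0 at the max
```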
@@ -2925,6 +2930,7 @@ class Parameters(TransferFunction.Parameters):
# return True
# return False
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -3842,8 +3848,7 @@ class Parameters(TransferFunction.Parameters):
:default value: None
:type:
"""
- variable = Parameter(np.array([0]),
- history_min_length=1)
+ variable = Parameter(np.array([0]), history_min_length=1, constructor_argument='default_variable')
intensity = Parameter(np.zeros_like(variable.default_value),
history_min_length=1)
@@ -3927,6 +3932,7 @@ class Parameters(TransferFunction.Parameters):
function_parameter_name=ADDITIVE_PARAM,
)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
diff --git a/psyneulink/core/components/functions/stateful/integratorfunctions.py b/psyneulink/core/components/functions/stateful/integratorfunctions.py
index dd344d3b9f0..afca06fdb6e 100644
--- a/psyneulink/core/components/functions/stateful/integratorfunctions.py
+++ b/psyneulink/core/components/functions/stateful/integratorfunctions.py
@@ -48,7 +48,7 @@
INTERACTIVE_ACTIVATION_INTEGRATOR_FUNCTION, LEAKY_COMPETING_INTEGRATOR_FUNCTION, \
MULTIPLICATIVE_PARAM, NOISE, OFFSET, OPERATION, ORNSTEIN_UHLENBECK_INTEGRATOR_FUNCTION, OUTPUT_PORTS, PRODUCT, \
RATE, REST, SIMPLE_INTEGRATOR_FUNCTION, SUM, TIME_STEP_SIZE, THRESHOLD, VARIABLE, MODEL_SPEC_ID_MDF_VARIABLE
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.utilities import parameter_spec, all_within_range, \
convert_all_elements_to_np_array
@@ -220,6 +220,7 @@ class Parameters(StatefulFunction.Parameters):
previous_value = Parameter(np.array([0]), initializer='initializer')
initializer = Parameter(np.array([0]), pnl_internal=True)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -550,6 +551,7 @@ class Parameters(IntegratorFunction.Parameters):
rate = Parameter(1.0, modulable=True, aliases=[MULTIPLICATIVE_PARAM], function_arg=True)
increment = Parameter(0.0, modulable=True, aliases=[ADDITIVE_PARAM], function_arg=True)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -826,6 +828,7 @@ class Parameters(IntegratorFunction.Parameters):
rate = Parameter(1.0, modulable=True, aliases=[MULTIPLICATIVE_PARAM], function_arg=True)
offset = Parameter(0.0, modulable=True, aliases=[ADDITIVE_PARAM], function_arg=True)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -1061,6 +1064,7 @@ class Parameters(IntegratorFunction.Parameters):
rate = Parameter(1.0, modulable=True, aliases=[MULTIPLICATIVE_PARAM], function_arg=True)
offset = Parameter(0.0, modulable=True, aliases=[ADDITIVE_PARAM], function_arg=True)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -1573,6 +1577,7 @@ class Parameters(IntegratorFunction.Parameters):
long_term_logistic = None
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -2014,6 +2019,7 @@ class Parameters(IntegratorFunction.Parameters):
max_val = Parameter(1.0, function_arg=True)
min_val = Parameter(-1.0, function_arg=True)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -2418,6 +2424,7 @@ def _parse_initializer(self, initializer):
else:
return initializer
+ @check_user_specified
@tc.typecheck
def __init__(
self,
@@ -2531,10 +2538,6 @@ def _gen_llvm_integrate(self, builder, index, ctx, vi, vo, params, state):
builder.call(rand_f, [random_state, rand_val_ptr])
rand_val = builder.load(rand_val_ptr)
- if isinstance(rate.type, pnlvm.ir.ArrayType):
- assert len(rate.type) == 1
- rate = builder.extract_value(rate, 0)
-
# Get state pointers
prev_ptr = pnlvm.helpers.get_state_ptr(builder, self, state, "previous_value")
prev_time_ptr = pnlvm.helpers.get_state_ptr(builder, self, state, "previous_time")
@@ -2543,10 +2546,8 @@ def _gen_llvm_integrate(self, builder, index, ctx, vi, vo, params, state):
# + np.sqrt(time_step_size * noise) * random_state.normal()
prev_val_ptr = builder.gep(prev_ptr, [ctx.int32_ty(0), index])
prev_val = builder.load(prev_val_ptr)
+
val = builder.load(builder.gep(vi, [ctx.int32_ty(0), index]))
- if isinstance(val.type, pnlvm.ir.ArrayType):
- assert len(val.type) == 1
- val = builder.extract_value(val, 0)
val = builder.fmul(val, rate)
val = builder.fmul(val, time_step_size)
val = builder.fadd(val, prev_val)
@@ -2894,7 +2895,7 @@ class Parameters(IntegratorFunction.Parameters):
# threshold = Parameter(100.0, modulable=True)
time_step_size = Parameter(1.0, modulable=True)
previous_time = Parameter(None, initializer='starting_point', pnl_internal=True)
- dimension = Parameter(2, stateful=False, read_only=True)
+ dimension = Parameter(3, stateful=False, read_only=True)
initializer = Parameter([0], initalizer='variable', stateful=True)
angle_function = Parameter(None, stateful=False, loggable=False)
random_state = Parameter(None, loggable=False, getter=_random_state_getter, dependencies='seed')
@@ -2933,6 +2934,7 @@ def _parse_noise(self, noise):
noise = np.array(noise)
return noise
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -3439,6 +3441,7 @@ class Parameters(IntegratorFunction.Parameters):
read_only=True
)
+ @check_user_specified
@tc.typecheck
def __init__(
self,
@@ -3733,6 +3736,7 @@ class Parameters(IntegratorFunction.Parameters):
offset = Parameter(0.0, modulable=True, aliases=[ADDITIVE_PARAM], function_arg=True)
time_step_size = Parameter(0.1, modulable=True, function_arg=True)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -4414,6 +4418,7 @@ class Parameters(IntegratorFunction.Parameters):
read_only=True
)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
diff --git a/psyneulink/core/components/functions/stateful/memoryfunctions.py b/psyneulink/core/components/functions/stateful/memoryfunctions.py
index abade02079c..c6fb7d67731 100644
--- a/psyneulink/core/components/functions/stateful/memoryfunctions.py
+++ b/psyneulink/core/components/functions/stateful/memoryfunctions.py
@@ -45,7 +45,7 @@
ADDITIVE_PARAM, BUFFER_FUNCTION, MEMORY_FUNCTION, COSINE, \
ContentAddressableMemory_FUNCTION, DictionaryMemory_FUNCTION, \
MIN_INDICATOR, MULTIPLICATIVE_PARAM, NEWEST, NOISE, OLDEST, OVERWRITE, RATE, RANDOM, VARIABLE
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.utilities import \
all_within_range, convert_to_np_array, convert_to_list, convert_all_elements_to_np_array
@@ -56,6 +56,16 @@
class MemoryFunction(StatefulFunction): # -----------------------------------------------------------------------------
componentType = MEMORY_FUNCTION
+ # TODO: refactor to avoid skip of direct super
+ def _update_default_variable(self, new_default_variable, context=None):
+ if not self.parameters.initializer._user_specified:
+ # use * 0 instead of zeros_like to deal with ragged arrays
+ self._initialize_previous_value([new_default_variable * 0], context)
+
+ # bypass the additional _initialize_previous_value call used by
+ # other stateful functions
+ super(StatefulFunction, self)._update_default_variable(new_default_variable, context=context)
+
class Buffer(MemoryFunction): # ------------------------------------------------------------------------------
"""
@@ -215,6 +225,7 @@ class Parameters(StatefulFunction.Parameters):
changes_shape = Parameter(True, stateful=False, loggable=False, pnl_internal=True)
+ @check_user_specified
@tc.typecheck
def __init__(self,
# FIX: 12/11/18 JDC - NOT SAFE TO SPECIFY A MUTABLE TYPE AS DEFAULT
@@ -259,16 +270,6 @@ def _initialize_previous_value(self, initializer, context=None):
return previous_value
- # TODO: Buffer variable fix: remove this or refactor to avoid skip
- # of direct super
- def _update_default_variable(self, new_default_variable, context=None):
- if not self.parameters.initializer._user_specified:
- self._initialize_previous_value([np.zeros_like(new_default_variable)], context)
-
- # bypass the additional _initialize_previous_value call used by
- # other stateful functions
- super(StatefulFunction, self)._update_default_variable(new_default_variable, context=context)
-
def _instantiate_attributes_before_function(self, function=None, context=None):
self.parameters.previous_value._set(
self._initialize_previous_value(
@@ -1152,6 +1153,7 @@ def _parse_initializer(self, initializer):
initializer = ContentAddressableMemory._enforce_memory_shape(initializer)
return initializer
+ @check_user_specified
@tc.typecheck
def __init__(self,
# FIX: REINSTATE WHEN 3.6 IS RETIRED:
@@ -2173,6 +2175,7 @@ class Parameters(StatefulFunction.Parameters):
selection_function = Parameter(OneHot(mode=MIN_INDICATOR), stateful=False, loggable=False)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
diff --git a/psyneulink/core/components/functions/stateful/statefulfunction.py b/psyneulink/core/components/functions/stateful/statefulfunction.py
index 1a365aca476..5e22d460526 100644
--- a/psyneulink/core/components/functions/stateful/statefulfunction.py
+++ b/psyneulink/core/components/functions/stateful/statefulfunction.py
@@ -30,7 +30,7 @@
from psyneulink.core.components.functions.function import Function_Base, FunctionError, _noise_setter
from psyneulink.core.globals.context import handle_external_context
from psyneulink.core.globals.keywords import STATEFUL_FUNCTION_TYPE, STATEFUL_FUNCTION, NOISE, RATE
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.utilities import iscompatible, convert_to_np_array, contains_type
@@ -213,6 +213,7 @@ def _validate_noise(self, noise):
return 'functions in a list must be instantiated and have the desired noise variable shape'
@handle_external_context()
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
diff --git a/psyneulink/core/components/functions/userdefinedfunction.py b/psyneulink/core/components/functions/userdefinedfunction.py
index 176eff725c7..0cb5db217f3 100644
--- a/psyneulink/core/components/functions/userdefinedfunction.py
+++ b/psyneulink/core/components/functions/userdefinedfunction.py
@@ -18,7 +18,7 @@
from psyneulink.core.globals.keywords import \
CONTEXT, CUSTOM_FUNCTION, OWNER, PARAMS, \
SELF, USER_DEFINED_FUNCTION, USER_DEFINED_FUNCTION_TYPE
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences import is_pref_set
from psyneulink.core.globals.utilities import _is_module_class, iscompatible
@@ -450,6 +450,7 @@ class Parameters(Function_Base.Parameters):
pnl_internal=True,
)
+ @check_user_specified
@tc.typecheck
def __init__(self,
custom_function=None,
diff --git a/psyneulink/core/components/mechanisms/mechanism.py b/psyneulink/core/components/mechanisms/mechanism.py
index e174f9004e2..567c2f3eeca 100644
--- a/psyneulink/core/components/mechanisms/mechanism.py
+++ b/psyneulink/core/components/mechanisms/mechanism.py
@@ -1098,7 +1098,7 @@
REMOVE_PORTS, PORT_SPEC, _parse_port_spec, PORT_SPECIFIC_PARAMS, PROJECTION_SPECIFIC_PARAMS
from psyneulink.core.components.shellclasses import Mechanism, Projection, Port
from psyneulink.core.globals.context import Context, ContextFlags, handle_external_context
-from psyneulink.core.globals.json import _get_variable_parameter_name, _substitute_expression_args
+from psyneulink.core.globals.mdf import _get_variable_parameter_name
# TODO: remove unused keywords
from psyneulink.core.globals.keywords import \
ADDITIVE_PARAM, EXECUTION_PHASE, EXPONENT, FUNCTION_PARAMS, \
@@ -1109,7 +1109,7 @@
NAME, OUTPUT, OUTPUT_LABELS_DICT, OUTPUT_PORT, OUTPUT_PORT_PARAMS, OUTPUT_PORTS, OWNER_EXECUTION_COUNT, OWNER_VALUE, \
PARAMETER_PORT, PARAMETER_PORT_PARAMS, PARAMETER_PORTS, PROJECTIONS, REFERENCE_VALUE, RESULT, \
TARGET_LABELS_DICT, VALUE, VARIABLE, WEIGHT, MODEL_SPEC_ID_MDF_VARIABLE, MODEL_SPEC_ID_INPUT_PORT_COMBINATION_FUNCTION
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
from psyneulink.core.globals.registry import register_category, remove_instance_from_registry
from psyneulink.core.globals.utilities import \
@@ -1680,6 +1680,7 @@ def _parse_output_ports(self, output_ports):
@tc.typecheck
@abc.abstractmethod
+ @check_user_specified
def __init__(self,
default_variable=None,
size=None,
@@ -2915,9 +2916,12 @@ def _gen_llvm_ports(self, ctx, builder, ports, group,
# the function result can result in 1d structure or scalar
# Casting the pointer is LLVM way of adding dimensions
array_1d = pnlvm.ir.ArrayType(p_input_data.type.pointee, 1)
- array_2d = pnlvm.ir.ArrayType(array_1d, 1)
- assert array_1d == p_function.args[2].type.pointee or array_2d == p_function.args[2].type.pointee, \
+ assert array_1d == p_function.args[2].type.pointee, \
"{} vs. {}".format(p_function.args[2].type.pointee, p_input_data.type.pointee)
+            # restrict this shape-cast (treating a 1d value as a 2d array)
+            # to Control/Gating signals
+ assert len(p_function.args[2].type.pointee) == 1
+ assert str(port).startswith("(ControlSignal") or str(port).startswith("(GatingSignal")
p_input = builder.bitcast(p_input_data, p_function.args[2].type)
else:
@@ -3032,7 +3036,7 @@ def _gen_llvm_output_port_parse_variable(self, ctx, builder,
except TypeError as e:
# TypeError means we can't index.
# Convert this to assertion failure below
- pass
+ data = None
else:
#TODO: support more spec options
if name == OWNER_VALUE:
@@ -3042,18 +3046,19 @@ def _gen_llvm_output_port_parse_variable(self, ctx, builder,
else:
data = None
- if data is not None:
- parsed = builder.gep(data, [ctx.int32_ty(0), *(ctx.int32_ty(i) for i in ids)])
- # "num_executions" are kept as int64, we need to convert the value to float first
- if name == "num_executions":
- count = builder.load(parsed)
- count_fp = builder.uitofp(count, ctx.float_ty)
- parsed = builder.alloca(count_fp.type)
- builder.store(count_fp, parsed)
+ assert data is not None, "Unsupported OutputPort spec: {} ({})".format(port_spec, value.type)
- return parsed
+ parsed = builder.gep(data, [ctx.int32_ty(0), *(ctx.int32_ty(i) for i in ids)])
+ # "num_executions" are kept as int64, we need to convert the value to float first
+ # port inputs are also expected to be 1d arrays
+ if name == "num_executions":
+ count = builder.load(parsed)
+ count_fp = builder.uitofp(count, ctx.float_ty)
+ parsed = builder.alloca(pnlvm.ir.ArrayType(count_fp.type, 1))
+ ptr = builder.gep(parsed, [ctx.int32_ty(0), ctx.int32_ty(0)])
+ builder.store(count_fp, ptr)
- assert False, "Unsupported OutputPort spec: {} ({})".format(port_spec, value.type)
+ return parsed
def _gen_llvm_output_ports(self, ctx, builder, value,
mech_params, mech_state, mech_in, mech_out):
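Editor's note: the `num_executions` handling above widens an int64 counter into a `[1 x double]` array so it matches the 1d shape expected for port inputs. A self-contained llvmlite sketch of that conversion (illustrative names, assumes llvmlite is installed; not PNL's codegen path):

```python
from llvmlite import ir

mod = ir.Module(name="widen_example")
fn = ir.Function(mod, ir.FunctionType(ir.VoidType(), [ir.IntType(64)]),
                 name="widen_count")
builder = ir.IRBuilder(fn.append_basic_block("entry"))
count, = fn.args

count_fp = builder.uitofp(count, ir.DoubleType())        # int64 -> double
parsed = builder.alloca(ir.ArrayType(count_fp.type, 1))  # [1 x double]*
i32 = ir.IntType(32)
ptr = builder.gep(parsed, [i32(0), i32(0)])              # &parsed[0]
builder.store(count_fp, ptr)
builder.ret_void()
print(mod)
```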
@@ -3071,29 +3076,34 @@ def _get_input_data_ptr(b, i):
mech_params, mech_state, mech_in)
return builder
- def _gen_llvm_invoke_function(self, ctx, builder, function, f_params, f_state, variable, *, tags:frozenset):
+ def _gen_llvm_invoke_function(self, ctx, builder, function, f_params, f_state,
+ variable, out, *, tags:frozenset):
+
fun = ctx.import_llvm_function(function, tags=tags)
- fun_out = builder.alloca(fun.args[3].type.pointee, name=function.name + "_output")
+ if out is None:
+ f_out = builder.alloca(fun.args[3].type.pointee, name=function.name + "_output")
+ else:
+ f_out = out
- builder.call(fun, [f_params, f_state, variable, fun_out])
+ builder.call(fun, [f_params, f_state, variable, f_out])
- return fun_out, builder
+ return f_out, builder
def _gen_llvm_is_finished_cond(self, ctx, builder, m_params, m_state):
return ctx.bool_ty(1)
- def _gen_llvm_mechanism_functions(self, ctx, builder, m_base_params, m_params, m_state, arg_in,
- ip_output, *, tags:frozenset):
+ def _gen_llvm_mechanism_functions(self, ctx, builder, m_base_params, m_params, m_state, m_in,
+ m_val, ip_output, *, tags:frozenset):
# Default mechanism runs only the main function
f_base_params = pnlvm.helpers.get_param_ptr(builder, self, m_base_params, "function")
f_params, builder = self._gen_llvm_param_ports_for_obj(
- self.function, f_base_params, ctx, builder, m_base_params, m_state, arg_in)
+ self.function, f_base_params, ctx, builder, m_base_params, m_state, m_in)
f_state = pnlvm.helpers.get_state_ptr(builder, self, m_state, "function")
return self._gen_llvm_invoke_function(ctx, builder, self.function,
f_params, f_state, ip_output,
- tags=tags)
+ m_val, tags=tags)
def _gen_llvm_function_internal(self, ctx, builder, m_params, m_state, arg_in,
arg_out, m_base_params, *, tags:frozenset):
@@ -3101,11 +3111,21 @@ def _gen_llvm_function_internal(self, ctx, builder, m_params, m_state, arg_in,
ip_output, builder = self._gen_llvm_input_ports(ctx, builder,
m_base_params, m_state, arg_in)
+ # This will move history items around to make space for a new entry
+ mech_val_ptr = pnlvm.helpers.get_state_space(builder, self, m_state, "value")
+
value, builder = self._gen_llvm_mechanism_functions(ctx, builder, m_base_params,
m_params, m_state, arg_in,
+ mech_val_ptr,
ip_output, tags=tags)
+ if mech_val_ptr.type.pointee == value.type.pointee:
+ assert value is mech_val_ptr
+ else:
+ # FIXME: Does this need some sort of parsing?
+ warnings.warn("Shape mismatch: function result does not match mechanism value param: {} vs. {}".format(value.type.pointee, mech_val_ptr.type.pointee))
+
# Update num_executions parameter
num_executions_ptr = pnlvm.helpers.get_state_ptr(builder, self, m_state, "num_executions")
for scale in TimeScale:
@@ -3117,13 +3137,6 @@ def _gen_llvm_function_internal(self, ctx, builder, m_params, m_state, arg_in,
new_val = builder.add(new_val, new_val.type(1))
builder.store(new_val, num_exec_time_ptr)
- val_ptr = pnlvm.helpers.get_state_ptr(builder, self, m_state, "value")
- if val_ptr.type.pointee == value.type.pointee:
- pnlvm.helpers.push_state_val(builder, self, m_state, "value", value)
- else:
- # FIXME: Does this need some sort of parsing?
- warnings.warn("Shape mismatch: function result does not match mechanism value param: {} vs. {}".format(value.type.pointee, val_ptr.type.pointee))
-
# Run output ports after updating the mech state (num_executions and value)
builder = self._gen_llvm_output_ports(ctx, builder, value, m_base_params, m_state, arg_in, arg_out)
@@ -4155,7 +4168,7 @@ def as_mdf_model(self):
model.functions.append(
mdf.Function(
id=combination_function_id,
- function={'onnx::ReduceSum': combination_function_args},
+ function='onnx::ReduceSum',
args=combination_function_args
)
)
@@ -4196,9 +4209,6 @@ def as_mdf_model(self):
)
model.functions.append(function_model)
- for func_model in model.functions:
- _substitute_expression_args(func_model)
-
return model
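Editor's note: the `as_mdf_model` change above tracks a modeci_mdf API update: the ONNX op name and its arguments are now passed separately rather than as a single dict keyed by the op name. A hedged sketch of the new construction (the argument names here are hypothetical; only the `function=`/`args=` split is taken from this diff):

```python
import modeci_mdf.mdf as mdf

combination_function_args = {'data': 'input_0', 'axes': 0}  # hypothetical args
func = mdf.Function(
    id='input_combination',
    function='onnx::ReduceSum',
    args=combination_function_args,
)
```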
diff --git a/psyneulink/core/components/mechanisms/modulatory/control/controlmechanism.py b/psyneulink/core/components/mechanisms/modulatory/control/controlmechanism.py
index 3da410a7ae5..b4d82a6662e 100644
--- a/psyneulink/core/components/mechanisms/modulatory/control/controlmechanism.py
+++ b/psyneulink/core/components/mechanisms/modulatory/control/controlmechanism.py
@@ -378,11 +378,12 @@
A ControlMechanism's `function ` uses its `outcome `
attribute (the `value ` of its *OUTCOME* `InputPort`) to generate a `control_allocation
`. By default, its `function ` is assigned
-the `DefaultAllocationFunction`, which takes a single value as its input, and assigns that as the value of
-each item of `control_allocation `. Each of these items is assigned as
-the allocation for the corresponding `ControlSignal` in `control_signals `. This
+the `Identity` function, which takes a single value as its input and copies it to its output; this value is
+assigned as each item of `control_allocation `, and thus as the
+allocation for every `ControlSignal` in `control_signals `. This
distributes the ControlMechanism's input as the allocation to each of its `control_signals
-`. This same behavior also applies to any custom function assigned to a
+`.
+This same behavior also applies to any custom function assigned to a
ControlMechanism that returns a 2d array with a single item in its outer dimension (axis 0). If a function is
assigned that returns a 2d array with more than one item, and it has the same number of `control_signals
`, then each ControlSignal is assigned to the corresponding item of the function's
@@ -587,8 +588,8 @@
import numpy as np
import typecheck as tc
-from psyneulink.core import llvm as pnlvm
from psyneulink.core.components.functions.function import Function_Base, is_function_type
+from psyneulink.core.components.functions.nonstateful.transferfunctions import Identity
from psyneulink.core.components.functions.nonstateful.combinationfunctions import Concatenate
from psyneulink.core.components.functions.nonstateful.combinationfunctions import LinearCombination
from psyneulink.core.components.mechanisms.mechanism import Mechanism, Mechanism_Base
@@ -605,14 +606,13 @@
MECHANISM, MULTIPLICATIVE, MODULATORY_SIGNALS, MONITOR_FOR_CONTROL, MONITOR_FOR_MODULATION, \
OBJECTIVE_MECHANISM, OUTCOME, OWNER_VALUE, PARAMS, PORT_TYPE, PRODUCT, PROJECTION_TYPE, PROJECTIONS, \
SEPARATE, SIZE
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
from psyneulink.core.globals.utilities import ContentAddressableList, convert_to_list, convert_to_np_array, is_iterable
__all__ = [
'CONTROL_ALLOCATION', 'GATING_ALLOCATION', 'ControlMechanism', 'ControlMechanismError', 'ControlMechanismRegistry',
- 'DefaultAllocationFunction'
]
CONTROL_ALLOCATION = 'control_allocation'
@@ -727,58 +727,6 @@ def _net_outcome_getter(owning_component=None, context=None):
return [0]
-class DefaultAllocationFunction(Function_Base):
- """Take a single 1d item and return a 2d array with n identical items
- Takes the default input (a single value in the *OUTCOME* InputPort of the ControlMechanism),
- and returns the same allocation for each of its `control_signals `.
- """
- componentName = 'Default Control Function'
- class Parameters(Function_Base.Parameters):
- """
- Attributes
- ----------
-
- num_control_signals
- see `num_control_signals `
-
- :default value: 1
- :type: ``int``
- """
- num_control_signals = Parameter(1, stateful=False)
-
- def __init__(self,
- default_variable=None,
- params=None,
- owner=None
- ):
-
- super().__init__(default_variable=default_variable,
- params=params,
- owner=owner,
- )
-
- def _function(self,
- variable=None,
- context=None,
- params=None,
- ):
- num_ctl_sigs = self._get_current_parameter_value('num_control_signals')
- result = np.array([variable[0]] * num_ctl_sigs)
- return self.convert_output_type(result)
-
- def reset(self, *args, force=False, context=None, **kwargs):
- # Override Component.reset which requires that the Component is stateful
- pass
-
- def _gen_llvm_function_body(self, ctx, builder, _1, _2, arg_in, arg_out, *, tags:frozenset):
- val_ptr = builder.gep(arg_in, [ctx.int32_ty(0), ctx.int32_ty(0)])
- val = builder.load(val_ptr)
- with pnlvm.helpers.array_ptr_loop(builder, arg_out, "alloc_loop") as (b, idx):
- out_ptr = builder.gep(arg_out, [ctx.int32_ty(0), idx])
- builder.store(val, out_ptr)
- return builder
-
-
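Editor's note: the removal above replaces `DefaultAllocationFunction` (which tiled the single *OUTCOME* value across n ControlSignals) with `Identity` plus a per-signal `variable_spec`, so the allocation each signal receives is unchanged. A rough numpy sketch of the two behaviors (illustrative, not PNL API):

```python
import numpy as np

outcome = np.array([[0.5]])              # single OUTCOME item

# old: the function tiles the value -> mechanism value has one item per signal
old_value = np.array([outcome[0]] * 3)   # shape (3, 1)

# new: Identity passes the value through; every ControlSignal reads
# item 0 of the owner's value via its variable_spec (OWNER_VALUE, 0)
new_value = outcome                      # shape (1, 1)
allocations = [new_value[0] for _ in range(3)]

assert all((a == old_value[i]).all() for i, a in enumerate(allocations))
```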
class ControlMechanism(ModulatoryMechanism_Base):
"""
ControlMechanism( \
@@ -1201,7 +1149,7 @@ class Parameters(ModulatoryMechanism_Base.Parameters):
)
monitor_for_control = Parameter(
- [OUTCOME],
+ [],
stateful=False,
loggable=False,
read_only=True,
@@ -1218,6 +1166,7 @@ class Parameters(ModulatoryMechanism_Base.Parameters):
aliases=[CONTROL, CONTROL_SIGNALS],
constructor_argument=CONTROL
)
+ function = Parameter(Identity, stateful=False, loggable=False)
def _parse_output_ports(self, output_ports):
def is_2tuple(o):
@@ -1263,6 +1212,7 @@ def _validate_input_ports(self, input_ports):
# method?
# validate_monitored_port_spec(self._owner, input_ports)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -1329,8 +1279,6 @@ def __init__(self,
f"creating unnecessary and/or duplicated Components.")
control = convert_to_list(args)
- function = function or DefaultAllocationFunction
-
super(ControlMechanism, self).__init__(
default_variable=default_variable,
size=size,
@@ -1727,42 +1675,33 @@ def _register_control_signal_type(self, context=None):
def _instantiate_control_signals(self, context):
"""Subclasses can override for class-specific implementation (see OptimizationControlMechanism for example)"""
- output_port_specs = list(enumerate(self.output_ports))
- for i, control_signal in output_port_specs:
+ for i, control_signal in enumerate(self.output_ports):
self.control[i] = self._instantiate_control_signal(control_signal, context=context)
- num_control_signals = i + 1
-
- # For DefaultAllocationFunction, set defaults.value to have number of items equal to num control_signals
- if isinstance(self.function, DefaultAllocationFunction):
- self.defaults.value = np.tile(self.function.value, (num_control_signals, 1))
- self.parameters.control_allocation._set(copy.deepcopy(self.defaults.value), context)
- self.function.num_control_signals = num_control_signals
- # For other functions, assume that if its value has:
+        # For the function, assume that if its value has:
# - one item, all control_signals should get it (i.e., the default: (OWNER_VALUE, 0));
# - same number of items as the number of control_signals;
# assign each control_signal to the corresponding item of the function's value
# - a different number of items than number of control_signals,
# leave things alone, and allow any errant indices for control_signals to be caught later.
- else:
- self.defaults.value = np.array(self.function.value)
- self.parameters.value._set(copy.deepcopy(self.defaults.value), context)
+ self.defaults.value = np.array(self.function.value)
+ self.parameters.value._set(copy.deepcopy(self.defaults.value), context)
- len_fct_value = len(self.function.value)
+ len_fct_value = len(self.function.value)
- # Assign each ControlSignal's variable_spec to index of ControlMechanism's value
- for i, control_signal in enumerate(self.control):
+ # Assign each ControlSignal's variable_spec to index of ControlMechanism's value
+ for i, control_signal in enumerate(self.control):
- # If number of control_signals is same as number of items in function's value,
- # assign each ControlSignal to the corresponding item of the function's value
- if len_fct_value == num_control_signals:
- control_signal._variable_spec = [(OWNER_VALUE, i)]
+ # If number of control_signals is same as number of items in function's value,
+ # assign each ControlSignal to the corresponding item of the function's value
+ if len_fct_value == len(self.control):
+ control_signal._variable_spec = (OWNER_VALUE, i)
- if not isinstance(control_signal.owner_value_index, int):
- assert False, \
- f"PROGRAM ERROR: The \'owner_value_index\' attribute for {control_signal.name} " \
- f"of {self.name} ({control_signal.owner_value_index})is not an int."
+ if not isinstance(control_signal.owner_value_index, int):
+ assert False, \
+ f"PROGRAM ERROR: The \'owner_value_index\' attribute for {control_signal.name} " \
+ f"of {self.name} ({control_signal.owner_value_index})is not an int."
def _instantiate_control_signal(self, control_signal, context=None):
"""Parse and instantiate ControlSignal (or subclass relevant to ControlMechanism subclass)
diff --git a/psyneulink/core/components/mechanisms/modulatory/control/defaultcontrolmechanism.py b/psyneulink/core/components/mechanisms/modulatory/control/defaultcontrolmechanism.py
index 0c92b09e3be..c82fff09f9c 100644
--- a/psyneulink/core/components/mechanisms/modulatory/control/defaultcontrolmechanism.py
+++ b/psyneulink/core/components/mechanisms/modulatory/control/defaultcontrolmechanism.py
@@ -40,6 +40,7 @@
from psyneulink.core.components.mechanisms.processing.objectivemechanism import ObjectiveMechanism
from psyneulink.core.globals.defaults import defaultControlAllocation
from psyneulink.core.globals.keywords import CONTROL, INPUT_PORTS, NAME
+from psyneulink.core.globals.parameters import check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
from psyneulink.core.globals.utilities import ContentAddressableList
@@ -87,6 +88,7 @@ class DefaultControlMechanism(ControlMechanism):
# PREFERENCE_SET_NAME: 'DefaultControlMechanismCustomClassPreferences',
# PREFERENCE_KEYWORD: ...}
+ @check_user_specified
@tc.typecheck
def __init__(self,
objective_mechanism:tc.optional(tc.any(ObjectiveMechanism, list))=None,
diff --git a/psyneulink/core/components/mechanisms/modulatory/control/gating/gatingmechanism.py b/psyneulink/core/components/mechanisms/modulatory/control/gating/gatingmechanism.py
index 5338d545fc4..8aa950f2b4a 100644
--- a/psyneulink/core/components/mechanisms/modulatory/control/gating/gatingmechanism.py
+++ b/psyneulink/core/components/mechanisms/modulatory/control/gating/gatingmechanism.py
@@ -190,7 +190,7 @@
from psyneulink.core.globals.keywords import \
CONTROL, CONTROL_SIGNALS, GATE, GATING_PROJECTION, GATING_SIGNAL, GATING_SIGNALS, \
INIT_EXECUTE_METHOD_ONLY, MONITOR_FOR_CONTROL, PORT_TYPE, PROJECTIONS, PROJECTION_TYPE
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
from psyneulink.core.globals.utilities import ContentAddressableList, convert_to_list
@@ -433,6 +433,7 @@ class Parameters(ControlMechanism.Parameters):
constructor_argument='gate'
)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_gating_allocation=None,
diff --git a/psyneulink/core/components/mechanisms/modulatory/control/optimizationcontrolmechanism.py b/psyneulink/core/components/mechanisms/modulatory/control/optimizationcontrolmechanism.py
index 9b7517f0f60..67b665ce8cf 100644
--- a/psyneulink/core/components/mechanisms/modulatory/control/optimizationcontrolmechanism.py
+++ b/psyneulink/core/components/mechanisms/modulatory/control/optimizationcontrolmechanism.py
@@ -605,7 +605,7 @@
` made for each `control_allocation `).
COMMENT
- .. _OptimizationControlMechanism_State:
+.. _OptimizationControlMechanism_State:
*State*
~~~~~~~
@@ -748,23 +748,24 @@
If an OptimizationControlMechanism has an `objective_mechanism `, it is
assigned a single outcome_input_port, named *OUTCOME*, that receives a Projection from the objective_mechanism's
`OUTCOME OutputPort `. The OptimizationControlMechanism's `objective_mechanism
-` is used to evaluate the outcome of executing its `agent_rep
+` is used to evaluate the outcome of executing its `agent_rep
` for a given `state `. This passes
the result to the OptimizationControlMechanism's *OUTCOME* InputPort, that is placed in its `outcome
` attribute.
.. note::
- An OptimizationControlMechanism's `objective_mechanism ` and its `function
- ` are distinct from, and should not be confused with the `objective_function
- ` parameter of the OptimizationControlMechanism's `function
- `. The `objective_mechanism `\\'s
- `function ` evaluates the `outcome ` of processing
- without taking into account the `costs ` of the OptimizationControlMechanism's
- `control_signals `. In contrast, its `evaluate_agent_rep
- ` method, which is assigned as the `objective_function`
- parameter of its `function `, takes the `costs `
- of the OptimizationControlMechanism's `control_signals ` into
- account when calculating the `net_outcome` that it returns as its result.
+ An OptimizationControlMechanism's `objective_mechanism ` and the `function
+    ` of that Mechanism are distinct from, and should not be confused with, the
+ `objective_function ` parameter of the OptimizationControlMechanism's
+ `function `. The `objective_mechanism
+ `\\'s `function ` evaluates the `outcome
+ ` of processing without taking into account the `costs ` of
+ the OptimizationControlMechanism's `control_signals `. In
+ contrast, its `evaluate_agent_rep ` method, which is assigned
+ as the `objective_function` parameter of its `function `, takes the
+ `costs ` of the OptimizationControlMechanism's `control_signals
+ ` into account when calculating the `net_outcome` that it
+ returns as its result.
COMMENT:
ADD HINT HERE RE: USE OF CONCATENATION
@@ -1098,9 +1099,9 @@
ALL, COMPOSITION, COMPOSITION_FUNCTION_APPROXIMATOR, CONCATENATE, DEFAULT_INPUT, DEFAULT_VARIABLE, EID_FROZEN, \
FUNCTION, INPUT_PORT, INTERNAL_ONLY, NAME, OPTIMIZATION_CONTROL_MECHANISM, NODE, OWNER_VALUE, PARAMS, PORT, \
PROJECTIONS, SHADOW_INPUTS, VALUE
-from psyneulink.core.globals.registry import rename_instance_in_registry
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
+from psyneulink.core.globals.registry import rename_instance_in_registry
from psyneulink.core.globals.sampleiterator import SampleIterator, SampleSpec
from psyneulink.core.globals.utilities import convert_to_list, ContentAddressableList, is_numeric
from psyneulink.core.llvm.debug import debug_env
@@ -1417,7 +1418,8 @@ class OptimizationControlMechanism(ControlMechanism):
its `monitor_for_control ` attribute, the values of which are used
to compute the `net_outcome ` of executing the `agent_rep
` in a given `OptimizationControlMechanism_State`
- (see `Outcome ` for additional details).
+ (see `objective_mechanism ` and `outcome_input_ports
+ ` for additional details).
state : ndarray
lists the values of the current state -- a concatenation of the `state_feature_values
@@ -1739,6 +1741,7 @@ def _validate_state_feature_default_spec(self, state_feature_default):
f"with a shape appropriate for all of the INPUT Nodes or InputPorts to which it will be applied."
@handle_external_context()
+ @check_user_specified
@tc.typecheck
def __init__(self,
agent_rep=None,
@@ -3496,9 +3499,16 @@ def _gen_llvm_function(self, *, ctx:pnlvm.LLVMBuilderContext, tags:frozenset):
return f
- def _gen_llvm_invoke_function(self, ctx, builder, function, params, context, variable, *, tags:frozenset):
+ def _gen_llvm_invoke_function(self, ctx, builder, function, params, context,
+ variable, out, *, tags:frozenset):
fun = ctx.import_llvm_function(function)
+
+ # The function returns (sample_optimal, value_optimal),
+        # but the value of the mechanism is only 'sample_optimal',
+ # so we cannot reuse the space provided and need to explicitly copy
+ # the results later.
fun_out = builder.alloca(fun.args[3].type.pointee, name="func_out")
+ value = builder.gep(fun_out, [ctx.int32_ty(0), ctx.int32_ty(0)])
args = [params, context, variable, fun_out]
# If we're calling compiled version of Composition.evaluate,
@@ -3507,13 +3517,17 @@ def _gen_llvm_invoke_function(self, ctx, builder, function, params, context, var
args += builder.function.args[-3:]
builder.call(fun, args)
- return fun_out, builder
- def _gen_llvm_output_port_parse_variable(self, ctx, builder, params, state, value, port):
- # The function returns (sample_optimal, value_optimal),
- # but the value of mechanism is only 'sample_optimal'
- value = builder.gep(value, [ctx.int32_ty(0), ctx.int32_ty(0)])
- return super()._gen_llvm_output_port_parse_variable(ctx, builder, params, state, value, port)
+        # The mechanism also converts the value to an array of arrays
+ # e.g. [3 x double] -> [3 x [1 x double]]
+ assert len(value.type.pointee) == len(out.type.pointee)
+ assert value.type.pointee.element == out.type.pointee.element.element
+ with pnlvm.helpers.array_ptr_loop(builder, out, id='mech_value_copy') as (b, idx):
+ src = b.gep(value, [ctx.int32_ty(0), idx])
+ dst = b.gep(out, [ctx.int32_ty(0), idx, ctx.int32_ty(0)])
+ b.store(b.load(src), dst)
+
+ return out, builder
@property
def agent_rep_type(self):
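Editor's note: the copy loop above implements, in LLVM IR, the extraction of `sample_optimal` from the optimizer's `(sample_optimal, value_optimal)` pair, plus the widening of each element into a 1-element array. The equivalent numpy-level shape change, for orientation only:

```python
import numpy as np

sample_optimal = np.array([0.1, 0.2, 0.3])   # [3 x double]
value_optimal = 42.0
func_out = (sample_optimal, value_optimal)

mech_value = func_out[0].reshape(-1, 1)      # [3 x [1 x double]]
assert mech_value.shape == (3, 1)
```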
diff --git a/psyneulink/core/components/mechanisms/modulatory/learning/learningmechanism.py b/psyneulink/core/components/mechanisms/modulatory/learning/learningmechanism.py
index c61c02501f5..2ae1da4c11b 100644
--- a/psyneulink/core/components/mechanisms/modulatory/learning/learningmechanism.py
+++ b/psyneulink/core/components/mechanisms/modulatory/learning/learningmechanism.py
@@ -545,7 +545,7 @@
ADDITIVE, AFTER, ASSERT, ENABLED, INPUT_PORTS, \
LEARNED_PARAM, LEARNING, LEARNING_MECHANISM, LEARNING_PROJECTION, LEARNING_SIGNAL, LEARNING_SIGNALS, \
MATRIX, NAME, ONLINE, OUTPUT_PORT, OWNER_VALUE, PARAMS, PROJECTIONS, SAMPLE, PORT_TYPE, VARIABLE
-from psyneulink.core.globals.parameters import FunctionParameter, Parameter
+from psyneulink.core.globals.parameters import FunctionParameter, Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
from psyneulink.core.globals.utilities import ContentAddressableList, convert_to_np_array, is_numeric, parameter_spec, \
@@ -999,6 +999,7 @@ class Parameters(ModulatoryMechanism_Base.Parameters):
structural=True,
)
+ @check_user_specified
@tc.typecheck
def __init__(self,
# default_variable:tc.any(list, np.ndarray),
diff --git a/psyneulink/core/components/mechanisms/modulatory/modulatorymechanism.py b/psyneulink/core/components/mechanisms/modulatory/modulatorymechanism.py
index df26cf1ded9..ebc92c25a03 100644
--- a/psyneulink/core/components/mechanisms/modulatory/modulatorymechanism.py
+++ b/psyneulink/core/components/mechanisms/modulatory/modulatorymechanism.py
@@ -140,6 +140,7 @@
from psyneulink.core.components.mechanisms.mechanism import Mechanism_Base
from psyneulink.core.globals.keywords import ADAPTIVE_MECHANISM
+from psyneulink.core.globals.parameters import check_user_specified
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
__all__ = [
@@ -191,6 +192,7 @@ class Parameters(Mechanism_Base.Parameters):
# PREFERENCE_SET_NAME: 'ModulatoryMechanismClassPreferences',
# PREFERENCE_KEYWORD: ...}
+ @check_user_specified
def __init__(self,
default_variable,
size,
diff --git a/psyneulink/core/components/mechanisms/processing/compositioninterfacemechanism.py b/psyneulink/core/components/mechanisms/processing/compositioninterfacemechanism.py
index 80869f28701..94ce762873c 100644
--- a/psyneulink/core/components/mechanisms/processing/compositioninterfacemechanism.py
+++ b/psyneulink/core/components/mechanisms/processing/compositioninterfacemechanism.py
@@ -122,7 +122,7 @@
from psyneulink.core.globals.context import ContextFlags, handle_external_context
from psyneulink.core.globals.keywords import COMPOSITION_INTERFACE_MECHANISM, INPUT_PORTS, OUTPUT_PORTS, \
PREFERENCE_SET_NAME
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set, REPORT_OUTPUT_PREF
from psyneulink.core.globals.preferences.preferenceset import PreferenceEntry, PreferenceLevel
@@ -174,6 +174,7 @@ class Parameters(ProcessingMechanism_Base.Parameters):
"""
function = Parameter(Identity, stateful=False, loggable=False)
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
diff --git a/psyneulink/core/components/mechanisms/processing/defaultprocessingmechanism.py b/psyneulink/core/components/mechanisms/processing/defaultprocessingmechanism.py
index 8bb14d9bd03..bf3770582bd 100644
--- a/psyneulink/core/components/mechanisms/processing/defaultprocessingmechanism.py
+++ b/psyneulink/core/components/mechanisms/processing/defaultprocessingmechanism.py
@@ -18,6 +18,7 @@
from psyneulink.core.components.mechanisms.mechanism import Mechanism_Base
from psyneulink.core.globals.defaults import SystemDefaultInputValue
from psyneulink.core.globals.keywords import DEFAULT_PROCESSING_MECHANISM
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
@@ -50,8 +51,9 @@ class DefaultProcessingMechanism_Base(Mechanism_Base):
# PREFERENCE_KEYWORD: ...}
class Parameters(Mechanism_Base.Parameters):
- variable = np.array([SystemDefaultInputValue])
+ variable = Parameter(np.array([SystemDefaultInputValue]), constructor_argument='default_variable')
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
diff --git a/psyneulink/core/components/mechanisms/processing/integratormechanism.py b/psyneulink/core/components/mechanisms/processing/integratormechanism.py
index 4da4319a3bc..e11dd8b47b4 100644
--- a/psyneulink/core/components/mechanisms/processing/integratormechanism.py
+++ b/psyneulink/core/components/mechanisms/processing/integratormechanism.py
@@ -89,10 +89,9 @@
from psyneulink.core.components.functions.stateful.integratorfunctions import AdaptiveIntegrator
from psyneulink.core.components.mechanisms.processing.processingmechanism import ProcessingMechanism_Base
from psyneulink.core.components.mechanisms.mechanism import Mechanism
-from psyneulink.core.globals.json import _substitute_expression_args
from psyneulink.core.globals.keywords import \
DEFAULT_VARIABLE, INTEGRATOR_MECHANISM, VARIABLE, PREFERENCE_SET_NAME
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set, REPORT_OUTPUT_PREF
from psyneulink.core.globals.preferences.preferenceset import PreferenceEntry, PreferenceLevel
from psyneulink.core.globals.utilities import parse_valid_identifier
@@ -152,6 +151,7 @@ class Parameters(ProcessingMechanism_Base.Parameters):
function = Parameter(AdaptiveIntegrator(rate=0.5), stateful=False, loggable=False)
#
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -255,7 +255,4 @@ def as_mdf_model(self):
model.functions.extend(extra_noise_functions)
function_model.args['noise'] = main_noise_function.id
- for func_model in model.functions:
- _substitute_expression_args(func_model)
-
return model
diff --git a/psyneulink/core/components/mechanisms/processing/objectivemechanism.py b/psyneulink/core/components/mechanisms/processing/objectivemechanism.py
index 84b69156e63..2aaffee2c36 100644
--- a/psyneulink/core/components/mechanisms/processing/objectivemechanism.py
+++ b/psyneulink/core/components/mechanisms/processing/objectivemechanism.py
@@ -378,7 +378,7 @@
from psyneulink.core.globals.keywords import \
CONTROL, EXPONENT, EXPONENTS, LEARNING, MATRIX, NAME, OBJECTIVE_MECHANISM, OUTCOME, OWNER_VALUE, \
PARAMS, PREFERENCE_SET_NAME, PROJECTION, PROJECTIONS, PORT_TYPE, VARIABLE, WEIGHT, WEIGHTS
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set, REPORT_OUTPUT_PREF
from psyneulink.core.globals.preferences.preferenceset import PreferenceEntry, PreferenceLevel
from psyneulink.core.globals.utilities import ContentAddressableList
@@ -562,6 +562,7 @@ class Parameters(ProcessingMechanism_Base.Parameters):
standard_output_port_names.extend([OUTCOME])
# FIX: TYPECHECK MONITOR TO LIST OR ZIP OBJECT
+ @check_user_specified
@tc.typecheck
def __init__(self,
monitor=None,
diff --git a/psyneulink/core/components/mechanisms/processing/processingmechanism.py b/psyneulink/core/components/mechanisms/processing/processingmechanism.py
index 8da4cbdfbe5..d6cbc63488c 100644
--- a/psyneulink/core/components/mechanisms/processing/processingmechanism.py
+++ b/psyneulink/core/components/mechanisms/processing/processingmechanism.py
@@ -98,7 +98,8 @@
from psyneulink.core.components.ports.outputport import OutputPort
from psyneulink.core.globals.keywords import \
FUNCTION, MAX_ABS_INDICATOR, MAX_ABS_ONE_HOT, MAX_ABS_VAL, MAX_INDICATOR, MAX_ONE_HOT, MAX_VAL, MEAN, MEDIAN, \
- NAME, PROB, PROCESSING_MECHANISM, PREFERENCE_SET_NAME, STANDARD_DEVIATION, VARIANCE
+ NAME, PROB, PROCESSING_MECHANISM, PREFERENCE_SET_NAME, STANDARD_DEVIATION, VARIANCE, VARIABLE, OWNER_VALUE
+from psyneulink.core.globals.parameters import check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set, REPORT_OUTPUT_PREF
from psyneulink.core.globals.preferences.preferenceset import PreferenceEntry, PreferenceLevel
@@ -165,9 +166,11 @@ class ProcessingMechanism_Base(Mechanism_Base):
{NAME: MAX_ABS_INDICATOR,
FUNCTION: OneHot(mode=MAX_ABS_INDICATOR)},
{NAME: PROB,
+ VARIABLE: OWNER_VALUE,
FUNCTION: SoftMax(output=PROB)}])
standard_output_port_names = [i['name'] for i in standard_output_ports]
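Editor's note: the *PROB* standard OutputPort above now draws its variable from *OWNER_VALUE* and passes it through `SoftMax(output=PROB)`. A plain-numpy analogue of that output mode (assumed semantics: keep one probabilistically chosen element, zero the rest):

```python
import numpy as np

rng = np.random.default_rng(0)

def softmax_prob(x):
    e = np.exp(x - np.max(x))          # numerically stable softmax
    p = e / e.sum()
    choice = rng.choice(len(x), p=p)   # sample an index from the distribution
    out = np.zeros_like(x, dtype=float)
    out[choice] = x[choice]            # keep only the chosen element
    return out

print(softmax_prob(np.array([1.0, 2.0, 3.0])))
```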
+ @check_user_specified
def __init__(self,
default_variable=None,
size=None,
@@ -282,6 +285,7 @@ class ProcessingMechanism(ProcessingMechanism_Base):
PREFERENCE_SET_NAME: 'ProcessingMechanismCustomClassPreferences',
REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE)}
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
diff --git a/psyneulink/core/components/mechanisms/processing/transfermechanism.py b/psyneulink/core/components/mechanisms/processing/transfermechanism.py
index 44adbd44596..ca81117a2c5 100644
--- a/psyneulink/core/components/mechanisms/processing/transfermechanism.py
+++ b/psyneulink/core/components/mechanisms/processing/transfermechanism.py
@@ -842,13 +842,13 @@
from psyneulink.core.components.ports.inputport import InputPort
from psyneulink.core.components.ports.outputport import OutputPort
from psyneulink.core.globals.context import ContextFlags, handle_external_context
-from psyneulink.core.globals.json import _get_variable_parameter_name, _substitute_expression_args
+from psyneulink.core.globals.mdf import _get_variable_parameter_name
from psyneulink.core.globals.keywords import \
COMBINE, comparison_operators, EXECUTION_COUNT, FUNCTION, GREATER_THAN_OR_EQUAL, \
CURRENT_VALUE, LESS_THAN_OR_EQUAL, MAX_ABS_DIFF, \
NAME, NOISE, NUM_EXECUTIONS_BEFORE_FINISHED, OWNER_VALUE, RESET, RESULT, RESULTS, \
SELECTION_FUNCTION_TYPE, TRANSFER_FUNCTION_TYPE, TRANSFER_MECHANISM, VARIABLE
-from psyneulink.core.globals.parameters import Parameter, FunctionParameter
+from psyneulink.core.globals.parameters import Parameter, FunctionParameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
from psyneulink.core.globals.utilities import \
@@ -1283,6 +1283,7 @@ def _validate_termination_comparison_op(self, termination_comparison_op):
return f"must be boolean comparison operator or one of the following strings:" \
f" {','.join(comparison_operators.keys())}."
+ @check_user_specified
@tc.typecheck
def __init__(self,
default_variable=None,
@@ -1543,13 +1544,11 @@ def _gen_llvm_is_finished_cond(self, ctx, builder, params, state):
return builder.fcmp_ordered("!=", is_finished_flag,
is_finished_flag.type(0))
- # If modulated, termination threshold is single element array
- if isinstance(threshold_ptr.type.pointee, pnlvm.ir.ArrayType):
- assert len(threshold_ptr.type.pointee) == 1
- threshold_ptr = builder.gep(threshold_ptr, [ctx.int32_ty(0),
- ctx.int32_ty(0)])
+        # If modulated, the termination threshold is a single-element array.
+        # Otherwise, it is a scalar
+ threshold = pnlvm.helpers.load_extract_scalar_array_one(builder,
+ threshold_ptr)
- threshold = builder.load(threshold_ptr)
cmp_val_ptr = builder.alloca(threshold.type, name="is_finished_value")
if self.termination_measure is max:
assert self._termination_measure_num_items_expected == 1
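Editor's note: `load_extract_scalar_array_one` above folds the two cases (modulated parameter: 1-element array; unmodulated: scalar) into one load. A pure-Python analogue of its intent (an assumption about the helper's behavior, not its actual code):

```python
def load_scalar_or_array_one(value):
    # modulated parameters arrive as single-element arrays, others as scalars
    if isinstance(value, (list, tuple)) and len(value) == 1:
        return value[0]
    return value

assert load_scalar_or_array_one([0.5]) == 0.5
assert load_scalar_or_array_one(0.5) == 0.5
```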
@@ -1605,7 +1604,7 @@ def _gen_llvm_is_finished_cond(self, ctx, builder, params, state):
return builder.fcmp_ordered(cmp_str, cmp_val, threshold)
def _gen_llvm_mechanism_functions(self, ctx, builder, m_base_params, m_params,
- m_state, arg_in, ip_out, *, tags:frozenset):
+ m_state, m_in, m_val, ip_out, *, tags:frozenset):
if self.integrator_mode:
if_state = pnlvm.helpers.get_state_ptr(builder, self, m_state,
@@ -1614,20 +1613,23 @@ def _gen_llvm_mechanism_functions(self, ctx, builder, m_base_params, m_params,
"integrator_function")
if_params, builder = self._gen_llvm_param_ports_for_obj(
self.integrator_function, if_base_params, ctx, builder,
- m_base_params, m_state, arg_in)
+ m_base_params, m_state, m_in)
mf_in, builder = self._gen_llvm_invoke_function(
- ctx, builder, self.integrator_function, if_params, if_state, ip_out, tags=tags)
+ ctx, builder, self.integrator_function, if_params,
+ if_state, ip_out, None, tags=tags)
else:
mf_in = ip_out
mf_state = pnlvm.helpers.get_state_ptr(builder, self, m_state, "function")
mf_base_params = pnlvm.helpers.get_param_ptr(builder, self, m_base_params, "function")
mf_params, builder = self._gen_llvm_param_ports_for_obj(
- self.function, mf_base_params, ctx, builder, m_base_params, m_state, arg_in)
+ self.function, mf_base_params, ctx, builder, m_base_params, m_state, m_in)
mf_out, builder = self._gen_llvm_invoke_function(ctx, builder,
- self.function, mf_params, mf_state, mf_in, tags=tags)
+ self.function, mf_params,
+ mf_state, mf_in, m_val,
+ tags=tags)
clip_ptr = pnlvm.helpers.get_param_ptr(builder, self, m_params, "clip")
if len(clip_ptr.type.pointee) != 0:
@@ -1852,7 +1854,4 @@ def as_mdf_model(self):
integrator_function_model, 'noise', main_noise_function.id
)
- for func_model in model.functions:
- _substitute_expression_args(func_model)
-
return model
diff --git a/psyneulink/core/components/ports/inputport.py b/psyneulink/core/components/ports/inputport.py
index 2b1ee1b637a..84fb891f715 100644
--- a/psyneulink/core/components/ports/inputport.py
+++ b/psyneulink/core/components/ports/inputport.py
@@ -589,7 +589,7 @@
LEARNING_SIGNAL, MAPPING_PROJECTION, MATRIX, NAME, OPERATION, OUTPUT_PORT, OUTPUT_PORTS, OWNER, \
PARAMS, PRODUCT, PROJECTIONS, REFERENCE_VALUE, \
SENDER, SHADOW_INPUTS, SHADOW_INPUT_NAME, SIZE, PORT_TYPE, SUM, VALUE, VARIABLE, WEIGHT
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
from psyneulink.core.globals.utilities import \
@@ -874,6 +874,7 @@ def _validate_default_input(self, default_input):
#endregion
@handle_external_context()
+ @check_user_specified
@tc.typecheck
def __init__(self,
owner=None,
diff --git a/psyneulink/core/components/ports/modulatorysignals/controlsignal.py b/psyneulink/core/components/ports/modulatorysignals/controlsignal.py
index a5e534579cc..5a9c22f9e6d 100644
--- a/psyneulink/core/components/ports/modulatorysignals/controlsignal.py
+++ b/psyneulink/core/components/ports/modulatorysignals/controlsignal.py
@@ -421,7 +421,8 @@
OUTPUT_PORT, OUTPUT_PORTS, OUTPUT_PORT_PARAMS, \
PARAMETER_PORT, PARAMETER_PORTS, PROJECTIONS, \
RECEIVER, FUNCTION
-from psyneulink.core.globals.parameters import FunctionParameter, Parameter, get_validator_by_function
+from psyneulink.core.globals.parameters import FunctionParameter, Parameter, get_validator_by_function, \
+ check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
from psyneulink.core.globals.sampleiterator import SampleSpec, SampleIterator
@@ -792,6 +793,7 @@ def _validate_allocation_samples(self, allocation_samples):
#endregion
+ @check_user_specified
@tc.typecheck
def __init__(self,
owner=None,
diff --git a/psyneulink/core/components/ports/modulatorysignals/gatingsignal.py b/psyneulink/core/components/ports/modulatorysignals/gatingsignal.py
index b24e5da5eb7..62d0476e1c3 100644
--- a/psyneulink/core/components/ports/modulatorysignals/gatingsignal.py
+++ b/psyneulink/core/components/ports/modulatorysignals/gatingsignal.py
@@ -252,7 +252,7 @@
from psyneulink.core.globals.keywords import \
GATE, GATING_PROJECTION, GATING_SIGNAL, INPUT_PORT, INPUT_PORTS, \
MODULATES, OUTPUT_PORT, OUTPUT_PORTS, OUTPUT_PORT_PARAMS, PROJECTIONS, RECEIVER
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
@@ -417,6 +417,7 @@ class Parameters(ControlSignal.Parameters):
#endregion
+ @check_user_specified
@tc.typecheck
def __init__(self,
owner=None,
diff --git a/psyneulink/core/components/ports/modulatorysignals/learningsignal.py b/psyneulink/core/components/ports/modulatorysignals/learningsignal.py
index 335896a8bc9..72500b84991 100644
--- a/psyneulink/core/components/ports/modulatorysignals/learningsignal.py
+++ b/psyneulink/core/components/ports/modulatorysignals/learningsignal.py
@@ -194,7 +194,7 @@
from psyneulink.core.components.ports.outputport import PRIMARY
from psyneulink.core.globals.keywords import \
LEARNING_PROJECTION, LEARNING_SIGNAL, OUTPUT_PORT_PARAMS, PARAMETER_PORT, PARAMETER_PORTS, RECEIVER
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
from psyneulink.core.globals.utilities import parameter_spec
@@ -333,6 +333,7 @@ class Parameters(ModulatorySignal.Parameters):
value = Parameter(np.array([0]), read_only=True, aliases=['learning_signal'], pnl_internal=True)
learning_rate = None
+ @check_user_specified
@tc.typecheck
def __init__(self,
owner=None,
diff --git a/psyneulink/core/components/ports/modulatorysignals/modulatorysignal.py b/psyneulink/core/components/ports/modulatorysignals/modulatorysignal.py
index deb1e474258..3e295b414cb 100644
--- a/psyneulink/core/components/ports/modulatorysignals/modulatorysignal.py
+++ b/psyneulink/core/components/ports/modulatorysignals/modulatorysignal.py
@@ -412,6 +412,7 @@
from psyneulink.core.globals.keywords import \
ADDITIVE_PARAM, CONTROL, DISABLE, MAYBE, MECHANISM, MODULATION, MODULATORY_SIGNAL, MULTIPLICATIVE_PARAM, \
OVERRIDE, PROJECTIONS, VARIABLE
+from psyneulink.core.globals.parameters import check_user_specified
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
__all__ = [
@@ -562,6 +563,7 @@ class Parameters(OutputPort.Parameters):
# PREFERENCE_SET_NAME: 'OutputPortCustomClassPreferences',
# PREFERENCE_KEYWORD: ...}
+ @check_user_specified
def __init__(self,
owner=None,
size=None,
diff --git a/psyneulink/core/components/ports/outputport.py b/psyneulink/core/components/ports/outputport.py
index 5c2be3a09bc..5e1c2bc1eba 100644
--- a/psyneulink/core/components/ports/outputport.py
+++ b/psyneulink/core/components/ports/outputport.py
@@ -631,7 +631,7 @@
OWNER_VALUE, PARAMS, PARAMS_DICT, PROJECTION, PROJECTIONS, RECEIVER, REFERENCE_VALUE, STANDARD_OUTPUT_PORTS, PORT, \
VALUE, VARIABLE, \
output_port_spec_to_parameter_name, INPUT_PORT_VARIABLES
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
from psyneulink.core.globals.utilities import \
@@ -905,6 +905,7 @@ class Parameters(Port_Base.Parameters):
#endregion
+ @check_user_specified
@tc.typecheck
@handle_external_context()
def __init__(self,
diff --git a/psyneulink/core/components/ports/parameterport.py b/psyneulink/core/components/ports/parameterport.py
index c37514c3f58..cd05d489203 100644
--- a/psyneulink/core/components/ports/parameterport.py
+++ b/psyneulink/core/components/ports/parameterport.py
@@ -382,7 +382,7 @@
CONTEXT, CONTROL_PROJECTION, CONTROL_SIGNAL, CONTROL_SIGNALS, FUNCTION, FUNCTION_PARAMS, \
LEARNING_SIGNAL, LEARNING_SIGNALS, MECHANISM, NAME, PARAMETER_PORT, PARAMETER_PORT_PARAMS, PATHWAY_PROJECTION, \
PROJECTION, PROJECTIONS, PROJECTION_TYPE, REFERENCE_VALUE, SENDER, VALUE
-from psyneulink.core.globals.parameters import ParameterBase, ParameterAlias, SharedParameter
+from psyneulink.core.globals.parameters import ParameterBase, ParameterAlias, SharedParameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
from psyneulink.core.globals.utilities \
@@ -701,6 +701,7 @@ class ParameterPort(Port_Base):
#endregion
@tc.typecheck
+ @check_user_specified
def __init__(self,
owner,
reference_value=None,
diff --git a/psyneulink/core/components/ports/port.py b/psyneulink/core/components/ports/port.py
index cdc89dc7b0b..bf17f401732 100644
--- a/psyneulink/core/components/ports/port.py
+++ b/psyneulink/core/components/ports/port.py
@@ -797,7 +797,7 @@ def test_multiple_modulatory_projections_with_mech_and_port_Name_specs(self):
RECEIVER, REFERENCE_VALUE, REFERENCE_VALUE_NAME, SENDER, STANDARD_OUTPUT_PORTS, \
PORT, PORT_COMPONENT_CATEGORY, PORT_CONTEXT, Port_Name, port_params, PORT_PREFS, PORT_TYPE, port_value, \
VALUE, VARIABLE, WEIGHT
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import VERBOSE_PREF
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
from psyneulink.core.globals.registry import register_category
@@ -1004,6 +1004,7 @@ class Parameters(Port.Parameters):
classPreferenceLevel = PreferenceLevel.CATEGORY
+ @check_user_specified
@tc.typecheck
@abc.abstractmethod
def __init__(self,
diff --git a/psyneulink/core/components/projections/modulatory/controlprojection.py b/psyneulink/core/components/projections/modulatory/controlprojection.py
index 624eb563a0d..72d17f635f6 100644
--- a/psyneulink/core/components/projections/modulatory/controlprojection.py
+++ b/psyneulink/core/components/projections/modulatory/controlprojection.py
@@ -120,7 +120,7 @@
from psyneulink.core.globals.context import ContextFlags
from psyneulink.core.globals.keywords import \
CONTROL, CONTROL_PROJECTION, CONTROL_SIGNAL, INPUT_PORT, OUTPUT_PORT, PARAMETER_PORT
-from psyneulink.core.globals.parameters import Parameter, SharedParameter
+from psyneulink.core.globals.parameters import Parameter, SharedParameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
@@ -237,6 +237,7 @@ class Parameters(ModulatoryProjection_Base.Parameters):
projection_sender = ControlMechanism
+ @check_user_specified
@tc.typecheck
def __init__(self,
sender=None,
diff --git a/psyneulink/core/components/projections/modulatory/gatingprojection.py b/psyneulink/core/components/projections/modulatory/gatingprojection.py
index 1c852bbea2c..0bdcc4801e5 100644
--- a/psyneulink/core/components/projections/modulatory/gatingprojection.py
+++ b/psyneulink/core/components/projections/modulatory/gatingprojection.py
@@ -112,7 +112,7 @@
from psyneulink.core.globals.keywords import \
FUNCTION_OUTPUT_TYPE, GATE, GATING_MECHANISM, GATING_PROJECTION, GATING_SIGNAL, \
INPUT_PORT, OUTPUT_PORT
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
@@ -238,6 +238,7 @@ class Parameters(ModulatoryProjection_Base.Parameters):
projection_sender = GatingMechanism
+ @check_user_specified
@tc.typecheck
def __init__(self,
sender=None,
diff --git a/psyneulink/core/components/projections/modulatory/learningprojection.py b/psyneulink/core/components/projections/modulatory/learningprojection.py
index 4b1a4a8bb63..fe0d021db7a 100644
--- a/psyneulink/core/components/projections/modulatory/learningprojection.py
+++ b/psyneulink/core/components/projections/modulatory/learningprojection.py
@@ -202,7 +202,7 @@
from psyneulink.core.globals.keywords import \
LEARNING, LEARNING_PROJECTION, LEARNING_SIGNAL, \
MATRIX, PARAMETER_PORT, PROJECTION_SENDER, ONLINE, AFTER
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
from psyneulink.core.globals.utilities import iscompatible, parameter_spec
@@ -440,6 +440,7 @@ class Parameters(ModulatoryProjection_Base.Parameters):
projection_sender = LearningMechanism
+ @check_user_specified
@tc.typecheck
def __init__(self,
sender:tc.optional(tc.any(LearningSignal, LearningMechanism))=None,
diff --git a/psyneulink/core/components/projections/pathway/mappingprojection.py b/psyneulink/core/components/projections/pathway/mappingprojection.py
index ba6f37c23a8..557c1b3dbd4 100644
--- a/psyneulink/core/components/projections/pathway/mappingprojection.py
+++ b/psyneulink/core/components/projections/pathway/mappingprojection.py
@@ -299,7 +299,7 @@
MAPPING_PROJECTION, MATRIX, \
OUTPUT_PORT, VALUE
from psyneulink.core.globals.log import ContextFlags
-from psyneulink.core.globals.parameters import FunctionParameter, Parameter
+from psyneulink.core.globals.parameters import FunctionParameter, Parameter, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
@@ -442,6 +442,7 @@ class sockets:
projection_sender = OutputPort
+ @check_user_specified
def __init__(self,
sender=None,
receiver=None,
diff --git a/psyneulink/core/components/projections/pathway/pathwayprojection.py b/psyneulink/core/components/projections/pathway/pathwayprojection.py
index e777205f6c2..61952a9327b 100644
--- a/psyneulink/core/components/projections/pathway/pathwayprojection.py
+++ b/psyneulink/core/components/projections/pathway/pathwayprojection.py
@@ -16,8 +16,6 @@
* `PathwayProjection_Overview`
* `PathwayProjection_Creation`
* `PathwayProjection_Structure`
- - `PathwayProjection_Sender`
- - `PathwayProjection_Receiver`
* `PathwayProjection_Execution`
* `PathwayProjection_Class_Reference`
@@ -46,7 +44,6 @@
A PathwayProjection has the same structure as a `Projection `.
-
.. _PathwayProjection_Execution:
Execution
@@ -63,10 +60,9 @@
"""
-from psyneulink.core.components.projections.projection import Projection_Base, ProjectionRegistry
+from psyneulink.core.components.projections.projection import Projection_Base
from psyneulink.core.globals.context import ContextFlags
from psyneulink.core.globals.keywords import NAME, PATHWAY_PROJECTION, RECEIVER, SENDER
-from psyneulink.core.globals.registry import remove_instance_from_registry
__all__ = []
diff --git a/psyneulink/core/components/projections/projection.py b/psyneulink/core/components/projections/projection.py
index 6999bca6702..d0f8c4c39b2 100644
--- a/psyneulink/core/components/projections/projection.py
+++ b/psyneulink/core/components/projections/projection.py
@@ -409,7 +409,7 @@
from psyneulink.core.components.ports.port import PortError
from psyneulink.core.components.shellclasses import Mechanism, Process_Base, Projection, Port
from psyneulink.core.globals.context import ContextFlags
-from psyneulink.core.globals.json import _get_variable_parameter_name
+from psyneulink.core.globals.mdf import _get_variable_parameter_name
from psyneulink.core.globals.keywords import \
CONTROL, CONTROL_PROJECTION, CONTROL_SIGNAL, EXPONENT, FUNCTION_PARAMS, GATE, GATING_PROJECTION, GATING_SIGNAL, \
INPUT_PORT, LEARNING, LEARNING_PROJECTION, LEARNING_SIGNAL, \
@@ -418,7 +418,7 @@
NAME, OUTPUT_PORT, OUTPUT_PORTS, PARAMS, PATHWAY, PROJECTION, PROJECTION_PARAMS, PROJECTION_SENDER, PROJECTION_TYPE, \
RECEIVER, SENDER, STANDARD_ARGS, PORT, PORTS, WEIGHT, ADD_INPUT_PORT, ADD_OUTPUT_PORT, \
PROJECTION_COMPONENT_CATEGORY
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
from psyneulink.core.globals.registry import register_category, remove_instance_from_registry
from psyneulink.core.globals.socket import ConnectionInfo
@@ -631,6 +631,7 @@ class Parameters(Projection.Parameters):
classPreferenceLevel = PreferenceLevel.CATEGORY
+ @check_user_specified
@abc.abstractmethod
def __init__(self,
receiver,
@@ -1066,8 +1067,8 @@ def as_mdf_model(self, simple_edge_format=True):
else:
sender_mech = parse_valid_identifier(self.sender.owner.name)
else:
- sender_name = None
- sender_mech = None
+ sender_name = ''
+ sender_mech = ''
if not isinstance(self.receiver, type):
try:
@@ -1086,8 +1087,8 @@ def as_mdf_model(self, simple_edge_format=True):
else:
receiver_mech = parse_valid_identifier(self.receiver.owner.name)
else:
- receiver_name = None
- receiver_mech = None
+ receiver_name = ''
+ receiver_mech = ''
socket_dict = {
MODEL_SPEC_ID_SENDER_PORT: f'{sender_mech}_{sender_name}',
@@ -1147,10 +1148,7 @@ def as_mdf_model(self, simple_edge_format=True):
else:
metadata = self._mdf_metadata
try:
- metadata[MODEL_SPEC_ID_METADATA]['functions'] = mdf.Function.to_dict_format(
- self.function.as_mdf_model(),
- ordered=False
- )
+ metadata[MODEL_SPEC_ID_METADATA]['functions'] = mdf.Function.to_dict(self.function.as_mdf_model())
except AttributeError:
# projection is in deferred init, special handling here?
pass
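The switch from None to empty strings for missing sender/receiver names matters because the socket identifiers above are built by f-string interpolation, which would otherwise embed the literal text 'None'. A quick illustration:

    sender_mech = sender_name = None
    print(f'{sender_mech}_{sender_name}')   # -> 'None_None'

    sender_mech = sender_name = ''
    print(f'{sender_mech}_{sender_name}')   # -> '_'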
diff --git a/psyneulink/core/components/shellclasses.py b/psyneulink/core/components/shellclasses.py
index 7820abc7328..d1d2dc94f84 100644
--- a/psyneulink/core/components/shellclasses.py
+++ b/psyneulink/core/components/shellclasses.py
@@ -28,6 +28,7 @@
"""
from psyneulink.core.components.component import Component
+from psyneulink.core.globals.parameters import check_user_specified
__all__ = [
'Function', 'Mechanism', 'Process_Base', 'Projection', 'ShellClass', 'ShellClassError', 'Port', 'System_Base',
@@ -73,6 +74,7 @@ class Process_Base(ShellClass):
class Mechanism(ShellClass):
+ @check_user_specified
def __init__(self,
default_variable=None,
size=None,
diff --git a/psyneulink/core/compositions/composition.py b/psyneulink/core/compositions/composition.py
index 836454b9c0f..b720ef3921e 100644
--- a/psyneulink/core/compositions/composition.py
+++ b/psyneulink/core/compositions/composition.py
@@ -8,8 +8,8 @@
# ********************************************* Composition ************************************************************
-"""
+"""
Contents
--------
@@ -111,24 +111,36 @@
The following arguments of the Composition's constructor can be used to add Components when it is constructed:
+ .. _Composition_Pathways_Arg:
+
+ - **pathways**
+ adds one or more `Pathways ` to the Composition; this is equivalent to constructing
+ the Composition and then calling its `add_pathways ` method, and can use the
+ same forms of specification as the **pathways** argument of that method (see `Pathway_Specification` for
+ additional details). If any `learning Pathways ` are included, then the
+ constructor's **disable_learning** argument can be used to disable learning on those by default (though it
+ will still allow learning to occur on any other Compositions, either nested within the current one,
+ or within which the current one is nested) (see `Composition_Learning` for a full description).
+
+ .. _Composition_Nodes_Arg:
+
- **nodes**
adds the specified `Nodes ` to the Composition; this is equivalent to constructing the
Composition and then calling its `add_nodes ` method, and takes the same values as the
- **nodes** argument of that method.
+ **nodes** argument of that method (note that this does *not* construct `Pathways ` for the specified
+ nodes; the **pathways** arg or `add_pathways ` method must be used to do so).
+
+ .. _Composition_Projections_Arg:
- **projections**
adds the specified `Projections ` to the Composition; this is equivalent to constructing the
Composition and then calling its `add_projections ` method, and takes the same
- values as the **projections** argument of that method.
+ values as the **projections** argument of that method. In general, this is not needed -- default Projections
+ are created for Pathways and/or Nodes added to the Composition using the methods described above; however,
+ it can be useful for custom configurations, including the implementation of specific Projection `matrices
+ `.
- - **pathways**
- adds one or more `Pathways ` to the Composition; this is equivalent to constructing the
- Composition and then calling its `add_pathways ` method, and can use the same forms
- of specification as the **pathways** argument of that method. If any `learning Pathways
- ` are included, then the constructor's **disable_learning** argument can be
- used to disable learning on those by default (though it will still allow learning to occur on any other
- Compositions, either nested within the current one, or within which the current one is nested (see
- `Composition_Learning` for a full description).
+ .. _Composition_Controller_Arg:
- **controller**
adds the specified `ControlMechanism` (typically an `OptimizationControlMechanism`) as the `controller
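To illustrate the constructor arguments documented above, a minimal sketch (the Mechanisms and the use of RANDOM_CONNECTIVITY_MATRIX are illustrative choices, not part of this diff):

    import psyneulink as pnl

    A = pnl.TransferMechanism(name='A')
    B = pnl.TransferMechanism(name='B')
    C = pnl.TransferMechanism(name='C')

    comp = pnl.Composition(
        # constructs the A -> B Pathway, with a default MappingProjection
        pathways=[[A, B]],
        # adds C as a Node, but creates no Pathway or Projection for it
        nodes=[C],
        # explicit Projection with a custom matrix (the "custom configuration" case)
        projections=[pnl.MappingProjection(sender=B, receiver=C,
                                           matrix=pnl.RANDOM_CONNECTIVITY_MATRIX)],
    )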
@@ -179,10 +191,10 @@
- `add_linear_processing_pathway `
- adds and a list of `Nodes ` and `Projections ` to the Composition,
- inserting a default Projection between any adjacent pair of Nodes for which one is not otherwise specified
- (or possibly a set of Projections if either Node is a Composition -- see method documentation for details);
- returns the `Pathway` added to the Composition.
+ adds a list of `Nodes ` and `Projections ` to the Composition, inserting
+ a default Projection between any adjacent set(s) of Nodes for which a Projection is not otherwise specified
+ (see method documentation and `Pathway_Specification` for additional details); returns the `Pathway` added to
+ the Composition.
COMMENT:
The following set of `learning methods ` can be used to add `Pathways
@@ -2730,7 +2742,8 @@ def input_function(env, result):
from psyneulink.core.components.functions.nonstateful.transferfunctions import Identity
from psyneulink.core.components.mechanisms.mechanism import Mechanism_Base, MechanismError, MechanismList
from psyneulink.core.components.mechanisms.modulatory.control.controlmechanism import ControlMechanism
-from psyneulink.core.components.mechanisms.modulatory.control.optimizationcontrolmechanism import AGENT_REP, OptimizationControlMechanism
+from psyneulink.core.components.mechanisms.modulatory.control.optimizationcontrolmechanism import AGENT_REP, \
+ OptimizationControlMechanism
from psyneulink.core.components.mechanisms.modulatory.learning.learningmechanism import \
LearningMechanism, ACTIVATION_INPUT_INDEX, ACTIVATION_OUTPUT_INDEX, ERROR_SIGNAL, ERROR_SIGNAL_INDEX
from psyneulink.core.components.mechanisms.modulatory.modulatorymechanism import ModulatoryMechanism_Base
@@ -2747,7 +2760,8 @@ def input_function(env, result):
from psyneulink.core.components.projections.modulatory.modulatoryprojection import ModulatoryProjection_Base
from psyneulink.core.components.projections.pathway.mappingprojection import MappingProjection, MappingError
from psyneulink.core.components.projections.pathway.pathwayprojection import PathwayProjection_Base
-from psyneulink.core.components.projections.projection import Projection_Base, ProjectionError, DuplicateProjectionError
+from psyneulink.core.components.projections.projection import \
+ Projection_Base, ProjectionError, DuplicateProjectionError
from psyneulink.core.components.shellclasses import Composition_Base
from psyneulink.core.components.shellclasses import Mechanism, Projection
from psyneulink.core.compositions.report import Report, \
@@ -2764,16 +2778,16 @@ def input_function(env, result):
MONITOR, MONITOR_FOR_CONTROL, NAME, NESTED, NO_CLAMP, NODE, OBJECTIVE_MECHANISM, ONLINE, OUTCOME, \
OUTPUT, OUTPUT_CIM_NAME, OUTPUT_MECHANISM, OUTPUT_PORTS, OWNER_VALUE, \
PARAMETER, PARAMETER_CIM_NAME, PORT, \
- PROCESSING_PATHWAY, PROJECTION, PROJECTION_TYPE, PROJECTION_PARAMS, PULSE_CLAMP, \
- SAMPLE, SHADOW_INPUTS, SOFT_CLAMP, SSE, \
+ PROCESSING_PATHWAY, PROJECTION, PROJECTION_TYPE, PROJECTION_PARAMS, PULSE_CLAMP, RECEIVER, \
+ SAMPLE, SENDER, SHADOW_INPUTS, SOFT_CLAMP, SSE, \
TARGET, TARGET_MECHANISM, TEXT, VARIABLE, WEIGHT, OWNER_MECH
from psyneulink.core.globals.log import CompositionLog, LogCondition
-from psyneulink.core.globals.parameters import Parameter, ParametersBase
+from psyneulink.core.globals.parameters import Parameter, ParametersBase, check_user_specified
from psyneulink.core.globals.preferences.basepreferenceset import BasePreferenceSet
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel, _assign_prefs
from psyneulink.core.globals.registry import register_category
-from psyneulink.core.globals.utilities import \
- ContentAddressableList, call_with_pruned_args, convert_to_list, nesting_depth, convert_to_np_array, is_numeric, parse_valid_identifier
+from psyneulink.core.globals.utilities import ContentAddressableList, call_with_pruned_args, convert_to_list, \
+ nesting_depth, convert_to_np_array, is_numeric, is_matrix, parse_valid_identifier
from psyneulink.core.scheduling.condition import All, AllHaveRun, Always, Any, Condition, Never
from psyneulink.core.scheduling.scheduler import Scheduler, SchedulingMode
from psyneulink.core.scheduling.time import Time, TimeScale
@@ -3316,12 +3330,30 @@ class Composition(Composition_Base, metaclass=ComponentsMeta):
---------
pathways : Pathway specification or list[Pathway specification...]
- specifies one or more Pathways to add to the Compositions (see **pathways** argument of `add_pathways
- `Composition.add_pathways` for specification format).
+ specifies one or more Pathways to add to the Composition. A list containing `Node `
+ and possible `Projection` specifications at its top level is treated as a single `Pathway`; a list containing
+ any nested lists or other forms of `Pathway specification ` is treated as
+ `multiple pathways ` (see `pathways ` as
+ well as `Pathway specification ` for additional details).
+
+ .. technical_note::
+
+ The design pattern for the use of sets and lists in specifying the **pathways** argument is:
+ - sets comprise Nodes that all occupy the same (parallel) position within a processing Pathway;
+ - lists comprise *sequences* of Nodes; embedded lists are either ignored or generate an error (see below)
+ (this is because lists of Nodes are interpreted as Pathways and Pathways cannot be nested, which would be
+ redundant since the same can be accomplished by simply including the items "inline" within a single list)
+ - if the Pathway specification contains (in its outer list):
+ - only a single item or set of items, each is treated as a SINGLETON in a Pathway;
+ - one or more lists, the items in each list are treated as separate (parallel) pathways;
+ - singly-nested lists ([[[A,B]],[[C,D]]]), they are collapsed and treated as a Pathway;
+ - any list with more than one list nested within it ([[[A,B],[C,D]]]), an error is generated;
+ - a Pathway object is treated as a list (if its pathway attribute is a set, it is wrapped in a list)
+ (see `tests ` for examples)
nodes : `Mechanism `, `Composition` or list[`Mechanism `, `Composition`] : default None
specifies one or more `Nodes ` to add to the Composition; these are each treated as
- `SINGLETONs ` unless they are explicitly assigned `Projections `.
+ `SINGLETON `\\s unless they are explicitly assigned `Projections `.
projections : `Projection ` or list[`Projection `] : default None
specifies one or more `Projections ` to add to the Composition; these are not functional
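A sketch of the set/list design pattern from the technical_note above (illustrative Mechanisms):

    import psyneulink as pnl

    A, B, C, D = (pnl.TransferMechanism(name=n) for n in 'ABCD')
    # One Pathway: A projects to both B and C (a parallel set), both of which project to D
    comp = pnl.Composition(pathways=[[A, {B, C}, D]])

    E, F, G, H = (pnl.TransferMechanism(name=n) for n in 'EFGH')
    # Two separate (parallel) Pathways: E -> F and G -> H
    comp2 = pnl.Composition(pathways=[[E, F], [G, H]])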
@@ -3449,7 +3481,7 @@ class Composition(Composition_Base, metaclass=ComponentsMeta):
argument of the Composition's constructor and/or one of its `Pathway addition methods
`; each item is a list of `Nodes `
(`Mechanisms ` and/or Compositions) intercolated with the `Projection(s) ` between each
- pair of Nodes; both Nodes are Mechanism, then only a single Projection can be specified; if either is a
+ pair of Nodes; if both Nodes are Mechanisms, then only a single Projection can be specified; if either is a
Composition then, under some circumstances, there can be a set of Projections, specifying how the `INPUT
` Node(s) of the sender project to the `OUTPUT ` Node(s) of the receiver
(see `add_linear_processing_pathway` for additional details).
@@ -3711,6 +3743,7 @@ class Parameters(ParametersBase):
class _CompilationData(ParametersBase):
execution = None
+ @check_user_specified
def __init__(
self,
pathways=None,
@@ -3954,9 +3987,6 @@ def _analyze_graph(self, context=None):
self._create_CIM_ports(context=context)
# Call after above so shadow_projections have relevant organization
self._update_shadow_projections(context=context)
- # # FIX: 12/29/21 / 3/30/22: MOVE TO _update_shadow_projections
- # # Call again to accommodate any changes from _update_shadow_projections
- # self._determine_node_roles(context=context)
self._check_for_projection_assignments(context=context)
self.needs_update_graph = False
@@ -4553,7 +4583,10 @@ def _determine_origin_and_terminal_nodes_from_consideration_queue(self):
# consideration set. Identifying these assumes that graph_processing has been called/updated,
# which identifies and "breaks" cycles, and assigns FEEDBACK_SENDER to the appropriate consideration set(s).
for node in self.nodes:
- if not any([efferent for efferent in node.efferents if efferent.receiver.owner is not self.output_CIM]):
+ if not any([
+ efferent.is_active_in_composition(self) for efferent in node.efferents
+ if efferent.receiver.owner is not self.output_CIM
+ ]):
self._add_node_role(node, NodeRole.TERMINAL)
def _add_node_aux_components(self, node, context=None):
@@ -4815,14 +4848,14 @@ def _determine_node_roles(self, context=None):
this is currently the case, but is inconsistent with the analog in Control,
where monitored Mechanisms *are* allowed to be OUTPUT;
therefore, might be worth allowing TARGET_MECHANISM to be assigned as OUTPUT
- - all Nodes for which OUTPUT has been assigned as a required_node_role, inculding by user
+ - all Nodes for which OUTPUT has been assigned as a required_node_role, including by user
(i.e., in self.required_node_roles[NodeRole.OUTPUT])
TERMINAL:
- all Nodes that
- are not an ObjectiveMechanism assigned the role CONTROLLER_OBJECTIVE
- or have *no* efferent projections OR
- - or for for which any efferent projections are either:
+ - or for which any efferent projections are either:
- to output_CIM OR
- assigned as feedback (i.e., self.graph.comp_to_vertex[efferent].feedback == EdgeType.FEEDBACK
.. _note::
@@ -4917,9 +4950,9 @@ def _determine_node_roles(self, context=None):
# and doesn't project to any Nodes other than its `AutoassociativeLearningMechanism`
# (this is not picked up as a `TERMINAL` since it projects to the `AutoassociativeLearningMechanism`)
# but can (or already does) project to an output_CIM
- if all((p.receiver.owner is node
+ if all((p.receiver.owner is node # <- recurrence
or isinstance(p.receiver.owner, AutoAssociativeLearningMechanism)
- or p.receiver.owner is self.output_CIM)
+ or p.receiver.owner is self.output_CIM) # <- already projects to an output_CIM
for p in node.efferents):
self._add_node_role(node, NodeRole.OUTPUT)
continue
@@ -5746,13 +5779,9 @@ def add_projection(self,
return
else:
# Initialize Projection
- projection._init_args['sender'] = sender
- projection._init_args['receiver'] = receiver
- try:
- projection._deferred_init()
- except DuplicateProjectionError:
- # return projection
- return
+ projection._init_args[SENDER] = sender
+ projection._init_args[RECEIVER] = receiver
+ projection._deferred_init()
else:
existing_projections = self._check_for_existing_projections(projection, sender=sender, receiver=receiver)
@@ -5787,6 +5816,15 @@ def add_projection(self,
projection.is_processing = False
# KDM 5/24/19: removing below rename because it results in several existing_projections
# projection.name = f'{sender} to {receiver}'
+
+ # check for required role specification of feedback projections
+ for node, role in self.required_node_roles:
+ if (
+ (node == projection.sender.owner and role == NodeRole.FEEDBACK_SENDER)
+ or (node == projection.receiver.owner and role == NodeRole.FEEDBACK_RECEIVER)
+ ):
+ feedback = True
+
self.graph.add_component(projection, feedback=feedback)
try:
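The effect of the new required-role check can be sketched as follows (illustrative Mechanisms; add_node's required_roles argument is the usual way required_node_roles gets populated):

    import psyneulink as pnl

    A = pnl.TransferMechanism(name='A')
    B = pnl.TransferMechanism(name='B')

    comp = pnl.Composition()
    comp.add_node(A, required_roles=pnl.NodeRole.FEEDBACK_SENDER)
    comp.add_node(B)
    # Because A is a required FEEDBACK_SENDER, the check above forces
    # feedback=True when this Projection is added to the graph
    comp.add_projection(pnl.MappingProjection(), sender=A, receiver=B)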
@@ -6339,6 +6377,46 @@ def _get_destination(self, projection):
# region ---------------------------------- PROCESSING -----------------------------------------------------------
+ def _parse_pathway(self, pathway, name, pathway_arg_str):
+ from psyneulink.core.compositions.pathway import Pathway, _is_pathway_entry_spec
+
+ # Deal with Pathway() or tuple specifications
+ if isinstance(pathway, Pathway):
+ # Give precedence to name specified in call to add_linear_processing_pathway
+ pathway_name = name or pathway.name
+ pathway = pathway.pathway
+ else:
+ pathway_name = name
+
+ if isinstance(pathway, tuple):
+ # If tuple is just a single Node specification for a pathway, return in list:
+ if _is_pathway_entry_spec(pathway, NODE):
+ pathway = [pathway]
+ # If tuple is used to specify a sequence of nodes, convert to list (even though not documented):
+ elif all(_is_pathway_entry_spec(n, ANY) for n in pathway):
+ pathway = list(pathway)
+ # If tuple is (pathway, LearningFunction), get pathway and ignore LearningFunction
+ elif isinstance(pathway[1],type) and issubclass(pathway[1], LearningFunction):
+ warnings.warn(f"{LearningFunction.__name__} found in specification of {pathway_arg_str}: {pathway[1]}; "
+ f"it will be ignored")
+ pathway = pathway[0]
+ else:
+ raise CompositionError(f"Unrecognized tuple specification in {pathway_arg_str}: {pathway}")
+ elif not isinstance(pathway, collections.abc.Iterable) or all(_is_pathway_entry_spec(n, ANY) for n in pathway):
+ pathway = convert_to_list(pathway)
+ else:
+ bad_entry_error_msg = f"The following entries in a pathway specified for '{self.name}' are not " \
+ f"a Node (Mechanism or Composition), a Projection, or a set of either: "
+ bad_entries = [repr(entry) for entry in pathway if not _is_pathway_entry_spec(entry, ANY)]
+ raise CompositionError(f"{bad_entry_error_msg}{','.join(bad_entries)}")
+ # raise CompositionError(f"Unrecognized specification in {pathway_arg_str}: {pathway}")
+
+ lists = [entry for entry in pathway
+ if isinstance(entry, list) and all(_is_pathway_entry_spec(node, NODE) for node in entry)]
+ if lists:
+ raise CompositionError(f"Pathway specification for {pathway_arg_str} has embedded list(s): {lists}")
+ return pathway, pathway_name
+
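The forms that _parse_pathway normalizes can be sketched as follows (illustrative Mechanisms; each call is best read as applied to a fresh Composition):

    import psyneulink as pnl

    A, B, C = (pnl.TransferMechanism(name=n) for n in 'ABC')
    comp = pnl.Composition()

    # A tuple of Nodes is forgiven and converted to a list:
    comp.add_linear_processing_pathway((A, B, C))

    # A (pathway, LearningFunction) tuple: the LearningFunction is ignored
    # here with a warning (use add_linear_learning_pathway for learning):
    D, E = pnl.TransferMechanism(name='D'), pnl.TransferMechanism(name='E')
    comp.add_linear_processing_pathway(([D, E], pnl.BackPropagation))

    # An embedded list of Nodes, e.g. [A, [B, C]], raises CompositionError.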
# FIX: REFACTOR TO TAKE Pathway OBJECT AS ARGUMENT
def add_pathway(self, pathway):
"""Add an existing `Pathway ` to the Composition
@@ -6370,51 +6448,194 @@ def add_pathway(self, pathway):
self._analyze_graph()
+ @handle_external_context()
+ def add_pathways(self, pathways, context=None):
+ """Add pathways to the Composition.
+
+ Arguments
+ ---------
+
+ pathways : Pathway or list[Pathway]
+ specifies one or more `Pathways ` to add to the Composition. Any valid form of `Pathway
+ specification ` can be used. A set can also be used, all elements of which are
+ `Nodes `, in which case a separate `Pathway` is constructed for each.
+
+ Returns
+ -------
+
+ list[Pathway] :
+ List of `Pathways ` added to the Composition.
+
+ """
+
+ # Possible specifications for **pathways** arg:
+ # Node specs (single or set):
+ # 0 Single node: NODE
+ # 1 Set: {NODE...} -> generate a Pathway for each NODE
+ # Single pathway spec (list, tuple or dict):
+ # 2 single list: PWAY = [NODE] or [NODE...] in which *all* are NODES with optional intercolated Projections
+ # 2.5 single with sets: PWAY = [NODE or {NODE...}] or [NODE or {NODE...}, NODE or {NODE...}...]
+ # 3 single tuple: (PWAY, LearningFunction) = (NODE, LearningFunction) or
+ # ([NODE...], LearningFunction)
+ # 4 single dict: {NAME: PWAY} = {NAME: NODE} or
+ # {NAME: [NODE...]} or
+ # {NAME: ([NODE...], LearningFunction)}
+ # Multiple pathway specs (in outer list):
+ # 5 list with list(s): [PWAY] = [NODE, [NODE]] or [[NODE...]...]
+ # 6 list with tuple(s): [(PWAY, LearningFunction)...] = [(NODE..., LearningFunction)...] or
+ # [([NODE...], LearningFunction)...]
+ # 7 list with dict: [{NAME: PWAY}...] = [{NAME: NODE...}...] or
+ # [{NAME: [NODE...]}...] or
+ # [{NAME: (NODE, LearningFunction)}...] or
+ # [{NAME: ([NODE...], LearningFunction)}...]
+
+ from psyneulink.core.compositions.pathway import Pathway, _is_node_spec, _is_pathway_entry_spec
+
+ if context.source == ContextFlags.COMMAND_LINE:
+ pathways_arg_str = f"'pathways' arg for the add_pathways method of {self.name}"
+ elif context.source == ContextFlags.CONSTRUCTOR:
+ pathways_arg_str = f"'pathways' arg of the constructor for {self.name}"
+ else:
+ assert False, f"PROGRAM ERROR: unrecognized context passed to add_pathways of {self.name}."
+ context.string = pathways_arg_str
+
+ if not pathways:
+ return
+
+ # Possibilities 0, 3 or 4 (single NODE, tuple, dict or Pathway specified), so convert to list
+ if _is_node_spec(pathways) or isinstance(pathways, (tuple, dict, Pathway)):
+ pathways = convert_to_list(pathways)
+
+ # Possibility 1 (set of Nodes): create a Pathway for each Node (since set is in pathways arg)
+ elif isinstance(pathways, set):
+ pathways = [pathways]
+
+ # Possibility 2 (list is a single pathway spec) or 2.5 (includes one or more sets):
+ if (isinstance(pathways, list) and
+ # First item must be a node_spec or set of them
+ ((_is_node_spec(pathways[0])
+ or (isinstance(pathways[0], set) and all(_is_node_spec(item) for item in pathways[0])))
+ # All other items must be either Nodes, Projections or sets
+ and all(_is_pathway_entry_spec(p, ANY) for p in pathways))):
+ # Place in outer list (to conform to processing of multiple pathways below)
+ pathways = [pathways]
+ # assert False, f"GOT TO POSSIBILITY 2" # SHOULD HAVE BEEN DONE ABOVE
+
+ # If pathways is not now a list it must be illegitimate
+ if not isinstance(pathways, list):
+ raise CompositionError(f"The {pathways_arg_str} must be a "
+ f"Node, list, set, tuple, dict or Pathway object: {pathways}.")
+
+ # pathways should now be a list in which each entry should be *some* form of pathway specification
+ # (including original spec as possibilities 5, 6, or 7)
+
+ # If there are any lists of Nodes in pathway, or a Pathway or dict with such a list,
+ # then treat ALL entries as parallel pathways, and embed in lists
+ if (isinstance(pathways, collections.abc.Iterable)
+ and any(isinstance(pathway, (list, dict, Pathway)) for pathway in pathways)):
+ pathways = [pathway if isinstance(pathway, (list, dict, Pathway)) else [pathway] for pathway in pathways]
+ else:
+ # Put single pathway in outer list for consistency of handling below (with specified pathway as pathways[0])
+ pathways = np.atleast_2d(np.array(pathways, dtype=object)).tolist()
+
+ added_pathways = []
+
+ def identify_pway_type_and_parse_tuple_prn(pway, tuple_or_dict_str):
+ """
+ Determine whether pway is PROCESSING_PATHWAY or LEARNING_PATHWAY and, if it is the latter,
+ parse tuple into pathway specification and LearningFunction.
+ Return pathway type, pathway, and learning_function or None
+ """
+ learning_function = None
+
+ if isinstance(pway, Pathway):
+ pway = pway.pathway
+
+ if (_is_node_spec(pway) or isinstance(pway, (list, set)) or
+ # Forgive use of tuple to specify a pathway, and treat as if it was a list spec
+ (isinstance(pway, tuple) and all(_is_pathway_entry_spec(n, ANY) for n in pway))):
+ pway_type = PROCESSING_PATHWAY
+ if isinstance(pway, set):
+ pway = [pway]
+ return pway_type, pway, None
+ elif isinstance(pway, tuple):
+ pway_type = LEARNING_PATHWAY
+ if len(pway)!=2:
+ raise CompositionError(f"A tuple specified in the {pathways_arg_str}"
+ f" has more than two items: {pway}")
+ pway, learning_function = pway
+ if not (_is_node_spec(pway) or isinstance(pway, (list, Pathway))):
+ raise CompositionError(f"The 1st item in {tuple_or_dict_str} specified in the "
+ f" {pathways_arg_str} must be a node or a list: {pway}")
+ if not (isinstance(learning_function, type) and issubclass(learning_function, LearningFunction)):
+ raise CompositionError(f"The 2nd item in {tuple_or_dict_str} specified in the "
+ f"{pathways_arg_str} must be a LearningFunction: {learning_function}")
+ return pway_type, pway, learning_function
+ else:
+ assert False, f"PROGRAM ERROR: arg to identify_pway_type_and_parse_tuple_prn in {self.name}" \
+ f"is not a Node, list or tuple: {pway}"
+
+ # Validate items in pathways list and add to Composition using relevant add_linear_<> method.
+ bad_entry_error_msg = f"Every item in the {pathways_arg_str} must be a " \
+ f"Node, list, set, tuple or dict; the following are not: "
+ for pathway in pathways:
+ pathway = pathway[0] if isinstance(pathway, list) and len(pathway) == 1 else pathway
+ pway_name = None
+ if isinstance(pathway, Pathway):
+ pway_name = pathway.name
+ pathway = pathway.pathway
+ if _is_node_spec(pathway) or isinstance(pathway, (list, set, tuple)):
+ if isinstance(pathway, set):
+ bad_entries = [repr(entry) for entry in pathway if not _is_node_spec(entry)]
+ if bad_entries:
+ raise CompositionError(f"{bad_entry_error_msg}{','.join(bad_entries)}")
+ pway_type, pway, pway_learning_fct = identify_pway_type_and_parse_tuple_prn(pathway, f"a tuple")
+ elif isinstance(pathway, dict):
+ if len(pathway)!=1:
+ raise CompositionError(f"A dict specified in the {pathways_arg_str} "
+ f"contains more than one entry: {pathway}.")
+ pway_name, pway = list(pathway.items())[0]
+ if not isinstance(pway_name, str):
+ raise CompositionError(f"The key in a dict specified in the {pathways_arg_str} must be a str "
+ f"(to be used as its name): {pway_name}.")
+ if _is_node_spec(pway) or isinstance(pway, (list, tuple, Pathway)):
+ pway_type, pway, pway_learning_fct = identify_pway_type_and_parse_tuple_prn(pway,
+ f"the value of a dict")
+ else:
+ raise CompositionError(f"The value in a dict specified in the {pathways_arg_str} must be "
+ f"a pathway specification (Node, list or tuple): {pway}.")
+ else:
+ raise CompositionError(f"{bad_entry_error_msg}{repr(pathway)}")
+
+ context.source = ContextFlags.METHOD
+ if pway_type == PROCESSING_PATHWAY:
+ new_pathway = self.add_linear_processing_pathway(pathway=pway,
+ name=pway_name,
+ context=context)
+ elif pway_type == LEARNING_PATHWAY:
+ new_pathway = self.add_linear_learning_pathway(pathway=pway,
+ learning_function=pway_learning_fct,
+ name=pway_name,
+ context=context)
+ else:
+ assert False, f"PROGRAM ERROR: failure to determine pathway_type in add_pathways for {self.name}."
+
+ added_pathways.append(new_pathway)
+
+ return added_pathways
+
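A sketch of the numbered specification forms enumerated above (illustrative Mechanisms; each call is best read as applied to a fresh Composition):

    import psyneulink as pnl

    A, B, C, D, E, F = (pnl.TransferMechanism(name=n) for n in 'ABCDEF')
    comp = pnl.Composition()

    comp.add_pathways(A)                              # 0: single Node
    comp.add_pathways({B, C})                         # 1: set -> one Pathway per Node
    comp.add_pathways([[D, E], [F, A]])               # 5: multiple (parallel) Pathways
    comp.add_pathways({'named_pway': [B, D]})         # 4: dict -> named Pathway
    comp.add_pathways(([C, E], pnl.BackPropagation))  # 3: learning Pathway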
@handle_external_context()
def add_linear_processing_pathway(self, pathway, name:str=None, context=None, *args):
- """Add sequence of `Nodes ` with intercolated Projections.
+ """Add sequence of `Nodes ` with optionally intercolated `Projections `.
.. _Composition_Add_Linear_Processing_Pathway:
- Each `Node ` can be either a `Mechanism`, a `Composition`, or a tuple (Mechanism, `NodeRoles
- `) that can be used to assign `required_roles` to Mechanisms (see `Composition_Nodes` for additional
- details).
-
- `Projections ` can be intercolated between any pair of `Nodes `. If both Nodes
- of a pair are Mechanisms, a single `MappingProjection` can be `specified `. The
- same applies if the first Node is a `Composition` with a single `OUTPUT ` Node and/or the
- second is a `Composition` with a single `INPUT ` Node. If either has more than one `INPUT
- ` or `OUTPUT ` Node, respectively, then a list or set of Projections can be
- specified for each pair of nested Nodes. If no `Projection` is specified between a pair of contiguous Nodes,
- then default Projection(s) are constructed between them, as follows:
-
- * *One to one* - if both Nodes are Mechanisms or, if either is a Composition, the first (sender) has
- only a single `OUTPUT ` Node and the second (receiver) has only a single `INPUT
- ` Node, then a default `MappingProjection` is created from the `primary OutputPort
- ` of the sender (or of its sole `OUTPUT ` Node if the sener is a
- Composition) to the `primary InputPort ` of the receiver (or of its sole of `INPUT
- ` Node if the receiver is a Composition).
-
- * *One to many* - if the first Node (sender) is either a Mechanism or a Composition with a single
- `OUTPUT ` Node, but the second (receiver) is a Composition with more than one
- `INPUT ` Node, then a `MappingProjection` is created from the `primary OutputPort
- ` of the sender Mechanism (or of its sole `OUTPUT ` Node if the
- sender is a Compostion) to each `INPUT ` Node of the receiver, and a *set*
- containing the Projections is intercolated between the two Nodes in the `Pathway`.
-
- * *Many to one* - if the first Node (sender) is a Composition with more than one `OUTPUT `
- Node, and the second (receiver) is either a Mechanism or a Composition with a single `INPUT `
- Node, then a `MappingProjection` is created from each `OUPUT ` Node of the sender to the
- `primary InputPort ` of the receiver Mechanism (or of its sole `INPUT `
- Node if the receiver is a Composition), and a *set* containing the Projections is intercolated
- between the two Nodes in the `Pathway`.
-
- * *Many to many* - if both Nodes are Compositions in which the sender has more than one `INPUT `
- Node and the receiver has more than one `INPUT ` Node, it is not possible to determine
- the correct configuration automatically, and an error is generated. In this case, a set of Projections
- must be explicitly specified.
-
- .. _note::
+ A Pathway is specified as a list, each element of which is either a `Node ` or
+ set of Nodes, possibly intercolated with specifications of `Projections ` between them.
+ The Node(s) specified in each entry of the list project to the Node(s) specified in the next entry
+ (see `Pathway_Specification` for details).
+
+ .. note::
Any specifications of the **monitor_for_control** `argument `
of a constructor for a `ControlMechanism` or the **monitor** argument in the constructor for an
`ObjectiveMechanism` in the **objective_mechanism** `argument ` of a
@@ -6430,9 +6651,8 @@ def add_linear_processing_pathway(self, pathway, name:str=None, context=None, *a
be used, however if a 2-item (Pathway, LearningFunction) tuple is used, the `LearningFunction` is ignored
(this should be used with `add_linear_learning_pathway` if a `learning Pathway
` is desired). A `Pathway` object can also be used; again, however, any
- learning-related specifications are ignored, as are its `name ` if the **name**
- argument of add_linear_processing_pathway is specified.
- See `above ` for additional details.
+ learning-related specifications are ignored, as is its `name ` if the **name** argument
+ of add_linear_processing_pathway is specified.
name : str
specifies the name used for `Pathway`; supersedes `name ` of `Pathway` object if it has one.
@@ -6442,12 +6662,15 @@ def add_linear_processing_pathway(self, pathway, name:str=None, context=None, *a
`Pathway` :
`Pathway` added to Composition.
-
"""
from psyneulink.core.compositions.pathway import Pathway, _is_node_spec, _is_pathway_entry_spec
+ def _get_spec_if_tuple(spec):
+ return spec[0] if isinstance(spec, tuple) else spec
+
nodes = []
+ node_entries = []
# If called internally, use its pathway_arg_str in error messages (in context.string)
if context.source is not ContextFlags.COMMAND_LINE:
@@ -6459,48 +6682,37 @@ def add_linear_processing_pathway(self, pathway, name:str=None, context=None, *a
context.source = ContextFlags.METHOD
context.string = pathway_arg_str
- # First, deal with Pathway() or tuple specifications
- if isinstance(pathway, Pathway):
- # Give precedence to name specified in call to add_linear_processing_pathway
- pathway_name = name or pathway.name
- pathway = pathway.pathway
- else:
- pathway_name = name
-
- if _is_pathway_entry_spec(pathway, ANY):
- pathway = convert_to_list(pathway)
- elif isinstance(pathway, tuple):
- # If tuple is used to specify a sequence of nodes, convert to list (even though not documented):
- if all(_is_pathway_entry_spec(n, ANY) for n in pathway):
- pathway = list(pathway)
- # If tuple is (pathway, LearningFunction), get pathway and ignore LearningFunction
- elif isinstance(pathway[1],type) and issubclass(pathway[1], LearningFunction):
- warnings.warn(f"{LearningFunction.__name__} found in specification of {pathway_arg_str}: {pathway[1]}; "
- f"it will be ignored")
- pathway = pathway[0]
- else:
- raise CompositionError(f"Unrecognized tuple specification in {pathway_arg_str}: {pathway}")
- else:
- raise CompositionError(f"Unrecognized specification in {pathway_arg_str}: {pathway}")
+ pathway, pathway_name = self._parse_pathway(pathway, name, pathway_arg_str)
- # Then, verify that the pathway begins with a node
+ # Verify that the pathway begins with a Node or set of Nodes
if _is_node_spec(pathway[0]):
# Use add_nodes so that node spec can also be a tuple with required_roles
- self.add_nodes(nodes=[pathway[0]],
- context=context)
+ self.add_nodes(nodes=[pathway[0]], context=context)
nodes.append(pathway[0])
+ node_entries.append(pathway[0])
+ # Or a set of Nodes
+ elif isinstance(pathway[0], set):
+ self.add_nodes(nodes=pathway[0], context=context)
+ nodes.extend(pathway[0])
+ node_entries.append(pathway[0])
else:
# 'MappingProjection has no attribute _name' error is thrown when pathway[0] is passed to the error msg
raise CompositionError(f"First item in {pathway_arg_str} must be "
f"a Node (Mechanism or Composition): {pathway}.")
- # Next, add all of the remaining nodes in the pathway
+ # Add all of the remaining nodes in the pathway
for c in range(1, len(pathway)):
- # if the current item is a Mechanism, Composition or (Mechanism, NodeRole(s)) tuple, add it
+ # if the entry is for a Node (Mechanism, Composition or (Mechanism, NodeRole(s)) tuple), add it
if _is_node_spec(pathway[c]):
self.add_nodes(nodes=[pathway[c]],
context=context)
nodes.append(pathway[c])
+ node_entries.append(pathway[c])
+ # If the entry is for a set of Nodes, add them
+ elif isinstance(pathway[c], set) and all(_is_node_spec(entry) for entry in pathway[c]):
+ self.add_nodes(nodes=pathway[c], context=context)
+ nodes.extend(pathway[c])
+ node_entries.append(pathway[c])
# Then, delete any ControlMechanism that has its monitor_for_control attribute assigned
# and any ObjectiveMechanism that projects to a ControlMechanism,
@@ -6532,146 +6744,271 @@ def add_linear_processing_pathway(self, pathway, name:str=None, context=None, *a
projections = []
for c in range(1, len(pathway)):
- # if the current item is a Node
- if _is_node_spec(pathway[c]):
- if _is_node_spec(pathway[c - 1]):
- # if the previous item was also a node, add a MappingProjection between them
- if isinstance(pathway[c - 1], tuple):
- sender = pathway[c - 1][0]
- else:
- sender = pathway[c - 1]
- if isinstance(pathway[c], tuple):
- receiver = pathway[c][0]
- else:
- receiver = pathway[c]
-
- # If sender and/or receiver is a Composition with INPUT or OUTPUT Nodes,
- # replace it with those Nodes
- senders = self._get_nested_nodes_with_same_roles_at_all_levels(sender, NodeRole.OUTPUT)
- receivers = self._get_nested_nodes_with_same_roles_at_all_levels(receiver,
- NodeRole.INPUT, NodeRole.TARGET)
- if senders or receivers:
- senders = senders or convert_to_list(sender)
- receivers = receivers or convert_to_list(receiver)
- if len(senders) > 1 and len(receivers) > 1:
- raise CompositionError(f"Pathway specified with two contiguous Compositions, the first of "
- f"which ({sender.name}) has more than one OUTPUT Node, and second "
- f"of which ({receiver.name}) has more than one INPUT Node, making "
- f"the configuration of Projections between them ambiguous; please "
- f"specify those Projections explicitly.")
- proj = {self.add_projection(sender=s, receiver=r, allow_duplicates=False)
- for r in receivers for s in senders}
- else:
- proj = self.add_projection(sender=sender, receiver=receiver)
- if proj:
- projections.append(proj)
-
- # if the current item is a Projection specification
+ # NODE ENTRY ----------------------------------------------------------------------------------------
+ def _get_node_specs_for_entry(entry, include_roles=None, exclude_roles=None):
+ """Extract Nodes from any tuple specs and replace Compositions with their INPUT Nodes
+ """
+ nodes = []
+ for node in entry:
+ # Extract Nodes from any tuple specs
+ node = _get_spec_if_tuple(node)
+ # Replace any nested Compositions with their INPUT Nodes
+ node = (self._get_nested_nodes_with_same_roles_at_all_levels(node, include_roles, exclude_roles)
+ if isinstance(node, Composition) else [node])
+ nodes.extend(node)
+ return nodes
+
+ # The current entry is a Node or a set of them:
+ # - if it is a set, list or array, leave as is, else place in set for consistency of processing below
+ current_entry = pathway[c] if isinstance(pathway[c], (set, list, np.ndarray)) else {pathway[c]}
+ if all(_is_node_spec(entry) for entry in current_entry):
+ receivers = _get_node_specs_for_entry(current_entry, NodeRole.INPUT, NodeRole.TARGET)
+ # The preceding entry is a Node or set of them:
+ # - if it is a set, list or array, leave as is, else place in set for consistency of processing below
+ preceding_entry = (pathway[c - 1] if isinstance(pathway[c - 1], (set, list, np.ndarray))
+ else {pathway[c - 1]})
+ if all(_is_node_spec(sender) for sender in preceding_entry):
+ senders = _get_node_specs_for_entry(preceding_entry, NodeRole.OUTPUT)
+ projs = {self.add_projection(sender=s, receiver=r, allow_duplicates=False)
+ for r in receivers for s in senders}
+ if all(projs):
+ projs = projs.pop() if len(projs) == 1 else projs
+ projections.append(projs)
+
+ # PROJECTION ENTRY --------------------------------------------------------------------------
+ # Validate that it is between two nodes, then add the Projection;
+ # note: if Projection is already instantiated and valid, it is used as is; if it is a set or list:
+ # - those are implemented between the corresponding pairs of sender and receiver Nodes
+ # - if the list or set includes a default Projection or matrix specification,
+ # it is used between all pairs of Nodes for which a Projection has not been specified
+
+ # The current entry is a Projection specification or a list or set of them
elif _is_pathway_entry_spec(pathway[c], PROJECTION):
- # Convert pathway[c] to list (embedding in one if matrix) for consistency of handling below
- # try:
- # proj_specs = set(convert_to_list(pathway[c]))
- # except TypeError:
- # proj_specs = [pathway[c]]
- if is_numeric(pathway[c]):
- proj_specs = [pathway[c]]
+
+ # Validate that Projection specification is not last entry
+ if c == len(pathway) - 1:
+ raise CompositionError(f"The last item in the {pathway_arg_str} cannot be a Projection: "
+ f"{pathway[c]}.")
+
+ # Validate that entry is between two Nodes (or sets of Nodes)
+ # and get all pairings of sender and receiver nodes
+ prev_entry = pathway[c - 1]
+ next_entry = pathway[c + 1]
+ if ((_is_node_spec(prev_entry) or isinstance(prev_entry, set))
+ and (_is_node_spec(next_entry) or isinstance(next_entry, set))):
+ senders = [_get_spec_if_tuple(sender) for sender in convert_to_list(prev_entry)]
+ receivers = [_get_spec_if_tuple(receiver) for receiver in convert_to_list(next_entry)]
+ node_pairs = list(itertools.product(senders,receivers))
+ else:
+ raise CompositionError(f"A Projection specified in {pathway_arg_str} "
+ f"is not between two Nodes: {pathway[c]}")
+
+ # Convert specs in entry to list (embedding in one if matrix) for consistency of handling below
+ all_proj_specs = [pathway[c]] if is_numeric(pathway[c]) else convert_to_list(pathway[c])
+
+ # Get default Projection specification
+ # Must be a matrix spec, or a Projection with no sender or receiver specified
+ # If it is:
+ # - a single Projection, not in a set or list
+ # - appears only once in the pathways arg
+ # - it is preceded by only one sender Node and followed by only one receiver Node
+ # then treat as an individual Projection specification and not a default projection specification
+ possible_default_proj_spec = [proj_spec for proj_spec in all_proj_specs
+ if (is_matrix(proj_spec)
+ or (isinstance(proj_spec, Projection)
+ and proj_spec._initialization_status & ContextFlags.DEFERRED_INIT
+ and proj_spec._init_args[SENDER] is None
+ and proj_spec._init_args[RECEIVER] is None))]
+ # Validate that there is no more than one default Projection specification
+ if len(possible_default_proj_spec) > 1:
+ raise CompositionError(f"There is more than one matrix specification in the set of Projection "
+ f"specifications for entry {c} of the {pathway_arg_str}: "
+ f"{possible_default_proj_spec}.")
+ # Get spec from list:
+ spec = possible_default_proj_spec[0] if possible_default_proj_spec else None
+ # If it appears only once on its own in the pathways arg and there is only one sender and one receiver
+ # consider it an individual Projection specification rather than a specification of the default
+ if sum(isinstance(s, Projection) and s is spec for s in pathway) == len(senders) == len(receivers) == 1:
+ default_proj_spec = None
+ proj_specs = all_proj_specs
else:
- proj_specs = convert_to_list(pathway[c])
+ # Unpack if tuple spec, and assign feedback (with False as default)
+ default_proj_spec, feedback = (spec if isinstance(spec, tuple) else (spec, False))
+ # Get all specs other than default_proj_spec
+ # proj_specs = [proj_spec for proj_spec in all_proj_specs if proj_spec not in possible_default_proj_spec]
+ proj_specs = [proj_spec for proj_spec in all_proj_specs if proj_spec is not spec]
+
+ # Collect all Projection specifications (to add to Composition at end)
proj_set = []
- for proj_spec in proj_specs:
- if c == len(pathway) - 1:
- raise CompositionError(f"The last item in the {pathway_arg_str} cannot be a Projection: "
- f"{proj_spec}.")
- # confirm that it is between two nodes, then add the projection
- if isinstance(proj_spec, tuple):
- proj = proj_spec[0]
- feedback = proj_spec[1]
- else:
- proj = proj_spec
- feedback = False
- sender = pathway[c - 1]
- receiver = pathway[c + 1]
- if _is_node_spec(sender) and _is_node_spec(receiver):
- if isinstance(sender, tuple):
- sender = sender[0]
- if isinstance(receiver, tuple):
- receiver = receiver[0]
+
+ def handle_misc_errors(proj, error):
+ raise CompositionError(f"Bad Projection specification in {pathway_arg_str} ({proj}): "
+ f"{str(error.error_value)}")
+
+ def handle_duplicates(sender, receiver):
+ duplicate = [p for p in receiver.afferents if p in sender.efferents]
+ assert len(duplicate)==1, \
+ f"PROGRAM ERROR: Could not identify duplicate on DuplicateProjectionError " \
+ f"for {Projection.__name__} between {sender.name} and {receiver.name} " \
+ f"in call to {repr('add_linear_processing_pathway')} for {self.name}."
+ duplicate = duplicate[0]
+ warning_msg = f"Projection specified between {sender.name} and {receiver.name} " \
+ f"in {pathway_arg_str} is a duplicate of one"
+ # IMPLEMENTATION NOTE: Version that allows different Projections between same
+ # sender and receiver in different Compositions
+ # if duplicate in self.projections:
+ # warnings.warn(f"{warning_msg} already in the Composition ({duplicate.name}) "
+ # f"and so will be ignored.")
+ # proj=duplicate
+ # else:
+ # if self.prefs.verbosePref:
+ # warnings.warn(f" that already exists between those nodes ({duplicate.name}). The "
+ # f"new one will be used; delete it if you want to use the existing one")
+ # Version that forbids *any* duplicate Projections between same sender and receiver
+ warnings.warn(f"{warning_msg} that already exists between those nodes ({duplicate.name}) "
+ f"and so will be ignored.")
+ proj_set.append(self.add_projection(duplicate))
+
+ # PARSE PROJECTION SPECIFICATIONS AND INSTANTIATE PROJECTIONS
+ # IMPLEMENTATION NOTE:
+ # self.add_projection is called for each Projection
+ # to catch any duplicates with exceptions below
+
+ # FIX: 4/9/22 - REFACTOR TO DO ANY SPECIFIED ASSIGNMENTS FIRST, AND THEN DEFAULT ASSIGNMENTS (IF ANY)
+ if default_proj_spec is not None and not proj_specs:
+ # If there is a default specification and no other Projection specs,
+ # use default to construct Projections for all node_pairs
+ for sender, receiver in node_pairs:
try:
- if isinstance(proj, (np.ndarray, np.matrix, list)):
- # If proj is a matrix specification, use it as the matrix arg
- proj = MappingProjection(sender=sender,
- matrix=proj,
- receiver=receiver)
+ # Default is a Projection
+ if isinstance(default_proj_spec, Projection):
+ # Copy so that assignments made to instantiated Projection don't affect default
+ projection = self.add_projection(projection=deepcopy(default_proj_spec),
+ sender=sender,
+ receiver=receiver,
+ allow_duplicates=False,
+ feedback=feedback)
else:
- # Otherwise, if it is Port specification, implement default Projection
+ # Default is a matrix_spec
+ assert is_matrix(default_proj_spec), \
+ f"PROGRAM ERROR: Expected {default_proj_spec} to be " \
+ f"a matrix specification in {pathway_arg_str}."
+ projection = self.add_projection(projection=MappingProjection(sender=sender,
+ matrix=default_proj_spec,
+ receiver=receiver),
+ allow_duplicates=False,
+ feedback=feedback)
+ proj_set.append(projection)
+
+ except (InputPortError, ProjectionError, MappingError) as error:
+ handle_misc_errors(proj, error)
+ except DuplicateProjectionError:
+ handle_duplicates(sender, receiver)
+
+ else:
+ # FIX: 4/9/22 - PUT THIS FIRST (BEFORE BLOCK JUST ABOVE) AND THEN ASSIGN TO ANY LEFT IN node_pairs
+ # Projections have been specified
+ for proj_spec in proj_specs:
+ try:
+ proj = _get_spec_if_tuple(proj_spec)
+ feedback = proj_spec[1] if isinstance(proj_spec, tuple) else False
+
+ if isinstance(proj, Projection):
+ # FIX 4/9/22 - TEST FOR DEFERRED INIT HERE (THAT IS NOT A default_proj_spec)
+ # IF JUST SENDER OR RECEIVER, TREAT AS PER PORTS BELOW
+ # Validate that Projection is between a Node in senders and one in receivers
+ if proj._initialization_status & ContextFlags.DEFERRED_INIT:
+ sender_node = senders[0]
+ receiver_node = receivers[0]
+ else:
+ sender_node = proj.sender.owner
+ receiver_node = proj.receiver.owner
+ proj_set.append(self.add_projection(proj,
+ sender = sender_node,
+ receiver = receiver_node,
+ allow_duplicates=False, feedback=feedback))
+ if default_proj_spec:
+ # If there IS a default Projection specification, remove from node_pairs
+ # only the entry for the sender-receiver pair, so that the sender is assigned
+ # a default Projection to all other receivers (to which a Projection is not
+ # explicitly specified) and the receiver is assigned a default Projection from
+ # all other senders (from which a Projection is not explicitly specified).
+ node_pairs = [pair for pair in node_pairs
+ if not all(node in pair for node in {sender_node, receiver_node})]
+ else:
+ # If there is NOT a default Projection specification, remove from node_pairs
+ # all other entries with either the same sender OR receiver, so that neither
+ # the sender nor receiver are assigned any other default Projections.
+ node_pairs = [pair for pair in node_pairs
+ if not any(node in pair for node in {sender_node, receiver_node})]
+
+ # FIX: 4/9/22 - SHOULD INCLUDE MECH SPEC (AND USE PRIMARY PORT) HERE:
+ elif isinstance(proj, Port):
+ # Implement default Projection (using matrix if specified) for all remaining specs
try:
+ # FIX: 4/9/22 - INCLUDE TEST FOR DEFERRED_INIT WITH ONLY RECEIVER SPECIFIED
if isinstance(proj, InputPort):
- proj = MappingProjection(sender=sender,
- receiver=proj)
+ for sender in senders:
+ proj_set.append(self.add_projection(
+ projection=MappingProjection(sender=sender, receiver=proj),
+ allow_duplicates=False, feedback=feedback))
+ # FIX: 4/9/22 - INCLUDE TEST FOR DEFERRED_INIT WITH ONLY SENDER SPECIFIED
elif isinstance(proj, OutputPort):
- proj = MappingProjection(sender=proj,
- receiver=receiver)
+ for receiver in receivers:
+ proj_set.append(self.add_projection(
+ projection=MappingProjection(sender=proj, receiver=receiver),
+ allow_duplicates=False, feedback=feedback))
+ # Remove from node_pairs all pairs involving the owner of the Port
+ # (since all Projections to or from it have been implemented)
+ node_pairs = [pair for pair in node_pairs if (proj.owner not in pair)]
except (InputPortError, ProjectionError) as error:
raise ProjectionError(str(error.error_value))
except (InputPortError, ProjectionError, MappingError) as error:
- raise CompositionError(f"Bad Projection specification in {pathway_arg_str} ({proj}): "
- f"{str(error.error_value)}")
-
+ handle_misc_errors(proj, error)
except DuplicateProjectionError:
- # FIX: 7/22/19 ADD WARNING HERE??
- # FIX: 7/22/19 MAKE THIS A METHOD ON Projection??
- duplicate = [p for p in receiver.afferents if p in sender.efferents]
- assert len(duplicate)==1, \
- f"PROGRAM ERROR: Could not identify duplicate on DuplicateProjectionError " \
- f"for {Projection.__name__} between {sender.name} and {receiver.name} " \
- f"in call to {repr('add_linear_processing_pathway')} for {self.name}."
- duplicate = duplicate[0]
- warning_msg = f"Projection specified between {sender.name} and {receiver.name} " \
- f"in {pathway_arg_str} is a duplicate of one"
- # IMPLEMENTATION NOTE: Version that allows different Projections between same
- # sender and receiver in different Compositions
- # if duplicate in self.projections:
- # warnings.warn(f"{warning_msg} already in the Composition ({duplicate.name}) "
- # f"and so will be ignored.")
- # proj=duplicate
- # else:
- # if self.prefs.verbosePref:
- # warnings.warn(f" that already exists between those nodes ({duplicate.name}). The "
- # f"new one will be used; delete it if you want to use the existing one")
- # Version that forbids *any* duplicate Projections between same sender and receiver
- warnings.warn(f"{warning_msg} that already exists between those nodes ({duplicate.name}) "
- f"and so will be ignored.")
- proj=duplicate
-
- proj = self.add_projection(projection=proj,
- sender=sender,
- receiver=receiver,
- feedback=feedback,
- allow_duplicates=False)
- if proj:
- proj_set.append(proj)
- else:
- raise CompositionError(f"A Projection specified in {pathway_arg_str} "
- f"is not between two Nodes: {pathway[c]}")
+ handle_duplicates(sender, receiver)
+
+ # FIX: 4/9/22 - REPLACE BELOW WITH CALL TO _assign_default_proj_spec(sender, receiver)
+ # If a default Projection is specified and any sender-receiver pairs remain, assign default
+ if default_proj_spec and node_pairs:
+ for sender, receiver in node_pairs:
+ try:
+ p = self.add_projection(projection=deepcopy(default_proj_spec),
+ sender=sender,
+ receiver=receiver,
+ allow_duplicates=False,
+ feedback=feedback)
+ proj_set.append(p)
+ except (InputPortError, ProjectionError, MappingError) as error:
+ handle_misc_errors(proj, error)
+ except DuplicateProjectionError:
+ handle_duplicates(sender, receiver)
+
+ # If there is a single Projection, extract it from list and append as Projection
+ # IMPLEMENTATION NOTE:
+ # this is to support calls to add_linear_processing_pathway by add_learning_<> methods
+ # that do not yet support a list or set of Projection specifications
if len(proj_set) == 1:
projections.append(proj_set[0])
else:
projections.append(proj_set)
+ # BAD PATHWAY ENTRY: contains neither Node nor Projection specification(s)
else:
- raise CompositionError(f"An entry in {pathway_arg_str} is not a Node (Mechanism or Composition) "
- f"or a Projection: {repr(pathway[c])}.")
+ assert False, f"PROGRAM ERROR: An entry in {pathway_arg_str} is not a Node (Mechanism " \
+ f"or Composition), a Projection, or a set of either: {repr(pathway[c])}."
# Finally, clean up any tuple specs
- for i, n in enumerate(nodes):
- if isinstance(n, tuple):
- nodes[i] = nodes[i][0]
- # interleave nodes and projections
- explicit_pathway = [nodes[0]]
+ for i, n_e in enumerate(node_entries):
+ for n in convert_to_list(n_e):
+ if isinstance(n, tuple):
+ nodes[i] = nodes[i][0]
+ # interleave (sets of) Nodes and (sets or lists of) Projections
+ explicit_pathway = [node_entries[0]]
for i in range(len(projections)):
explicit_pathway.append(projections[i])
- explicit_pathway.append(nodes[i + 1])
+ explicit_pathway.append(node_entries[i + 1])
# If pathway is an existing one, return that
existing_pathway = next((p for p in self.pathways if explicit_pathway==p.pathway), None)
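The default-Projection branch above can be exercised with a matrix spec between two sets of Nodes; the matrix then serves as the default for every sender-receiver pairing not covered by an explicit Projection (illustrative sketch):

    import numpy as np
    import psyneulink as pnl

    A, B, C, D = (pnl.TransferMechanism(name=n) for n in 'ABCD')
    comp = pnl.Composition()

    # np.eye(1) is the default matrix spec for all four pairings
    # (A->C, A->D, B->C, B->D), since no individual Projection is given
    comp.add_linear_processing_pathway([{A, B}, np.eye(1), {C, D}])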
@@ -6698,7 +7035,8 @@ def add_linear_processing_pathway(self, pathway, name:str=None, context=None, *a
pass
else:
# Otherwise, something has gone wrong
- assert False, f"PROGRAM ERROR: Bad pathway specification for {self.name} in {pathway_arg_str}: {pathway}."
+ assert False, \
+ f"PROGRAM ERROR: Bad pathway specification for {self.name} in {pathway_arg_str}: {pathway}."
pathway = Pathway(pathway=explicit_pathway,
composition=self,
@@ -6710,150 +7048,6 @@ def add_linear_processing_pathway(self, pathway, name:str=None, context=None, *a
return pathway
- @handle_external_context()
- def add_pathways(self, pathways, context=None):
- """Add pathways to the Composition.
-
- Arguments
- ---------
-
- pathways : Pathway or list[Pathway]
- specifies one or more `Pathways ` to add to the Composition (see `Pathway_Specification`).
-
- Returns
- -------
-
- list[Pathway] :
- List of `Pathways ` added to the Composition.
-
- """
-
- # Possible specifications for **pathways** arg:
- # 1 Single node: NODE
- # Single pathway spec (list, tuple or dict):
- # 2 single list: PWAY = [NODE] or [NODE...] in which *all* are NODES with optional intercolated Projections
- # 3 single tuple: (PWAY, LearningFunction) = (NODE, LearningFunction) or
- # ([NODE...], LearningFunction)
- # 4 single dict: {NAME: PWAY} = {NAME: NODE} or
- # {NAME: [NODE...]} or
- # {NAME: ([NODE...], LearningFunction)}
- # Multiple pathway specs (outer list):
- # 5 list with list: [PWAY] = [NODE, [NODE]] or [[NODE...]...]
- # 6 list with tuple: [(PWAY, LearningFunction)...] = [(NODE..., LearningFunction)...] or
- # [([NODE...], LearningFunction)...]
- # 7 list with dict: [{NAME: PWAY}...] = [{NAME: NODE...}...] or
- # [{NAME: [NODE...]}...] or
- # [{NAME: (NODE, LearningFunction)}...] or
- # [{NAME: ([NODE...], LearningFunction)}...]
-
- from psyneulink.core.compositions.pathway import Pathway, _is_node_spec, _is_pathway_entry_spec
-
- if context.source == ContextFlags.COMMAND_LINE:
- pathways_arg_str = f"'pathways' arg for the add_pathways method of {self.name}"
- elif context.source == ContextFlags.CONSTRUCTOR:
- pathways_arg_str = f"'pathways' arg of the constructor for {self.name}"
- else:
- assert False, f"PROGRAM ERROR: unrecognized context pass to add_pathways of {self.name}."
- context.string = pathways_arg_str
-
- if not pathways:
- return
-
- # Possibilities 1, 3 or 4 (single NODE, tuple or dict specified, so convert to list
- elif _is_node_spec(pathways) or isinstance(pathways, (tuple, dict, Pathway)):
- pathways = convert_to_list(pathways)
-
- # Possibility 2 (list is a single pathway spec):
- if (isinstance(pathways, list)
- and _is_node_spec(pathways[0]) and all(_is_pathway_entry_spec(p, ANY) for p in pathways)):
- # Place in outter list (to conform to processing of multiple pathways below)
- pathways = [pathways]
- # If pathways is not now a list it must be illegitimate
- if not isinstance(pathways, list):
- raise CompositionError(f"The {pathways_arg_str} must be a "
- f"Node, list, tuple, dict or Pathway object: {pathways}.")
-
- # pathways should now be a list in which each entry should be *some* form of pathway specification
- # (including original spec as possibilities 5, 6, or 7)
-
- added_pathways = []
-
- def identify_pway_type_and_parse_tuple_prn(pway, tuple_or_dict_str):
- """
- Determine whether pway is PROCESSING_PATHWAY or LEARNING_PATHWAY and, if it is the latter,
- parse tuple into pathway specification and LearningFunction.
- Return pathway type, pathway, and learning_function or None
- """
- learning_function = None
-
- if isinstance(pway, Pathway):
- pway = pway.pathway
-
- if (_is_node_spec(pway) or isinstance(pway, list) or
- # Forgive use of tuple to specify a pathway, and treat as if it was a list spec
- (isinstance(pway, tuple) and all(_is_pathway_entry_spec(n, ANY) for n in pathway))):
- pway_type = PROCESSING_PATHWAY
- return pway_type, pway, None
- elif isinstance(pway, tuple):
- pway_type = LEARNING_PATHWAY
- if len(pway)!=2:
- raise CompositionError(f"A tuple specified in the {pathways_arg_str}"
- f" has more than two items: {pway}")
- pway, learning_function = pway
- if not (_is_node_spec(pway) or isinstance(pway, (list, Pathway))):
- raise CompositionError(f"The 1st item in {tuple_or_dict_str} specified in the "
- f" {pathways_arg_str} must be a node or a list: {pway}")
- if not (isinstance(learning_function, type) and issubclass(learning_function, LearningFunction)):
- raise CompositionError(f"The 2nd item in {tuple_or_dict_str} specified in the "
- f"{pathways_arg_str} must be a LearningFunction: {learning_function}")
- return pway_type, pway, learning_function
- else:
- assert False, f"PROGRAM ERROR: arg to identify_pway_type_and_parse_tuple_prn in {self.name}" \
- f"is not a Node, list or tuple: {pway}"
-
- # Validate items in pathways list and add to Composition using relevant add_linear_XXX method.
- for pathway in pathways:
- pway_name = None
- if isinstance(pathway, Pathway):
- pway_name = pathway.name
- pathway = pathway.pathway
- if _is_node_spec(pathway) or isinstance(pathway, (list, tuple)):
- pway_type, pway, pway_learning_fct = identify_pway_type_and_parse_tuple_prn(pathway, f"a tuple")
- elif isinstance(pathway, dict):
- if len(pathway)!=1:
- raise CompositionError(f"A dict specified in the {pathways_arg_str} "
- f"contains more than one entry: {pathway}.")
- pway_name, pway = list(pathway.items())[0]
- if not isinstance(pway_name, str):
- raise CompositionError(f"The key in a dict specified in the {pathways_arg_str} must be a str "
- f"(to be used as its name): {pway_name}.")
- if _is_node_spec(pway) or isinstance(pway, (list, tuple, Pathway)):
- pway_type, pway, pway_learning_fct = identify_pway_type_and_parse_tuple_prn(pway,
- f"the value of a dict")
- else:
- raise CompositionError(f"The value in a dict specified in the {pathways_arg_str} must be "
- f"a pathway specification (Node, list or tuple): {pway}.")
- else:
- raise CompositionError(f"Every item in the {pathways_arg_str} must be "
- f"a Node, list, tuple or dict: {repr(pathway)} is not.")
-
- context.source = ContextFlags.METHOD
- if pway_type == PROCESSING_PATHWAY:
- new_pathway = self.add_linear_processing_pathway(pathway=pway,
- name=pway_name,
- context=context)
- elif pway_type == LEARNING_PATHWAY:
- new_pathway = self.add_linear_learning_pathway(pathway=pway,
- learning_function=pway_learning_fct,
- name=pway_name,
- context=context)
- else:
- assert False, f"PROGRAM ERROR: failure to determine pathway_type in add_pathways for {self.name}."
-
- added_pathways.append(new_pathway)
-
- return added_pathways
-
# endregion PROCESSING PATHWAYS
# region ------------------------------------ LEARNING -------------------------------------------------------------
@@ -7446,7 +7640,7 @@ def _create_backpropagation_learning_pathway(self,
if path_length >= 3:
# get the "terminal_sequence" --
# the last 2 nodes in the back prop pathway and the projection between them
- # these components are are processed separately because
+ # these components are processed separately because
# they inform the construction of the Target and Comparator mechs
terminal_sequence = processing_pathway[path_length - 3: path_length]
else:
diff --git a/psyneulink/core/compositions/compositionfunctionapproximator.py b/psyneulink/core/compositions/compositionfunctionapproximator.py
index 0623bb72ddb..1b657ae102a 100644
--- a/psyneulink/core/compositions/compositionfunctionapproximator.py
+++ b/psyneulink/core/compositions/compositionfunctionapproximator.py
@@ -59,6 +59,8 @@
__all__ = ['CompositionFunctionApproximator']
+from psyneulink.core.globals.parameters import check_user_specified
+
class CompositionFunctionApproximatorError(Exception):
def __init__(self, error_value):
@@ -105,6 +107,7 @@ class CompositionFunctionApproximator(Composition):
componentCategory = COMPOSITION_FUNCTION_APPROXIMATOR
+ @check_user_specified
def __init__(self, name=None, **param_defaults):
# self.function = function
super().__init__(name=name, **param_defaults)
diff --git a/psyneulink/core/compositions/parameterestimationcomposition.py b/psyneulink/core/compositions/parameterestimationcomposition.py
index 0ab934d0fc2..3162eae360a 100644
--- a/psyneulink/core/compositions/parameterestimationcomposition.py
+++ b/psyneulink/core/compositions/parameterestimationcomposition.py
@@ -150,7 +150,7 @@
from psyneulink.core.compositions.composition import Composition
from psyneulink.core.globals.context import Context, ContextFlags, handle_external_context
from psyneulink.core.globals.keywords import BEFORE
-from psyneulink.core.globals.parameters import Parameter
+from psyneulink.core.globals.parameters import Parameter, check_user_specified
__all__ = ['ParameterEstimationComposition']
@@ -431,6 +431,7 @@ class Parameters(Composition.Parameters):
setter=_same_seed_for_all_parameter_combinations_setter)
@handle_external_context()
+ @check_user_specified
def __init__(self,
parameters, # OCM control_signals
outcome_variables, # OCM monitor_for_control
diff --git a/psyneulink/core/compositions/pathway.py b/psyneulink/core/compositions/pathway.py
index 951385a36bc..da18203bc84 100644
--- a/psyneulink/core/compositions/pathway.py
+++ b/psyneulink/core/compositions/pathway.py
@@ -26,6 +26,9 @@
- `Pathway_Assignment_to_Composition`
- `Pathway_Name`
- `Pathway_Specification`
+ - `Pathway_Specification_Formats`
+ - `Pathway_Specification_Projections`
+ - `Pathway_Specification_Multiple`
- `Composition_Add_Nested`
* `Pathway_Structure`
* `Pathway_Execution`
@@ -37,9 +40,9 @@
--------
A Pathway is a sequence of `Nodes ` and `Projections `. Generally, Pathways are assigned
-to `Compositions `, but a Pathway object can be created on its and used as a template for specifying a
-Pathway for a Composition, as described below. See `Pathways ` for additional information about
-Pathways in Compositions.
+to `Compositions `, but a Pathway object can also be created on its own and used as a template for
+specifying a Pathway for a Composition, as described below (see `Pathways ` for additional
+information about Pathways in Compositions).
.. _Pathway_Creation:
@@ -54,7 +57,7 @@
*Pathway as a Template*
~~~~~~~~~~~~~~~~~~~~~~~
-A Pathway created on its own, using its constructor, is a **template**, that can be used to `specifiy a Pathway
+A Pathway created on its own, using its constructor, is a **template** that can be used to `specify a Pathway
` for one or more Compositions, as described `below `;
however, it cannot be executed on its own. When a Pathway object is used to assign a Pathway to a Composition,
its `pathway ` attribute, and its `name ` if that is not otherwise specified (see
@@ -82,7 +85,7 @@
If the **name** argument of the Pathway's constructor is used to assign it a name, this is used as the name of the
Pathway created when it is assigned to a Composition in its constructor, or using its `add_pathways
` method. This is also the case if one of the Composition's other `Pathway addition methods
-` is used, as long as the **name** argument of those methods is not specified.
+` is used, as long as the **name** argument of those methods is not specified.
However, if the **name** argument is specified in those methods, or `Pathway specification dictionary
` is used to specify the Pathway's name, that takes precedence over, and replaces
one specified in the Pathway `template's ` `name ` attribute.
@@ -93,27 +96,149 @@
*Pathway Specification*
~~~~~~~~~~~~~~~~~~~~~~~
-The following formats can be used to specify a Pathway in the **pathway** argument of the constructor for the
-Pathway, the **pathways** argument of a the constructor for a `Composition`, or the corresponding argument
+Pathways are specified as a list, each element of which is either a `Node ` or set of Nodes,
+possibly intercalated with specifications of `Projections ` between them. `Nodes `
+can be either a `Mechanism`, a `Composition`, or a tuple (Mechanism or Composition, `NodeRoles `) that can
+be used to assign `required_roles` to the Nodes in the Composition (see `Composition_Nodes` for additional details).
+The Node(s) specified in each entry of the list project to the Node(s) specified in the next entry.
+
+ .. _Pathway_Projection_List_Note:
+
+ .. note::
+ Only a *set* can be used to specify multiple Nodes for a given entry in a Pathway; a *list* can *not* be used
+ for this purpose, as a list containing Nodes is always interpreted as a Pathway. If a list *is* included in a
+ Pathway specification, then it and all other entries are considered as separate, parallel Pathways (see
+ example *vi* in the `figure ` below).
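+
+For example, a minimal sketch (assumes ``import psyneulink as pnl``; the Mechanism names are illustrative only)::
+
+    A, B, C, D = [pnl.ProcessingMechanism(name=n) for n in 'ABCD']
+    # One Pathway, with B and C in parallel between A and D:
+    comp1 = pnl.Composition(pathways=[A, {B, C}, D])
+    # A list inside the specification: it and all other entries are treated as separate, parallel Pathways:
+    comp2 = pnl.Composition(pathways=[A, [B, C], D])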
+
+.. _Pathway_Specification_Projections:
+
+*Pathway Projection Specifications*
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Where no Projections are specified between entries in the list, default Projections (using a `FULL_CONNECTIVITY_MATRIX`;
+see `MappingProjection_Matrix_Specification`) are created from each Node in the first entry, as the sender(s),
+to each Node in the second, as receiver(s) (described further `below `). Projections between
+Nodes in the two entries can also be specified explicitly, by intercalating a Projection or set of Projections between
+the two entries in the list. If the sender and receiver are both a single Mechanism, then a single `MappingProjection`
+can be `specified` between them. The same applies if the sender is a `Composition` with
+a single `OUTPUT ` Node and/or the receiver is a `Composition` with a single `INPUT `
+Node. If either is a set of Nodes, or is a `nested Composition ` with more than one `INPUT
+` or `OUTPUT ` Node, respectively, then a collection of Projections can be specified
+between any or all pairs of the Nodes in the set(s) and/or nested Composition(s), using either a set or list of
+Projections (order of specification does not matter, whether a set or a list is used). The collection can contain
+`MappingProjections ` between specific pairs of Nodes and/or a single default specification
+(either a `matrix ` specification or a MappingProjection without any `sender
+` or `receiver ` specified).
+
+ .. _Pathway_Projection_Matrix_Note:
+
+ .. note::
+ If a collection of Projection specifications includes a default matrix specification, then a list must be used
+ to specify the collection and *not* a set (since a matrix is unhashable and thus cannot be included in a set).
+
+If a default Projection specification is included in the set, it is used to implement a Projection between any pair
+of Nodes for which no MappingProjection is otherwise specified, whether within the collection or on its own; if no
+Projections are specified for any individual pairs, a default Projection is created for every pairing of senders and
+receivers. If a collection contains Projections for one or more pairs of Nodes, but does not include a default
+projection specification, then no Projection is created between any of the other pairings.
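+
+For example, a sketch combining an explicit Projection with a default matrix specification (illustrative names;
+a list rather than a set is used because a matrix is unhashable)::
+
+    import numpy as np
+    import psyneulink as pnl
+    A, B, C, D = [pnl.ProcessingMechanism(name=n) for n in 'ABCD']
+    comp = pnl.Composition(pathways=[{A, B},
+                                     # explicit Projection for A -> C; default matrix for the other pairings
+                                     [pnl.MappingProjection(sender=A, receiver=C), np.eye(1)],
+                                     {C, D}])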
+
+If a pair of entries in a pathway has multiple sender and/or receiver Nodes specified (either in a set and/or belonging
+to a `nested Composition `), and either no Projection(s) or only a default Projection is intercalated
+between them, then a default set of Projections is constructed (using the default Projection specification, if provided)
+between each pair of sender and receiver Nodes in the set(s) or nested Composition(s), as follows:
+
+.. _Pathway_Projections:
+
+* *One to one* - if both the sender and receiver entries are Mechanisms, or if either is a Composition and the
+ sender has a single `OUTPUT ` Node and the receiver has a single `INPUT `
+ Node, then a default `MappingProjection` is created from the `primary OutputPort ` of the
+ sender (or of its sole `OUTPUT ` Node, if the sender is a Composition) to the `primary InputPort
+ ` of the receiver (or of its sole `INPUT ` Node, if the receiver is
+ a Composition), and the Projection specification is intercalated between the two entries in the `Pathway`.
+
+* *One to many* - if the sender is either a Mechanism or a Composition with a single `OUTPUT ` Node,
+ but the receiver is either a Composition with more than one `INPUT ` Node or a set of Nodes, then
+ a `MappingProjection` is created from the `primary OutputPort ` of the sender Mechanism (or of
+ its sole `OUTPUT ` Node if the sender is a Composition) to the `primary InputPort
+ ` of each `INPUT ` Node of the receiver Composition and/or Mechanism in the
+ receiver set, and a set containing the Projections is intercalated between the two entries in the `Pathway`.
+
+* *Many to one* - if the sender is a Composition with more than one `OUTPUT ` Node or a set of
+ Nodes, and the receiver is either a Mechanism or a Composition with a single `INPUT ` Node, then
+ a `MappingProjection` is created from the `primary OutputPort ` of each `OUTPUT
+ ` Node of the sender Composition or of each Mechanism in the set of sender(s), to the `primary
+ InputPort ` of the receiver Mechanism (or of its sole `INPUT ` Node if the receiver
+ is a Composition), and a set containing the Projections is intercalated between the two entries in the `Pathway`.
+
+* *Many to many* - if both the sender and receiver entries contain multiple Nodes (i.e., are sets, and/or the
+ sender is a Composition that has more than one `OUTPUT ` Node and/or the receiver has more
+ than one `INPUT ` Node), then a Projection is constructed for every pairing of Nodes in the
+ sender and receiver entries, using the `primary OutputPort ` of each sender Node and the
+ `primary InputPort ` of each receiver node (see the example below).
+
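+For example, a minimal sketch of the *many to many* case (illustrative names)::
+
+    import psyneulink as pnl
+    A, B, C, D = [pnl.ProcessingMechanism(name=n) for n in 'ABCD']
+    comp = pnl.Composition(pathways=[{A, B}, {C, D}])
+    # default MappingProjections are created for every pairing: A->C, A->D, B->C, B->D
+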
+|
+
+ .. _Pathway_Figure:
+
+ .. figure:: _static/Pathways_fig.svg
+ :scale: 50%
+
+ **Examples of Pathway specifications** (including those in the **pathways** argument of a `Composition`). *i)* Set
+ of `Nodes `: each is treated as a `SINGLETON ` within a single Pathway.
+ *ii)* List of Nodes: forms a sequential Pathway. *iii)* Single Node followed by a set: one to many mapping.
+ *iv)* Set followed by a single Node: many to one mapping. *v)* Set followed by a set: many to many mapping.
+ *vi)* Set followed by a list: because there is a list in the specification (``[C,D]``) all other entries are
+ also treated as parallel Pathways (see `note ` above), so ``A`` and ``B`` in the
+ set are `SINGLETON `\\s. *vii)* Set of Projections intercalated between two sets of Nodes:
+ since the set of Projections does not include any involving ``B`` or ``E`` nor a default Projection specification,
+ they are treated as `SINGLETON `\\s (compare with *x*). *viii)* Set followed by a Node and
+ then a set: many to one to many mapping. *ix)* Node followed by one that is a `nested Composition
+ ` then another Node: one to many to one mapping. *x)* Set followed by a list of Projections
+ then another set: since the list of Projections contains a default Projection specification (``matrix``)
+ Projections are created between all pairings of nodes in the sets that precede and follow the list (compare with
+ *vii*); note that the Projections must be specified in a list because the matrix is a list (or array), which
+ cannot be included in a set (see `note ` above).
+
+ .. technical_note::
+ The full code for the examples above can be found in `test_pathways_examples`,
+ although some have been graphically rearranged for illustrative purposes.
+
+
+.. _Pathway_Specification_Formats:
+
+*Pathway Specification Formats*
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The following formats can be used to specify a Pathway in the **pathway** argument of the constructor for
+the Pathway, the **pathways** argument of the constructor for a `Composition`, or the corresponding argument
of any of a Composition's `Pathway addition methods `:
- * `Node `: -- assigns the Node to a `SINGLETON` Pathway.
+ * `Node `: -- assigns the Node as a `SINGLETON ` in a Pathway.
..
.. _Pathway_Specification_List:
* **list**: [`Node `, <`Projection(s) `,> `Node `...] --
- each item of the list must be a `Node ` -- i.e., Mechanism or Composition, or a
- (`Mechanism `, `NodeRoles `) tuple -- or, optionally, a `Projection specification
- `, a (`Projection specification `, `feedback specification
- `) tuple, or a set of either interposed between a pair of nodes (see
+ each item of the list must be a `Node ` (i.e., Mechanism or Composition, or a
+ (`Mechanism `, `NodeRoles `) tuple) or set of Nodes, optionally with a `Projection
+ specification `, a (`Projection specification `,
+ `feedback specification `) tuple, or a set of either interposed between
+ a pair of (sets of) Nodes (see `add_linear_processing_pathway `
+ for additional details). The list must begin and end with a (set of) Node(s).
+ ..
+ * **set**: {`Node `, `Node `...} --
+ each item of the set must be a `Node ` (i.e., Mechanism or Composition, or a
+ (`Mechanism `, `NodeRoles `) tuple); each Node is treated as a `SINGLETON
+ `. Sets can also be used in a list specification (see above; and see
`add_linear_processing_pathway ` for additional details).
- The list must begin and end with a node.
..
* **2-item tuple**: (Pathway, `LearningFunction`) -- used to specify a `learning Pathway
- `; the 1st item must be a `Node ` or list, as
- described above, and the 2nd item be a subclass of `LearningFunction`.
+ `; the 1st item must be one of the forms of Pathway specification
+ described above, and the 2nd item must be a subclass of `LearningFunction`.
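+
+For example, each of the formats above in a minimal sketch (illustrative names; assumes ``import psyneulink as pnl``)::
+
+    A, B, C = [pnl.ProcessingMechanism(name=n) for n in 'ABC']
+    pnl.Composition(pathways=A)                                  # single Node
+    pnl.Composition(pathways=[A, B, C])                          # list: A -> B -> C
+    pnl.Composition(pathways={A, B, C})                          # set: each Node a SINGLETON Pathway
+    pnl.Composition(pathways=([A, B, C], pnl.BackPropagation))   # 2-item tuple: learning Pathway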
-.. _Multiple_Pathway_Specification:
+.. _Pathway_Specification_Multiple:
+
+*Multiple Pathway Specifications*
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
In addition to the forms of single Pathway specification `above `, where multiple Pathways
can be specified (e.g., the **pathways** argument of the constructor for a `Composition` or its `add_pathways
@@ -130,15 +255,15 @@
If any of the following is used to specify the **pathways** argument:
* a **standalone** `Node ` (i.e., not in a list), \n
* a **single Node** alone in a list, \n
+ * a **set** of Nodes, \n
* one or more Nodes with any other form of `Pathway specification ` in the list \n
- then each such Node in the list is treated as its own `SINGLETON` pathway (i.e., one containing a single
- Node that is both the `ORIGIN` and the`TERMINAL` of the Pathway). However, if the list contains only
- Nodes, then it is treated as a single Pathway (i.e., the list form of `Pathway specification
- `. Thus:
+ then each such Node in the list is assigned as a `SINGLETON ` Node in its own Pathway.
+ However, if the list contains only Nodes, then it is treated as a single Pathway (i.e., the list form of
+ `Pathway specification ` described above). Thus:
**pathway**: NODE -> single pathway \n
**pathway**: [NODE] -> single pathway \n
**pathway**: [NODE, NODE...] -> single pathway \n
- **pathway**: [NODE, NODE, () or {} or `Pathway`...] -> three or more pathways
+ **pathway**: [NODE, () or {} or `Pathway`...] -> individual Pathways for each specification.
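+
+    For example (a minimal illustrative sketch)::
+
+        import psyneulink as pnl
+        A, B, C, D = [pnl.ProcessingMechanism(name=n) for n in 'ABCD']
+        pnl.Composition(pathways=[A, B, C, D])        # one Pathway: A -> B -> C -> D
+        pnl.Composition(pathways=[[A, B], [C, D]])    # two Pathways: A -> B and C -> D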
.. _Pathway_Structure:
@@ -210,13 +335,13 @@
def _is_pathway_entry_spec(entry, desired_type:tc.enum(NODE, PROJECTION, ANY)):
"""Test whether pathway entry is specified type (NODE or PROJECTION)"""
from psyneulink.core.components.projections.projection import _is_projection_spec
- node_specs = (Mechanism, Composition)
- is_node = is_proj = False
+ node_types = (Mechanism, Composition)
+ is_node = is_proj = is_set = False
if desired_type in {NODE, ANY}:
- is_node = (isinstance(entry, node_specs)
+ is_node = (isinstance(entry, node_types)
or (isinstance(entry, tuple)
- and isinstance(entry[0], node_specs)
+ and isinstance(entry[0], node_types)
and (isinstance(entry[1], NodeRole) or
(isinstance(entry[1], list) and all(isinstance(nr, NodeRole) for nr in entry[1])))))
@@ -226,9 +351,13 @@ def _is_pathway_entry_spec(entry, desired_type:tc.enum(NODE, PROJECTION, ANY)):
and _is_projection_spec(entry[0])
and entry[1] in {True, FEEDBACK, False, MAYBE})
or (isinstance(entry, (set,list))
and all(_is_projection_spec(item) for item in entry)))
- if is_node or is_proj:
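+ # A set of Node specifications is a valid pathway entry only when any entry type is acceptable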
+ if desired_type in {ANY}:
+ is_set = (isinstance(entry, set) and all(_is_node_spec(item) for item in entry))
+
+ if is_node or is_proj or is_set:
return True
else:
return False
diff --git a/psyneulink/core/globals/__init__.py b/psyneulink/core/globals/__init__.py
index 2119b48aeb9..45cdf94c8fa 100644
--- a/psyneulink/core/globals/__init__.py
+++ b/psyneulink/core/globals/__init__.py
@@ -1,6 +1,6 @@
from . import context
from . import defaults
-from . import json
+from . import mdf
from . import keywords
from . import kvo
from . import log
@@ -12,10 +12,10 @@
from .context import *
from .defaults import *
-from .json import *
from .keywords import *
from .kvo import *
from .log import *
+from .mdf import *
from .parameters import *
from .preferences import *
from .registry import *
@@ -24,10 +24,10 @@
__all__ = list(context.__all__)
__all__.extend(defaults.__all__)
-__all__.extend(json.__all__)
__all__.extend(keywords.__all__)
__all__.extend(kvo.__all__)
__all__.extend(log.__all__)
+__all__.extend(mdf.__all__)
__all__.extend(parameters.__all__)
__all__.extend(preferences.__all__)
__all__.extend(registry.__all__)
diff --git a/psyneulink/core/globals/keywords.py b/psyneulink/core/globals/keywords.py
index c65e6a5c965..713e9e16dfb 100644
--- a/psyneulink/core/globals/keywords.py
+++ b/psyneulink/core/globals/keywords.py
@@ -997,10 +997,6 @@ def _is_metric(metric):
MODEL_SPEC_ID_PARAMETER_VALUE = 'value'
MODEL_SPEC_ID_PARAMETER_INITIAL_VALUE = 'default_initial_value'
-MODEL_SPEC_ID_NODES = 'nodes'
-MODEL_SPEC_ID_PROJECTIONS = 'edges'
-MODEL_SPEC_ID_COMPOSITION = 'graphs'
-
MODEL_SPEC_ID_MDF_VARIABLE = 'variable0'
MODEL_SPEC_ID_SHAPE = 'shape'
diff --git a/psyneulink/core/globals/json.py b/psyneulink/core/globals/mdf.py
similarity index 54%
rename from psyneulink/core/globals/json.py
rename to psyneulink/core/globals/mdf.py
index 4c598cc8c4d..d898bb8394a 100644
--- a/psyneulink/core/globals/json.py
+++ b/psyneulink/core/globals/mdf.py
@@ -3,36 +3,36 @@
Contents
--------
- * `JSON_Overview`
- * `JSON_Examples`
- * `JSON_Model_Specification`
+ * `MDF_Overview`
+ * `MDF_Examples`
+ * `MDF_Model_Specification`
-.. _JSON_Overview:
+.. _MDF_Overview:
Overview
--------
The developers of PsyNeuLink are collaborating with the scientific community, as part of the `OpenNeuro effort
-`_, to create a standard, JSON-based format for the description and exchange of computational
+`_, to create a standard, serialized format for the description and exchange of computational
models of brain and psychological function across different simulation environments. As part of this effort,
PsyNeuLink supports the `ModECI Model Description Format `_ (MDF) by
including the ability to produce an MDF-compatible model from a PsyNeuLink model and to construct valid Python
scripts that express a PsyNeuLink model from an MDF model.
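+
+For example, a minimal sketch (assumes ``comp`` is an existing `Composition`)::
+
+    mdf_model = comp.as_mdf_model()   # ModECI MDF representation of the Composition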
Any PsyNeuLink `Composition` or `Component` can be exported to MDF format using its `as_mdf_model` method or
-to JSON format using its `json_summary` method. `json_summary` generates a string that, passed into the
-`generate_script_from_json` function, produces a valid Python script replicating the original PsyNeuLink model.
-`write_json_file` can be used to write the json_summary for one or more Compositions into a specified file (though
-see `note `). `generate_script_from_json` can accept either the string returned
-by `generate_script_from_json` or the name of a file containing one.
-Calling ``exec(generate_script_from_json(