From 7238c7d08210e93db90725c37cff15baeba5838f Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 7 Mar 2018 14:38:21 -0500 Subject: [PATCH 001/200] moving composition into its own folder to allow for pathways, systems, and eventuall subcompositions (learning) --- psyneulink/__init__.py | 9 +- psyneulink/compositions/__init__.py | 11 ++ psyneulink/{ => compositions}/composition.py | 0 psyneulink/compositions/pathwaycomposition.py | 108 ++++++++++++++++++ psyneulink/compositions/systemcomposition.py | 22 ++++ tests/composition/test_composition.py | 3 +- tests/composition/test_graph.py | 2 +- tests/scheduling/test_condition.py | 7 +- tests/scheduling/test_scheduler.py | 3 +- 9 files changed, 152 insertions(+), 13 deletions(-) create mode 100644 psyneulink/compositions/__init__.py rename psyneulink/{ => compositions}/composition.py (100%) create mode 100644 psyneulink/compositions/pathwaycomposition.py create mode 100644 psyneulink/compositions/systemcomposition.py diff --git a/psyneulink/__init__.py b/psyneulink/__init__.py index 9ee3167b0e9..fcd509853f9 100644 --- a/psyneulink/__init__.py +++ b/psyneulink/__init__.py @@ -20,19 +20,18 @@ ''' import logging as _logging -import numpy as _numpy -from ._version import get_versions +import numpy as _numpy +from psyneulink.compositions import composition +from psyneulink.compositions.composition import * # starred imports to allow user imports from top level from . import components -from . import composition from . import globals from . import library from . import scheduling - +from ._version import get_versions from .components import * -from .composition import * from .globals import * from .library import * from .scheduling import * diff --git a/psyneulink/compositions/__init__.py b/psyneulink/compositions/__init__.py new file mode 100644 index 00000000000..7076d8d83c9 --- /dev/null +++ b/psyneulink/compositions/__init__.py @@ -0,0 +1,11 @@ +from . import composition +from . import systemcomposition +from . 
import pathwaycomposition + +from .composition import * +from .systemcomposition import * +from .pathwaycomposition import * + +__all__ = list(composition.__all__) +__all__.extend(systemcomposition.__all__) +__all__.extend(pathwaycomposition.__all__) \ No newline at end of file diff --git a/psyneulink/composition.py b/psyneulink/compositions/composition.py similarity index 100% rename from psyneulink/composition.py rename to psyneulink/compositions/composition.py diff --git a/psyneulink/compositions/pathwaycomposition.py b/psyneulink/compositions/pathwaycomposition.py new file mode 100644 index 00000000000..d579fac86a1 --- /dev/null +++ b/psyneulink/compositions/pathwaycomposition.py @@ -0,0 +1,108 @@ +from psyneulink.compositions.composition import Composition, MechanismRole +from psyneulink.components.mechanisms.mechanism import Mechanism +from psyneulink.components.projections.pathway.mappingprojection import MappingProjection +from psyneulink.components.projections.projection import Projection +from psyneulink.globals.keywords import SOFT_CLAMP + + +__all__ = [ + 'PathwayComposition', 'PathwayCompositionError' +] + +class PathwayCompositionError(Exception): + + def __init__(self, error_value): + self.error_value = error_value + + def __str__(self): + return repr(self.error_value) + +class PathwayComposition(Composition): + ''' + + Arguments + ---------- + + Attributes + ---------- + + Returns + ---------- + ''' + + def __init__(self): + super(PathwayComposition, self).__init__() + + def add_linear_processing_pathway(self, pathway): + # First, verify that the pathway begins with a mechanism + if isinstance(pathway[0], Mechanism): + self.add_mechanism(pathway[0]) + else: + # 'MappingProjection has no attribute _name' error is thrown when pathway[0] is passed to the error msg + raise PathwayCompositionError("The first item in a linear processing pathway must be a " + "mechanism.") + # Then, add all of the remaining mechanisms in the pathway + for c in range(1, len(pathway)): + # if the current item is a mechanism, add it + if isinstance(pathway[c], Mechanism): + self.add_mechanism(pathway[c]) + + # Then, loop through and validate that the mechanism-projection relationships make sense + # and add MappingProjections where needed + for c in range(1, len(pathway)): + if isinstance(pathway[c], Mechanism): + if isinstance(pathway[c - 1], Mechanism): + # if the previous item was also a mechanism, add a mapping projection between them + self.add_projection( + pathway[c - 1], + MappingProjection( + sender=pathway[c - 1], + receiver=pathway[c] + ), + pathway[c] + ) + # if the current item is a projection + elif isinstance(pathway[c], Projection): + if c == len(pathway) - 1: + raise PathwayCompositionError("{} is the last item in the pathway. A projection cannot be the last item in" + " a linear processing pathway.".format(pathway[c])) + # confirm that it is between two mechanisms, then add the projection + if isinstance(pathway[c - 1], Mechanism) and isinstance(pathway[c + 1], Mechanism): + self.add_projection(pathway[c - 1], pathway[c], pathway[c + 1]) + else: + raise PathwayCompositionError( + "{} is not between two mechanisms. A projection in a linear processing pathway must be preceded" + " by a mechanism and followed by a mechanism".format(pathway[c])) + else: + raise PathwayCompositionError("{} is not a projection or mechanism. 
A linear processing pathway must be made " + "up of projections and mechanisms.".format(pathway[c])) + + def execute( + self, + inputs, + scheduler_processing=None, + scheduler_learning=None, + execution_id=None, + call_before_time_step=None, + call_before_pass=None, + call_after_time_step=None, + call_after_pass=None, + clamp_input=SOFT_CLAMP, + targets=None + ): + + if isinstance(inputs, list): + inputs = {self.get_mechanisms_by_role(MechanismRole.ORIGIN).pop(): inputs} + + output = super(PathwayComposition, self).execute( + inputs, + scheduler_processing, + scheduler_learning, + execution_id, + call_after_time_step, + call_before_pass, + call_after_time_step, + call_after_pass, + clamp_input, + ) + return output diff --git a/psyneulink/compositions/systemcomposition.py b/psyneulink/compositions/systemcomposition.py new file mode 100644 index 00000000000..f57328c79d4 --- /dev/null +++ b/psyneulink/compositions/systemcomposition.py @@ -0,0 +1,22 @@ +from psyneulink.compositions.composition import Composition, MechanismRole +from psyneulink.components.mechanisms.mechanism import Mechanism +from psyneulink.components.projections.pathway.mappingprojection import MappingProjection +from psyneulink.components.projections.projection import Projection +from psyneulink.globals.keywords import SOFT_CLAMP + +class SystemComposition(Composition): + ''' + + Arguments + ---------- + + Attributes + ---------- + + Returns + ---------- + ''' + + def __init__(self): + super(SystemComposition, self).__init__() + diff --git a/tests/composition/test_composition.py b/tests/composition/test_composition.py index 36ddef307ff..2f2d685b39c 100644 --- a/tests/composition/test_composition.py +++ b/tests/composition/test_composition.py @@ -1,6 +1,5 @@ import functools import logging - from timeit import timeit import numpy as np @@ -10,7 +9,7 @@ from psyneulink.components.mechanisms.processing import integratormechanism from psyneulink.components.mechanisms.processing.transfermechanism import TransferMechanism from psyneulink.components.projections.pathway.mappingprojection import MappingProjection -from psyneulink.composition import Composition, CompositionError, MechanismRole +from psyneulink.compositions.composition import Composition, CompositionError, MechanismRole from psyneulink.scheduling.condition import EveryNCalls from psyneulink.scheduling.scheduler import Scheduler from psyneulink.scheduling.time import TimeScale diff --git a/tests/composition/test_graph.py b/tests/composition/test_graph.py index b458d01cdd4..c835e96ca64 100644 --- a/tests/composition/test_graph.py +++ b/tests/composition/test_graph.py @@ -1,6 +1,6 @@ import pytest -from psyneulink.composition import Graph, Vertex +from psyneulink.compositions.composition import Graph, Vertex @pytest.mark.skip diff --git a/tests/scheduling/test_condition.py b/tests/scheduling/test_condition.py index c1afeb96db4..b8ce327292e 100644 --- a/tests/scheduling/test_condition.py +++ b/tests/scheduling/test_condition.py @@ -5,13 +5,14 @@ from psyneulink.components.functions.function import Linear from psyneulink.components.mechanisms.processing.transfermechanism import TransferMechanism from psyneulink.components.projections.pathway.mappingprojection import MappingProjection -from psyneulink.composition import Composition -from psyneulink.scheduling.condition import AfterCall, AfterNCalls, AfterNCallsCombined, AfterNPasses, AfterNTrials, AfterPass, AfterTrial, All, AllHaveRun, Always, Any, AtPass, AtTrial, BeforeNCalls, BeforePass, BeforeTrial, EveryNCalls, 
EveryNPasses, NWhen, Not, WhenFinished, WhenFinishedAll, WhenFinishedAny, WhileNot +from psyneulink.compositions.composition import Composition +from psyneulink.scheduling.condition import AfterCall, AfterNCalls, AfterNCallsCombined, AfterNPasses, AfterNTrials, \ + AfterPass, AfterTrial, All, AllHaveRun, Always, Any, AtPass, AtTrial, BeforeNCalls, BeforePass, BeforeTrial, \ + EveryNCalls, EveryNPasses, NWhen, Not, WhenFinished, WhenFinishedAll, WhenFinishedAny, WhileNot from psyneulink.scheduling.condition import ConditionError, ConditionSet from psyneulink.scheduling.scheduler import Scheduler from psyneulink.scheduling.time import TimeScale - logger = logging.getLogger(__name__) diff --git a/tests/scheduling/test_scheduler.py b/tests/scheduling/test_scheduler.py index 144c407eb46..b8e0848223d 100644 --- a/tests/scheduling/test_scheduler.py +++ b/tests/scheduling/test_scheduler.py @@ -5,13 +5,12 @@ from psyneulink.components.functions.function import Linear from psyneulink.components.mechanisms.processing.transfermechanism import TransferMechanism from psyneulink.components.projections.pathway.mappingprojection import MappingProjection -from psyneulink.composition import Composition +from psyneulink.compositions.composition import Composition from psyneulink.scheduling.condition import AfterNCalls, AfterNTrials, AfterPass, All, Always, Any, AtPass, \ BeforePass, EveryNCalls, EveryNPasses, JustRan, WhenFinished from psyneulink.scheduling.scheduler import Scheduler from psyneulink.scheduling.time import TimeScale - logger = logging.getLogger(__name__) From a172478af98d54a69f65bd0dbbe2a9fecf339a93 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 7 Mar 2018 14:41:21 -0500 Subject: [PATCH 002/200] selectively bringing in pieces of composition branch (excluding some learning code that we will not move forward with) --- psyneulink/compositions/composition.py | 329 ++++++++++++++++++++----- 1 file changed, 268 insertions(+), 61 deletions(-) diff --git a/psyneulink/compositions/composition.py b/psyneulink/compositions/composition.py index ee04b73a0f8..f9ea4c218ff 100644 --- a/psyneulink/compositions/composition.py +++ b/psyneulink/compositions/composition.py @@ -338,7 +338,9 @@ def __init__(self): self.graph = Graph() # Graph of the Composition self._graph_processing = None self.mechanisms = [] - self.input_mechanisms = {} + self.CIM = CompositionInterfaceMechanism(name="Stimulus_CIM") + self.CIM_output_states = {} + self.execution_ids = [] self._scheduler_processing = None self._scheduler_learning = None @@ -388,7 +390,12 @@ def scheduler_processing(self): :getter: Returns the default processing scheduler, and builds it if it needs updating since the last access. ''' if self.needs_update_scheduler_processing or self._scheduler_processing is None: + old_scheduler = self._scheduler_processing self._scheduler_processing = Scheduler(graph=self.graph_processing) + + if old_scheduler is not None: + self._scheduler_processing.add_condition_set(old_scheduler.condition_set) + self.needs_update_scheduler_processing = False return self._scheduler_processing @@ -402,7 +409,12 @@ def scheduler_learning(self): :getter: Returns the default learning scheduler, and builds it if it needs updating since the last access. 
''' if self.needs_update_scheduler_learning or self._scheduler_learning is None: + old_scheduler = self._scheduler_learning self._scheduler_learning = Scheduler(graph=self.graph) + + if old_scheduler is not None: + self._scheduler_learning.add_condition_set(old_scheduler.condition_set) + self.needs_update_scheduler_learning = False return self._scheduler_learning @@ -428,6 +440,8 @@ def add_mechanism(self, mech): self.needs_update_graph = True self.needs_update_graph_processing = True + self.needs_update_scheduler_processing = True + self.needs_update_scheduler_learning = True def add_projection(self, sender, projection, receiver): ''' @@ -457,6 +471,36 @@ def add_projection(self, sender, projection, receiver): self.needs_update_graph = True self.needs_update_graph_processing = True + self.needs_update_scheduler_processing = True + self.needs_update_scheduler_learning = True + def add_pathway(self, path): + ''' + Adds an existing Pathway to the current Composition + + Arguments + --------- + + path: the Pathway (Composition) to be added + + ''' + + # identify mechanisms and projections + mechanisms, projections = [], [] + for c in path.graph.vertices: + if isinstance(c.component, Mechanism): + mechanisms.append(c.component) + elif isinstance(c.component, Projection): + projections.append(c.component) + + # add all mechanisms first + for m in mechanisms: + self.add_mechanism(m) + + # then projections + for p in projections: + self.add_projection(p.sender.owner, p, p.receiver.owner) + + self._analyze_graph() def add_linear_processing_pathway(self, pathway): # First, verify that the pathway begins with a mechanism @@ -600,6 +644,8 @@ def _analyze_graph(self, graph=None): elif child not in visited: next_visit_stack.append(child) + self._create_CIM_output_states() + self.needs_update_graph = False def _update_processing_graph(self): @@ -669,6 +715,12 @@ def get_mechanisms_by_role(self, role): except KeyError as e: raise CompositionError('Mechanism not assigned to role in mechanisms_to_roles: {0}'.format(e)) + def get_roles_by_mechanism(self, mechanism): + try: + return self.mechanisms_to_roles[mechanism] + except KeyError: + raise CompositionError('Mechanism {0} not found in {1}.mechanisms_to_roles'.format(mechanism, self)) + def _set_mechanism_roles(self, mech, roles): self.clear_mechanism_role(mech) for role in roles: @@ -720,39 +772,72 @@ def _validate_feed_dict(self, feed_dict, mech_type_list, mech_type): "{!s} where the InputState takes values of length {!s}". 
format(i, mech.name, val_length, state_length)) - def _create_input_mechanisms(self): + def _create_CIM_output_states(self): ''' - builds a dictionary of { Mechanism : InputMechanism } pairs where each 'ORIGIN' Mechanism has a - corresponding InputMechanism + builds a dictionary of { Mechanism : OutputState } pairs where each origin mechanism has at least one + corresponding OutputState on the CompositionInterfaceMechanism ''' - is_origin = self.get_mechanisms_by_role(MechanismRole.ORIGIN) - has_input_mechanism = self.input_mechanisms.keys() + # FIX BUG: stimulus CIM output states are not properly destroyed when analyze graph is run multiple times + # (extra mechanisms are marked as CIMs when graph is analyzed too early, so they create CIM output states) + + # loop over all origin mechanisms + current_input_states = set() + for mech in self.get_mechanisms_by_role(MechanismRole.ORIGIN): + + for input_state in mech.input_states: + # add it to our set of current input states + current_input_states.add(input_state) + + # if there is not a corresponding CIM output state, add one + if input_state not in set(self.CIM_output_states.keys()): + interface_output_state = OutputState(owner=self.CIM, + variable=input_state.variable, + reference_value= input_state.variable, + name="STIMULUS_CIM_" + mech.name + "_" + input_state.name) + # self.CIM.add_states(interface_output_state) + self.CIM.output_states.append(interface_output_state) + self.CIM_output_states[input_state] = interface_output_state + MappingProjection(sender=interface_output_state, + receiver=input_state, + matrix= IDENTITY_MATRIX, + name="("+interface_output_state.name + ") to (" + + input_state.owner.name + "-" + input_state.name+")") + + sends_to_input_states = set(self.CIM_output_states.keys()) + # For any output state still registered on the CIM that does not map to a corresponding ORIGIN mech I.S.: + for input_state in sends_to_input_states.difference(current_input_states): + for projection in input_state.path_afferents: + if projection.sender == self.CIM_output_states[input_state]: + # remove the corresponding projection from the ORIGIN mechanism's path afferents + input_state.path_afferents.remove(projection) + projection = None + + # remove the output state associated with this input state (this iteration) from the CIM output states + self.CIM.output_states.remove(self.CIM_output_states[input_state]) + + # and from the dictionary of CIM output state/input state pairs + del self.CIM_output_states[input_state] + + def _assign_values_to_CIM_output_states(self, inputs): + current_mechanisms = set() + for key in inputs: + if isinstance(key, Mechanism): + self.CIM_output_states[key.input_state].value = inputs[key] + current_mechanisms.add(key) + else: + self.CIM_output_states[key].value = inputs[key] + current_mechanisms.add(key.owner) - # consider all of the mechanisms that are only origins OR have input mechanisms - for mech in is_origin.difference(has_input_mechanism): + origins = self.get_mechanisms_by_role(MechanismRole.ORIGIN) - # If mech IS AN ORIGIN mechanism but it doesn't have an input mechanism, ADD input mechanism - if mech not in has_input_mechanism: - new_input_mech = CompositionInterfaceMechanism() - self.input_mechanisms[mech] = new_input_mech - MappingProjection(sender=new_input_mech, receiver=mech) + # NOTE: This may need to change from default_variable to wherever a default value of the mechanism's variable + # is stored -- the point is that if an input is not supplied for an origin mechanism, the mechanism should 
use + # its default variable value + for mech in origins.difference(set(current_mechanisms)): + self.CIM_output_states[mech.input_state].value = mech.instance_defaults.variable - # If mech HAS AN INPUT mechanism but isn't an origin, REMOVE the input mechanism - else: - del self.input_mechanisms[mech] - def _assign_values_to_input_mechanisms(self, input_dict): - ''' - loops over the input values in the inputs dictionary and assigns each value directly to the output state of - its corresponding input Mechanism - ''' - for mech in self.input_mechanisms.keys(): - if mech in input_dict.keys(): - self.input_mechanisms[mech]._output_states[0].value = np.array(input_dict[mech]) - else: - self.input_mechanisms[mech]._output_states[0].value = np.array(mech.instance_defaults.variable) - - def _assign_execution_ids(self, execution_id): + def _assign_execution_ids(self, execution_id=None): ''' assigns the same uuid to each Mechanism in the composition's processing graph as well as all input mechanisms for this composition. The uuid is either specified in the user's call to run(), or generated @@ -760,12 +845,35 @@ def _assign_execution_ids(self, execution_id): ''' # Traverse processing graph and assign one uuid to all of its mechanisms - self._execution_id = execution_id or self._get_unique_id() + if execution_id is None: + execution_id = self._get_unique_id() + + if execution_id not in self.execution_ids: + self.execution_ids.append(execution_id) + for v in self._graph_processing.vertices: - v.component._execution_id = self._execution_id + v.component._execution_id = execution_id + # Assign the uuid to all input mechanisms - for k in self.input_mechanisms.keys(): - self.input_mechanisms[k]._execution_id = self._execution_id + # for k in self.input_mechanisms.keys(): + # self.input_mechanisms[k]._execution_id = execution_id + + self.CIM._execution_id = execution_id + # self.target_CIM._execution_id = execution_id + + self._execution_id = execution_id + return execution_id + + def _identify_clamp_inputs(self, list_type, input_type, origins): + # clamp type of this list is same as the one the user set for the whole composition; return all mechanisms + if list_type == input_type: + return origins + # the user specified different types of clamps for each origin mechanism; generate a list accordingly + elif isinstance(input_type, dict): + return [k for k, v in input_type.items() if list_type == v] + # clamp type of this list is NOT same as the one the user set for the whole composition; return empty list + else: + return [] def execute( self, @@ -776,7 +884,9 @@ def execute( call_before_pass=None, call_after_time_step=None, call_after_pass=None, - execution_id=None + execution_id=None, + clamp_input=SOFT_CLAMP, + targets=None ): ''' Passes inputs to any Mechanisms receiving inputs directly from the user, then coordinates with the Scheduler @@ -818,47 +928,88 @@ def execute( output value of the final Mechanism executed in the Composition : various ''' + if targets is None: + targets = {} + execution_id = self._assign_execution_ids(execution_id) + origin_mechanisms = self.get_mechanisms_by_role(MechanismRole.ORIGIN) + if scheduler_processing is None: scheduler_processing = self.scheduler_processing if scheduler_learning is None: scheduler_learning = self.scheduler_learning - self._create_input_mechanisms() - self._assign_values_to_input_mechanisms(inputs) - self._assign_execution_ids(execution_id) + self._assign_values_to_CIM_output_states(inputs) + # self._assign_values_to_target_CIM_output_states(targets) 
next_pass_before = 1 next_pass_after = 1 + if clamp_input: + soft_clamp_inputs = self._identify_clamp_inputs(SOFT_CLAMP, clamp_input, origin_mechanisms) + hard_clamp_inputs = self._identify_clamp_inputs(HARD_CLAMP, clamp_input, origin_mechanisms) + pulse_clamp_inputs = self._identify_clamp_inputs(PULSE_CLAMP, clamp_input, origin_mechanisms) + no_clamp_inputs = self._identify_clamp_inputs(NO_CLAMP, clamp_input, origin_mechanisms) # run scheduler to receive sets of mechanisms that may be executed at this time step in any order execution_scheduler = scheduler_processing + execution_scheduler._init_counts(execution_id=execution_id) num = None if call_before_pass: call_before_pass() - for next_execution_set in execution_scheduler.run(): + for next_execution_set in execution_scheduler.run(execution_id=execution_id): if call_after_pass: - if next_pass_after == execution_scheduler.clock.time.pass_: - logger.debug('next_pass_after {0}\tscheduler pass {1}'.format(next_pass_after, execution_scheduler.clock.current_pass())) + if next_pass_after == execution_scheduler.times[execution_id][TimeScale.TRIAL][TimeScale.PASS]: + logger.debug('next_pass_after {0}\tscheduler pass {1}'.format(next_pass_after, execution_scheduler.times[execution_id][TimeScale.TRIAL][TimeScale.PASS])) call_after_pass() next_pass_after += 1 if call_before_pass: - if next_pass_before == execution_scheduler.clock.time.pass_: + if next_pass_before == execution_scheduler.times[execution_id][TimeScale.TRIAL][TimeScale.PASS]: call_before_pass() - logger.debug('next_pass_before {0}\tscheduler pass {1}'.format(next_pass_before, execution_scheduler.clock.current_pass())) + logger.debug('next_pass_before {0}\tscheduler pass {1}'.format(next_pass_before, execution_scheduler.times[execution_id][TimeScale.TRIAL][TimeScale.PASS])) next_pass_before += 1 if call_before_time_step: call_before_time_step() # execute each mechanism with EXECUTING in context for mechanism in next_execution_set: + + if mechanism in origin_mechanisms: + # KAM 8/28 commenting out the below code because it's not necessarily how we want to handle + # a recurrent projection on the first time step (meaning, before its mechanism has executed) + # FIX: determine the correct behavior for this case & document it + + # if ( + # scheduler_processing.times[execution_id][TimeScale.TRIAL][TimeScale.TIME_STEP] == 0 + # and hasattr(mechanism, "recurrent_projection") + # ): + # mechanism.recurrent_projection.sender.value = [0.0] + if clamp_input: + if mechanism in hard_clamp_inputs: + # clamp = HARD_CLAMP --> "turn off" recurrent projection + if hasattr(mechanism, "recurrent_projection"): + mechanism.recurrent_projection.sender.value = [0.0] + elif mechanism in no_clamp_inputs: + for input_state in mechanism.input_states: + self.CIM_output_states[input_state].value = 0.0 + # self.input_mechanisms[mechanism]._output_states[0].value = 0.0 + if isinstance(mechanism, Mechanism): - num = mechanism.execute(context=EXECUTING + "composition") - print(" -------------- EXECUTING ", mechanism.name, " -------------- ") - print("result = ", num) - print() - print() + current_context = EXECUTING + "composition " + # if isinstance(mechanism, LearningMechanism) or isinstance(mechanism, ComparatorMechanism): + # current_context += "LEARNING " + if any(isinstance(m, LearningMechanism) for m in self.mechanisms): + current_context += " LEARNING " + num = mechanism.execute(context=current_context) + + + if mechanism in origin_mechanisms: + if clamp_input: + if mechanism in pulse_clamp_inputs: + for 
input_state in mechanism.input_states: + # clamp = None --> "turn off" input mechanism + # self.input_mechanisms[mechanism]._output_states[0].value = 0 + self.CIM_output_states[input_state].value = 0 if call_after_time_step: call_after_time_step() @@ -883,6 +1034,8 @@ def run( call_after_pass=None, call_before_trial=None, call_after_trial=None, + clamp_input=SOFT_CLAMP, + targets=None ): ''' Passes inputs to any mechanisms receiving inputs directly from the user, then coordinates with the scheduler @@ -935,6 +1088,7 @@ def run( output value of the final Mechanism executed in the composition : various ''' + reuse_inputs = False if scheduler_processing is None: @@ -943,54 +1097,77 @@ def run( if scheduler_learning is None: scheduler_learning = self.scheduler_learning + self._analyze_graph() + + execution_id = self._assign_execution_ids(execution_id) + + scheduler_processing._init_counts(execution_id=execution_id) + scheduler_learning._init_counts(execution_id=execution_id) scheduler_processing.update_termination_conditions(termination_processing) scheduler_learning.update_termination_conditions(termination_learning) - if inputs is None: + if inputs is not None: + len_inputs = self._process_inputs(inputs) + else: inputs = {} len_inputs = 1 - else: - len_inputs = len(list(inputs.values())[0]) + if targets is None: + targets = {} # check whether the num trials given in the input dict matches the num_trials param if num_trials is not None: if len_inputs != num_trials: - # if one set of inputs was provided for many trials, set 'reuse_inputs' flag + # if one set of inputs was provided for many trials, set 'reuse_inputs' flag and re-set len_inputs to + # the number of trials given by the user if len_inputs == 1: reuse_inputs = True + len_inputs = num_trials # otherwise, warn user that there is something wrong with their input specification else: raise CompositionError( "The number of trials [{}] specified for the composition [{}] does not match the " - "length [{}] of the inputs specified in the inputs dictionary [{}]. " + "number [{}] of inputs specified per mechanism (or input state) in the inputs dictionary [{}]. " .format(num_trials, self, len_inputs, inputs) ) input_indices = range(len_inputs) - scheduler_processing._reset_counts_total(TimeScale.RUN) - - # TBI: Handle learning graph + scheduler_processing._reset_counts_total(TimeScale.RUN, execution_id) + scheduler_processing._reset_time(TimeScale.RUN, execution_id) # TBI: Handle runtime params? 
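# Each pass through the loop below is one TRIAL: if a single input set was supplied
# for multiple trials, reuse_inputs keeps index 0 in play on every trial; otherwise the
# stimulus at input_index is used (per mechanism, or per InputState when a dict of
# InputStates is given), and the output of the last execute() call is what run() returns.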
result = None - # loop over the length of the list of inputs (# of trials) + # --- RESET FOR NEXT TRIAL --- + # by looping over the length of the list of inputs - each input represents a TRIAL for input_index in input_indices: + + # Execute call before trial "hook" (user defined function) if call_before_trial: call_before_trial() - if scheduler_processing.termination_conds[TimeScale.RUN].is_satisfied(): + + if scheduler_processing.termination_conds[TimeScale.RUN].is_satisfied(scheduler=scheduler_processing, + execution_id=execution_id): break - execution_inputs = {} + # PROCESSING ------------------------------------------------------------------------ - # loop over all mechanisms that receive inputs from the outside world + # Prepare stimuli from the outside world -- collect the inputs for this TRIAL and store them in a dict + execution_stimuli = {} + + # loop over all mechanisms that receive stimuli from the outside world for mech in inputs.keys(): - execution_inputs[mech] = inputs[mech][0 if reuse_inputs else input_index] + if isinstance(inputs[mech], dict): + for input_state in inputs[mech].keys(): + execution_stimuli[input_state] = inputs[mech][input_state][0 if reuse_inputs else input_index] + else: + execution_stimuli[mech] = inputs[mech][0 if reuse_inputs else input_index] + # execute processing + # pass along the stimuli for this trial num = self.execute( - execution_inputs, + execution_stimuli, scheduler_processing, scheduler_learning, call_before_time_step, @@ -998,15 +1175,45 @@ def run( call_after_time_step, call_after_pass, execution_id, + clamp_input, ) + # --------------------------------------------------------------------------------- + # store the result of this execute in case it will be the final result if num is not None: result = num + # LEARNING ------------------------------------------------------------------------ + # Prepare targets from the outside world -- collect the targets for this TRIAL and store them in a dict + execution_targets = {} + + # loop over all mechanisms that receive targets from the outside world + for mech in targets.keys(): + if callable(targets[mech]): + execution_targets[mech] = targets[mech] + elif len(targets[mech]) == 1: + execution_targets[mech] = targets[mech][0] + else: + execution_targets[mech] = targets[mech][input_index] + + # execute learning + # pass along the targets for this trial + self.learning_composition.execute(execution_targets, + scheduler_processing, + scheduler_learning, + call_before_time_step, + call_before_pass, + call_after_time_step, + call_after_pass, + execution_id, + clamp_input, + ) + if call_after_trial: call_after_trial() - scheduler_processing.clock._increment_time(TimeScale.RUN) + # --------------------------------------------------------------------------------- + scheduler_processing._increment_time(TimeScale.RUN, execution_id=execution_id) # return the output of the LAST mechanism executed in the composition return result From 09b3107cb502a80199d9a8bcc381dd4a6acbf2ff Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 7 Mar 2018 15:10:12 -0500 Subject: [PATCH 003/200] un-skipping composition related pytests --- tests/composition/test_composition.py | 22 +++------------------- tests/composition/test_graph.py | 1 - 2 files changed, 3 insertions(+), 20 deletions(-) diff --git a/tests/composition/test_composition.py b/tests/composition/test_composition.py index 2f2d685b39c..eed99f7939c 100644 --- a/tests/composition/test_composition.py +++ b/tests/composition/test_composition.py @@ -21,8 +21,7 @@ # 
Unit tests for each function of the Composition class ####################### -# Unit tests for Composition.Composition() -@pytest.mark.skip +# Unit tests for Composition.Composition( class TestConstructor: def test_no_args(self): @@ -48,8 +47,7 @@ def test_timing_no_args(self, count): logger.info('completed {0} creation{2} of Composition() in {1:.8f}s'.format(count, t, 's' if count != 1 else '')) -# Unit tests for Composition.add_mechanism -@pytest.mark.skip +# Unit tests for Composition.add_mechanis class TestAddMechanism: def test_add_once(self): @@ -92,8 +90,7 @@ def test_timing_stress(self, count): logger.info('completed {0} addition{2} of a Mechanism to a Composition in {1:.8f}s'. format(count, t, 's' if count != 1 else '')) -# Unit tests for Composition.add_projection -@pytest.mark.skip +# Unit tests for Composition.add_projectio class TestAddProjection: def test_add_once(self): @@ -152,8 +149,6 @@ def test_timing_stress(self, count): print() logger.info('completed {0} addition{2} of a projection to a composition in {1:.8f}s'.format(count, t, 's' if count != 1 else '')) - -@pytest.mark.skip class TestAnalyzeGraph: def test_empty_call(self): @@ -231,8 +226,6 @@ def test_two_origins_pointing_to_recursive_pair(self): assert B in comp.get_mechanisms_by_role(MechanismRole.CYCLE) assert C in comp.get_mechanisms_by_role(MechanismRole.RECURRENT_INIT) - -@pytest.mark.skip class TestValidateFeedDict: def test_empty_feed_dicts(self): @@ -465,8 +458,6 @@ def test_multiple_time_steps_2(self): comp._validate_feed_dict(feed_dict_origin, comp.get_mechanisms_by_role(MechanismRole.ORIGIN), "origin") comp._validate_feed_dict(feed_dict_terminal, comp.get_mechanisms_by_role(MechanismRole.TERMINAL), "terminal") - -@pytest.mark.skip class TestGetMechanismsByRole: def test_multiple_roles(self): @@ -499,11 +490,8 @@ def test_nonexistent_role(self): with pytest.raises(CompositionError): comp.get_mechanisms_by_role(None) - -@pytest.mark.skip class TestGraph: - @pytest.mark.skip class TestProcessingGraph: def test_all_mechanisms(self): @@ -709,8 +697,6 @@ def test_cycle_x_multiple_incoming(self): comp.graph_processing.comp_to_vertex[B], ]) - -@pytest.mark.skip class TestRun: def test_run_2_mechanisms_default_input_1(self): @@ -1076,8 +1062,6 @@ def test_LPP_two_origins_one_terminal(self): ) assert 250 == output[0][0] - -@pytest.mark.skip class TestCallBeforeAfterTimescale: def test_call_before_record_timescale(self): diff --git a/tests/composition/test_graph.py b/tests/composition/test_graph.py index c835e96ca64..40e7c8d9f94 100644 --- a/tests/composition/test_graph.py +++ b/tests/composition/test_graph.py @@ -3,7 +3,6 @@ from psyneulink.compositions.composition import Graph, Vertex -@pytest.mark.skip class TestGraph: class DummyComponent: From 86a974bfc3ca11a0810f17f0911c234123778469 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 7 Mar 2018 15:14:19 -0500 Subject: [PATCH 004/200] fixing import errors and other setup bugs --- psyneulink/compositions/composition.py | 2 +- psyneulink/compositions/systemcomposition.py | 20 +++++++++++++++----- 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/psyneulink/compositions/composition.py b/psyneulink/compositions/composition.py index f9ea4c218ff..c5a5663a21d 100644 --- a/psyneulink/compositions/composition.py +++ b/psyneulink/compositions/composition.py @@ -57,7 +57,7 @@ from psyneulink.components.mechanisms.processing.compositioninterfacemechanism import CompositionInterfaceMechanism from 
psyneulink.components.projections.pathway.mappingprojection import MappingProjection from psyneulink.components.shellclasses import Mechanism, Projection -from psyneulink.globals.keywords import EXECUTING +from psyneulink.globals.keywords import EXECUTING, SOFT_CLAMP from psyneulink.scheduling.scheduler import Scheduler from psyneulink.scheduling.time import TimeScale diff --git a/psyneulink/compositions/systemcomposition.py b/psyneulink/compositions/systemcomposition.py index f57328c79d4..941a439604e 100644 --- a/psyneulink/compositions/systemcomposition.py +++ b/psyneulink/compositions/systemcomposition.py @@ -1,8 +1,18 @@ -from psyneulink.compositions.composition import Composition, MechanismRole -from psyneulink.components.mechanisms.mechanism import Mechanism -from psyneulink.components.projections.pathway.mappingprojection import MappingProjection -from psyneulink.components.projections.projection import Projection -from psyneulink.globals.keywords import SOFT_CLAMP +from psyneulink.compositions.composition import Composition + +__all__ = [ + 'SystemComposition', 'SystemCompositionError' +] + + +class SystemCompositionError(Exception): + + def __init__(self, error_value): + self.error_value = error_value + + def __str__(self): + return repr(self.error_value) + class SystemComposition(Composition): ''' From 65357a920efde558823b7b78776f9d29781e4602 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 8 Mar 2018 12:56:02 -0500 Subject: [PATCH 005/200] updating composition interface mechanism file with default_variable changes on devel, imports, and CIM changes from composition branch --- .../compositioninterfacemechanism.py | 42 +++++++++++-------- 1 file changed, 25 insertions(+), 17 deletions(-) diff --git a/psyneulink/components/mechanisms/processing/compositioninterfacemechanism.py b/psyneulink/components/mechanisms/processing/compositioninterfacemechanism.py index 24183b89b0d..2dc4a8d9b3a 100644 --- a/psyneulink/components/mechanisms/processing/compositioninterfacemechanism.py +++ b/psyneulink/components/mechanisms/processing/compositioninterfacemechanism.py @@ -54,7 +54,7 @@ from psyneulink.globals.preferences.preferenceset import PreferenceEntry, PreferenceLevel from psyneulink.scheduling.time import TimeScale -__all__ = [] +__all__ = ['CompositionInterfaceMechanism'] class CompositionInterfaceMechanism(ProcessingMechanism_Base): @@ -82,40 +82,43 @@ class CompositionInterfaceMechanism(ProcessingMechanism_Base): size : int, list or np.ndarray of ints specifies default_input_value as array(s) of zeros if **default_input_value** is not passed as an argument; if **default_input_value** is specified, it takes precedence over the specification of **size**. - As an example, the following mechanisms are equivalent:: - T1 = TransferMechanism(size = [3, 2]) - T2 = TransferMechanism(default_variable = [[0, 0, 0], [0, 0]]) function : IntegratorFunction : default Integrator specifies the function used to integrate the input. Must take a single numeric value, or a list or np.array of values, and return one of the same form. - params : Dict[param keyword: param value] : default None + params : Optional[Dict[param keyword, param value]] a `parameter dictionary ` that can be used to specify the parameters for the `Mechanism `, parameters for its `function `, and/or a custom function and its parameters. Values specified for parameters in the dictionary override any assigned to those parameters in arguments of the constructor. 
- name : str : default see `name ` - specifies the name of the CompositionInterfaceMechanism. + name : str : default CompositionInterfaceMechanism- + a string used for the name of the Mechanism. + If not is specified, a default is assigned by `MechanismRegistry` + (see :doc:`Registry ` for conventions used in naming, including for default and duplicate names). - prefs : PreferenceSet or specification dict : default Mechanism.classPreferences - specifies the `PreferenceSet` for the CompositionInterfaceMechanism; see `prefs ` for details. + prefs : Optional[PreferenceSet or specification dict : Mechanism.classPreferences] + the `PreferenceSet` for Mechanism. + If it is not specified, a default is assigned using `classPreferences` defined in __init__.py + (see :doc:`PreferenceSet ` for details). Attributes ---------- variable : value: default the input to Mechanism's ``function``. - name : str - the name of the CompositionInterfaceMechanism; if it is not specified in the **name** argument of the - constructor, a default is assigned by MechanismRegistry (see `Naming` for conventions used for default and - duplicate names). + name : str : default CompositionInterfaceMechanism- + the name of the Mechanism. + Specified in the **name** argument of the constructor for the Mechanism; + if not is specified, a default is assigned by `MechanismRegistry` + (see :doc:`Registry ` for conventions used in naming, including for default and duplicate names). - prefs : PreferenceSet or specification dict - the `PreferenceSet` for the CompositionInterfaceMechanism; if it is not specified in the **prefs** argument of - the constructor, a default is assigned using `classPreferences` defined in __init__.py (see :doc:`PreferenceSet - ` for details). + prefs : Optional[PreferenceSet or specification dict : Mechanism.classPreferences] + the `PreferenceSet` for Mechanism. + Specified in the **prefs** argument of the constructor for the Mechanism; + if it is not specified, a default is assigned using `classPreferences` defined in ``__init__.py`` + (see :doc:`PreferenceSet ` for details). 
""" @@ -127,6 +130,10 @@ class CompositionInterfaceMechanism(ProcessingMechanism_Base): kwPreferenceSetName: 'CompositionInterfaceMechanismCustomClassPreferences', kpReportOutputPref: PreferenceEntry(True, PreferenceLevel.INSTANCE)} + class ClassDefaults(ProcessingMechanism_Base.ClassDefaults): + # Sets template for variable (input) + variable = [[0]] + paramClassDefaults = Mechanism_Base.paramClassDefaults.copy() paramClassDefaults.update({}) paramNames = paramClassDefaults.keys() @@ -136,6 +143,7 @@ def __init__(self, default_input_value=None, size=None, function = Linear(slope = 1, intercept=0.0), + time_scale=TimeScale.TRIAL, params=None, name=None, prefs:is_pref_set=None, From 0f840a7baae6ed34a72cafa5a32255be0298fc7c Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 8 Mar 2018 16:45:15 -0500 Subject: [PATCH 006/200] continuing to fix small bugs in composition and CIM in order to get composition tests up and running --- .../compositioninterfacemechanism.py | 34 ++--- psyneulink/compositions/composition.py | 9 +- tests/composition/test_composition.py | 139 ++++++++---------- 3 files changed, 84 insertions(+), 98 deletions(-) diff --git a/psyneulink/components/mechanisms/processing/compositioninterfacemechanism.py b/psyneulink/components/mechanisms/processing/compositioninterfacemechanism.py index 2dc4a8d9b3a..f626f4f4a69 100644 --- a/psyneulink/components/mechanisms/processing/compositioninterfacemechanism.py +++ b/psyneulink/components/mechanisms/processing/compositioninterfacemechanism.py @@ -52,7 +52,6 @@ from psyneulink.globals.keywords import COMPOSITION_INTERFACE_MECHANISM, kwPreferenceSetName from psyneulink.globals.preferences.componentpreferenceset import is_pref_set, kpReportOutputPref from psyneulink.globals.preferences.preferenceset import PreferenceEntry, PreferenceLevel -from psyneulink.scheduling.time import TimeScale __all__ = ['CompositionInterfaceMechanism'] @@ -60,7 +59,7 @@ class CompositionInterfaceMechanism(ProcessingMechanism_Base): """ CompositionInterfaceMechanism( \ - default_input_value=None, \ + default_variable=None, \ size=None, \ function=Linear(slope = 1.0, intercept = 0.0), \ params=None, \ @@ -72,7 +71,7 @@ class CompositionInterfaceMechanism(ProcessingMechanism_Base): Arguments --------- - default_input_value : number, list or np.ndarray + default_variable : number, list or np.ndarray the input to the Mechanism to use if none is provided in a call to its `execute ` or `run ` methods; also serves as a template to specify the length of `variable ` for @@ -80,8 +79,8 @@ class CompositionInterfaceMechanism(ProcessingMechanism_Base): Mechanism. size : int, list or np.ndarray of ints - specifies default_input_value as array(s) of zeros if **default_input_value** is not passed as an argument; - if **default_input_value** is specified, it takes precedence over the specification of **size**. + specifies default_variable as array(s) of zeros if **default_variable** is not passed as an argument; + if **default_variable** is specified, it takes precedence over the specification of **size**. function : IntegratorFunction : default Integrator specifies the function used to integrate the input. 
Must take a single numeric value, or a list or np.array @@ -130,37 +129,32 @@ class CompositionInterfaceMechanism(ProcessingMechanism_Base): kwPreferenceSetName: 'CompositionInterfaceMechanismCustomClassPreferences', kpReportOutputPref: PreferenceEntry(True, PreferenceLevel.INSTANCE)} - class ClassDefaults(ProcessingMechanism_Base.ClassDefaults): - # Sets template for variable (input) - variable = [[0]] - paramClassDefaults = Mechanism_Base.paramClassDefaults.copy() paramClassDefaults.update({}) paramNames = paramClassDefaults.keys() @tc.typecheck def __init__(self, - default_input_value=None, + default_variable=None, size=None, - function = Linear(slope = 1, intercept=0.0), - time_scale=TimeScale.TRIAL, + function=Linear(slope=1, intercept=0.0), params=None, name=None, prefs:is_pref_set=None, context=None): - if default_input_value is None and size is None: - default_input_value = self.ClassDefaults.variable + if default_variable is None and size is None: + default_variable = self.ClassDefaults.variable params = self._assign_args_to_param_dicts(function=function, params=params) - super(CompositionInterfaceMechanism, self).__init__(variable=default_input_value, - size=size, - params=params, - name=name, - prefs=prefs, - context=self) + super(CompositionInterfaceMechanism, self).__init__(default_variable=default_variable, + size=size, + params=params, + name=name, + prefs=prefs, + context=self) diff --git a/psyneulink/compositions/composition.py b/psyneulink/compositions/composition.py index c5a5663a21d..7dc251f410f 100644 --- a/psyneulink/compositions/composition.py +++ b/psyneulink/compositions/composition.py @@ -56,8 +56,9 @@ from psyneulink.components.mechanisms.processing.compositioninterfacemechanism import CompositionInterfaceMechanism from psyneulink.components.projections.pathway.mappingprojection import MappingProjection +from psyneulink.components.states.outputstate import OutputState from psyneulink.components.shellclasses import Mechanism, Projection -from psyneulink.globals.keywords import EXECUTING, SOFT_CLAMP +from psyneulink.globals.keywords import EXECUTING, SOFT_CLAMP, IDENTITY_MATRIX from psyneulink.scheduling.scheduler import Scheduler from psyneulink.scheduling.time import TimeScale @@ -1101,8 +1102,10 @@ def run( execution_id = self._assign_execution_ids(execution_id) - scheduler_processing._init_counts(execution_id=execution_id) - scheduler_learning._init_counts(execution_id=execution_id) + # scheduler_processing._init_counts(execution_id=execution_id) + # scheduler_learning._init_counts(execution_id=execution_id) + scheduler_processing._init_counts() + scheduler_learning._init_counts() scheduler_processing.update_termination_conditions(termination_processing) scheduler_learning.update_termination_conditions(termination_learning) diff --git a/tests/composition/test_composition.py b/tests/composition/test_composition.py index eed99f7939c..5c595c519c2 100644 --- a/tests/composition/test_composition.py +++ b/tests/composition/test_composition.py @@ -6,7 +6,7 @@ import pytest from psyneulink.components.functions.function import Linear, SimpleIntegrator -from psyneulink.components.mechanisms.processing import integratormechanism +from psyneulink.components.mechanisms.processing.integratormechanism import IntegratorMechanism from psyneulink.components.mechanisms.processing.transfermechanism import TransferMechanism from psyneulink.components.projections.pathway.mappingprojection import MappingProjection from psyneulink.compositions.composition import Composition, 
CompositionError, MechanismRole @@ -35,19 +35,17 @@ def test_two_calls_no_args(self): comp_2 = Composition() assert isinstance(comp, Composition) - @pytest.mark.stress - @pytest.mark.parametrize( - 'count', [ - 10000, - ] - ) - def test_timing_no_args(self, count): - t = timeit('comp = Composition()', setup='from psyneulink.composition import Composition', number=count) - print() - logger.info('completed {0} creation{2} of Composition() in {1:.8f}s'.format(count, t, 's' if count != 1 else '')) + # @pytest.mark.stress + # @pytest.mark.parametrize( + # 'count', [ + # 10000, + # ] + # ) + # def test_timing_no_args(self, count): + # t = timeit('comp = Composition()', setup='from psyneulink.compositions.composition import Composition', number=count) + # print() + # logger.info('completed {0} creation{2} of Composition() in {1:.8f}s'.format(count, t, 's' if count != 1 else '')) - -# Unit tests for Composition.add_mechanis class TestAddMechanism: def test_add_once(self): @@ -65,32 +63,27 @@ def test_add_same_twice(self): comp.add_mechanism(mech) comp.add_mechanism(mech) - @pytest.mark.stress - @pytest.mark.parametrize( - 'count', [ - 100, - ] - ) - def test_timing_stress(self, count): - t = timeit( - 'comp.add_mechanism(TransferMechanism())', - setup=''' -<<<<<<< HEAD -from psyneulink.components.mechanisms.Mechanism import mechanism -from PsyNeuLink.Composition import Composition -======= -from psyNeuLink.components.mechanisms.processing.transfermechanism import TransferMechanism -from psyneulink.composition import Composition ->>>>>>> devel -comp = Composition() -''', - number=count - ) - print() - logger.info('completed {0} addition{2} of a Mechanism to a Composition in {1:.8f}s'. - format(count, t, 's' if count != 1 else '')) + # @pytest.mark.stress + # @pytest.mark.parametrize( + # 'count', [ + # 100, + # ] + # ) +# def test_timing_stress(self, count): +# t = timeit( +# 'comp.add_mechanism(TransferMechanism())', +# setup=''' +# +# from psyNeuLink.components.mechanisms.processing.transfermechanism import TransferMechanism +# from psyneulink.compositions.composition import Composition +# comp = Composition() +# ''', +# number=count +# ) +# print() +# logger.info('completed {0} addition{2} of a Mechanism to a Composition in {1:.8f}s'. 
+# format(count, t, 's' if count != 1 else '')) -# Unit tests for Composition.add_projectio class TestAddProjection: def test_add_once(self): @@ -120,34 +113,30 @@ def test_add_same_twice(self): comp.add_projection(A, proj, B) comp.add_projection(A, proj, B) - @pytest.mark.stress - @pytest.mark.parametrize( - 'count', [ - 1000, - ] - ) - def test_timing_stress(self, count): - t = timeit('comp.add_projection(A, MappingProjection(), B)', - setup=''' -<<<<<<< HEAD -from psyneulink.components.mechanisms.ProcessingMechanisms.TransferMechanism import TransferMechanism -from psyneulink.components.Projections.PathwayProjections.MappingProjection import MappingProjection -from PsyNeuLink.Composition import Composition -======= -from psyneulink.components.mechanisms.processingmechanisms.transfermechanism import TransferMechanism -from psyneulink.components.projections.pathwayprojections.mappingprojection import MappingProjection -from psyneulink.composition import Composition ->>>>>>> devel -comp = Composition() -A = TransferMechanism(name='A') -B = TransferMechanism(name='B') -comp.add_mechanism(A) -comp.add_mechanism(B) -''', - number=count - ) - print() - logger.info('completed {0} addition{2} of a projection to a composition in {1:.8f}s'.format(count, t, 's' if count != 1 else '')) + # @pytest.mark.stress + # @pytest.mark.parametrize( + # 'count', [ + # 1000, + # ] + # ) +# def test_timing_stress(self, count): +# t = timeit('comp.add_projection(A, MappingProjection(), B)', +# setup=''' +# +# from psyneulink.components.mechanisms.processingmechanisms.transfermechanism import TransferMechanism +# from psyneulink.components.projections.pathwayprojections.mappingprojection import MappingProjection +# from psyneulink.compositions.composition import Composition +# +# comp = Composition() +# A = TransferMechanism(name='A') +# B = TransferMechanism(name='B') +# comp.add_mechanism(A) +# comp.add_mechanism(B) +# ''', +# number=count +# ) +# print() +# logger.info('completed {0} addition{2} of a projection to a composition in {1:.8f}s'.format(count, t, 's' if count != 1 else '')) class TestAnalyzeGraph: @@ -701,7 +690,7 @@ class TestRun: def test_run_2_mechanisms_default_input_1(self): comp = Composition() - A = integratormechanism(default_variable=1.0, function=Linear(slope=5.0)) + A = IntegratorMechanism(default_variable=1.0, function=Linear(slope=5.0)) B = TransferMechanism(function=Linear(slope=5.0)) comp.add_mechanism(A) comp.add_mechanism(B) @@ -715,7 +704,7 @@ def test_run_2_mechanisms_default_input_1(self): def test_run_2_mechanisms_input_5(self): comp = Composition() - A = integratormechanism(default_variable=1.0, function=Linear(slope=5.0)) + A = IntegratorMechanism(default_variable=1.0, function=Linear(slope=5.0)) B = TransferMechanism(function=Linear(slope=5.0)) comp.add_mechanism(A) comp.add_mechanism(B) @@ -804,7 +793,7 @@ def test_run_5_mechanisms_2_origins_1_terminal(self): def test_run_2_mechanisms_with_scheduling_AAB_integrator(self): comp = Composition() - A = integratormechanism(name="A [integrator]", default_variable=2.0, function=SimpleIntegrator(rate=1.0)) + A = IntegratorMechanism(name="A [integrator]", default_variable=2.0, function=SimpleIntegrator(rate=1.0)) # (1) value = 0 + (5.0 * 1.0) + 0 --> return 5.0 # (2) value = 5.0 + (5.0 * 1.0) + 0 --> return 10.0 B = TransferMechanism(name="B [transfer]", function=Linear(slope=5.0)) @@ -882,7 +871,7 @@ def test_sender_receiver_not_specified(self): def test_run_2_mechanisms_reuse_input(self): comp = Composition() - A = 
integratormechanism(default_variable=1.0, function=Linear(slope=5.0)) + A = IntegratorMechanism(default_variable=1.0, function=Linear(slope=5.0)) B = TransferMechanism(function=Linear(slope=5.0)) comp.add_mechanism(A) comp.add_mechanism(B) @@ -899,7 +888,7 @@ def test_run_2_mechanisms_reuse_input(self): def test_run_2_mechanisms_incorrect_trial_spec(self): comp = Composition() - A = integratormechanism(default_variable=1.0, function=Linear(slope=5.0)) + A = IntegratorMechanism(default_variable=1.0, function=Linear(slope=5.0)) B = TransferMechanism(function=Linear(slope=5.0)) comp.add_mechanism(A) comp.add_mechanism(B) @@ -917,7 +906,7 @@ def test_run_2_mechanisms_incorrect_trial_spec(self): def test_run_2_mechanisms_double_trial_specs(self): comp = Composition() - A = integratormechanism(default_variable=1.0, function=Linear(slope=5.0)) + A = IntegratorMechanism(default_variable=1.0, function=Linear(slope=5.0)) B = TransferMechanism(function=Linear(slope=5.0)) comp.add_mechanism(A) comp.add_mechanism(B) @@ -934,7 +923,7 @@ def test_run_2_mechanisms_double_trial_specs(self): def test_execute_composition(self): comp = Composition() - A = integratormechanism(default_variable=1.0, function=Linear(slope=5.0)) + A = IntegratorMechanism(default_variable=1.0, function=Linear(slope=5.0)) B = TransferMechanism(function=Linear(slope=5.0)) comp.add_mechanism(A) comp.add_mechanism(B) @@ -1212,8 +1201,8 @@ def record_values(d, time_scale, *mechs): comp = Composition() - A = integratormechanism(name="A [transfer]", function=SimpleIntegrator(rate=1)) - B = integratormechanism(name="B [transfer]", function=SimpleIntegrator(rate=2)) + A = IntegratorMechanism(name="A [transfer]", function=SimpleIntegrator(rate=1)) + B = IntegratorMechanism(name="B [transfer]", function=SimpleIntegrator(rate=2)) comp.add_mechanism(A) comp.add_mechanism(B) comp.add_projection(A, MappingProjection(sender=A, receiver=B), B) From 71a7a3ad381703bbc037257dcd4e9c6efc256665 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 9 Mar 2018 17:18:31 -0500 Subject: [PATCH 007/200] updating composition run method with improved input and target specification from devel --- psyneulink/compositions/composition.py | 187 ++++++++++++++++++------- 1 file changed, 140 insertions(+), 47 deletions(-) diff --git a/psyneulink/compositions/composition.py b/psyneulink/compositions/composition.py index 7dc251f410f..7858d8df995 100644 --- a/psyneulink/compositions/composition.py +++ b/psyneulink/compositions/composition.py @@ -53,12 +53,12 @@ from enum import Enum import numpy as np - +from psyneulink.components.component import function_type from psyneulink.components.mechanisms.processing.compositioninterfacemechanism import CompositionInterfaceMechanism from psyneulink.components.projections.pathway.mappingprojection import MappingProjection from psyneulink.components.states.outputstate import OutputState from psyneulink.components.shellclasses import Mechanism, Projection -from psyneulink.globals.keywords import EXECUTING, SOFT_CLAMP, IDENTITY_MATRIX +from psyneulink.globals.keywords import SYSTEM, EXECUTING, SOFT_CLAMP, IDENTITY_MATRIX from psyneulink.scheduling.scheduler import Scheduler from psyneulink.scheduling.time import TimeScale @@ -336,6 +336,7 @@ class Composition(object): def __init__(self): # core attributes + self.name = "Composition-TestName" self.graph = Graph() # Graph of the Composition self._graph_processing = None self.mechanisms = [] @@ -1090,8 +1091,6 @@ def run( output value of the final Mechanism executed in the 
composition : various ''' - reuse_inputs = False - if scheduler_processing is None: scheduler_processing = self.scheduler_processing @@ -1109,42 +1108,42 @@ def run( scheduler_processing.update_termination_conditions(termination_processing) scheduler_learning.update_termination_conditions(termination_learning) - if inputs is not None: - len_inputs = self._process_inputs(inputs) - else: - inputs = {} - len_inputs = 1 + # ------------------------------------ FROM DEVEL START ------------------------------------ + origin_mechanisms = self.get_mechanisms_by_role(MechanismRole.ORIGIN) + # if there is only one origin mechanism, allow inputs to be specified in a list + if isinstance(inputs, (list, np.ndarray)): + if len(origin_mechanisms) == 1: + inputs = {next(iter(origin_mechanisms)): inputs} + else: + raise CompositionError("Inputs to {} must be specified in a dictionary with a key for each of its {} origin " + "mechanisms.".format(self.name, len(origin_mechanisms))) + elif not isinstance(inputs, dict): + if len(origin_mechanisms) == 1: + raise CompositionError( + "Inputs to {} must be specified in a list or in a dictionary with the origin mechanism({}) " + "as its only key".format(self.name, next(iter(origin_mechanisms)).name)) + else: + raise CompositionError("Inputs to {} must be specified in a dictionary with a key for each of its {} origin " + "mechanisms.".format(self.name, len(origin_mechanisms))) - if targets is None: - targets = {} + inputs, num_inputs_sets = self._adjust_stimulus_dict(self, inputs) - # check whether the num trials given in the input dict matches the num_trials param if num_trials is not None: - if len_inputs != num_trials: - # if one set of inputs was provided for many trials, set 'reuse_inputs' flag and re-set len_inputs to - # the number of trials given by the user - if len_inputs == 1: - reuse_inputs = True - len_inputs = num_trials - # otherwise, warn user that there is something wrong with their input specification - else: - raise CompositionError( - "The number of trials [{}] specified for the composition [{}] does not match the " - "number [{}] of inputs specified per mechanism (or input state) in the inputs dictionary [{}]. " - .format(num_trials, self, len_inputs, inputs) - ) + num_trials = num_trials + else: + num_trials = num_inputs_sets - input_indices = range(len_inputs) + if targets is None: + targets = {} - scheduler_processing._reset_counts_total(TimeScale.RUN, execution_id) - scheduler_processing._reset_time(TimeScale.RUN, execution_id) + scheduler_processing._reset_counts_total(TimeScale.RUN) # TBI: Handle runtime params? 
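# Each iteration below is one TRIAL: stimuli (and targets) are selected with
# trial_num % num_inputs_sets, so a single input set is simply reused when num_trials
# exceeds the number of input sets provided, and the output of the final execute() call
# becomes the return value of run().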
result = None # --- RESET FOR NEXT TRIAL --- # by looping over the length of the list of inputs - each input represents a TRIAL - for input_index in input_indices: + for trial_num in range(num_trials): # Execute call before trial "hook" (user defined function) if call_before_trial: @@ -1158,18 +1157,13 @@ def run( # Prepare stimuli from the outside world -- collect the inputs for this TRIAL and store them in a dict execution_stimuli = {} - - # loop over all mechanisms that receive stimuli from the outside world - for mech in inputs.keys(): - if isinstance(inputs[mech], dict): - for input_state in inputs[mech].keys(): - execution_stimuli[input_state] = inputs[mech][input_state][0 if reuse_inputs else input_index] - else: - execution_stimuli[mech] = inputs[mech][0 if reuse_inputs else input_index] + stimulus_index = trial_num % num_inputs_sets + for mech in inputs: + execution_stimuli[mech] = inputs[mech][stimulus_index] # execute processing # pass along the stimuli for this trial - num = self.execute( + trial_output = self.execute( execution_stimuli, scheduler_processing, scheduler_learning, @@ -1183,21 +1177,28 @@ def run( # --------------------------------------------------------------------------------- # store the result of this execute in case it will be the final result - if num is not None: - result = num + if trial_output is not None: + result = trial_output # LEARNING ------------------------------------------------------------------------ # Prepare targets from the outside world -- collect the targets for this TRIAL and store them in a dict execution_targets = {} + target_index = trial_num % num_inputs_sets + # Assign targets: + if targets is not None: - # loop over all mechanisms that receive targets from the outside world - for mech in targets.keys(): - if callable(targets[mech]): - execution_targets[mech] = targets[mech] - elif len(targets[mech]) == 1: - execution_targets[mech] = targets[mech][0] + if isinstance(targets, function_type): + self.target = targets else: - execution_targets[mech] = targets[mech][input_index] + for mech in targets: + if callable(targets[mech]): + execution_targets[mech] = targets[mech] + else: + execution_targets[mech] = targets[mech][target_index] + + # devel needs the lines below because target and current_targets are attrs of system + # self.target = execution_targets + # self.current_targets = execution_targets # execute learning # pass along the targets for this trial @@ -1220,3 +1221,95 @@ def run( # return the output of the LAST mechanism executed in the composition return result + + def _input_matches_variable(self, input_value, var): + # input_value states are uniform + if np.shape(np.atleast_2d(input_value)) == np.shape(var): + return "homogeneous" + # input_value states have different lengths + elif len(np.shape(var)) == 1 and isinstance(var[0], (list, np.ndarray)): + for i in range(len(input_value)): + if len(input_value[i]) != len(var[i]): + return False + return "heterogeneous" + return False + + def _adjust_stimulus_dict(self, stimuli): + + # STEP 1: validate that there is a one-to-one mapping of input entries to origin mechanisms + + + # Check that all of the mechanisms listed in the inputs dict are ORIGIN mechanisms in the self + origin_mechanisms = self.get_mechanisms_by_role(MechanismRole.ORIGIN) + for mech in stimuli.keys(): + if not mech in origin_mechanisms.mechanisms: + raise CompositionError("{} in inputs dict for {} is not one of its ORIGIN mechanisms". 
+ format(mech.name, self.name)) + # Check that all of the ORIGIN mechanisms in the self are represented by entries in the inputs dict + for mech in origin_mechanisms: + if not mech in stimuli: + raise RunError("Entry for ORIGIN Mechanism {} is missing from the inputs dict for {}". + format(mech.name, self.name)) + + # STEP 2: Loop over all dictionary entries to validate their content and adjust any convenience notations: + + # (1) Replace any user provided convenience notations with values that match the following specs: + # a - all dictionary values are lists containing and input value on each trial (even if only one trial) + # b - each input value is a 2d array that matches variable + # example: { Mech1: [Fully_specified_input_for_mech1_on_trial_1, Fully_specified_input_for_mech1_on_trial_2 … ], + # Mech2: [Fully_specified_input_for_mech2_on_trial_1, Fully_specified_input_for_mech2_on_trial_2 … ]} + # (2) Verify that all mechanism values provide the same number of inputs (check length of each dictionary value) + + adjusted_stimuli = {} + num_input_sets = -1 + + for mech, stim_list in stimuli.items(): + + check_spec_type = self._input_matches_variable(stim_list, mech.instance_defaults.variable) + # If a mechanism provided a single input, wrap it in one more list in order to represent trials + if check_spec_type == "homogeneous" or check_spec_type == "heterogeneous": + if check_spec_type == "homogeneous": + # np.atleast_2d will catch any single-input states specified without an outer list + # e.g. [2.0, 2.0] --> [[2.0, 2.0]] + adjusted_stimuli[mech] = [np.atleast_2d(stim_list)] + else: + adjusted_stimuli[mech] = [stim_list] + + # verify that all mechanisms have provided the same number of inputs + if num_input_sets == -1: + num_input_sets = 1 + elif num_input_sets != 1: + raise RunError("Input specification for {} is not valid. The number of inputs (1) provided for {}" + "conflicts with at least one other mechanism's input specification.".format(self.name, + mech.name)) + else: + adjusted_stimuli[mech] = [] + for stim in stimuli[mech]: + check_spec_type = _input_matches_variable(stim, mech.instance_defaults.variable) + # loop over each input to verify that it matches variable + if check_spec_type == False: + err_msg = "Input stimulus ({}) for {} is incompatible with its variable ({}).".\ + format(stim, mech.name, mech.instance_defaults.variable) + # 8/3/17 CW: I admit the error message implementation here is very hacky; but it's at least not a hack + # for "functionality" but rather a hack for user clarity + if "KWTA" in str(type(mech)): + err_msg = err_msg + " For KWTA mechanisms, remember to append an array of zeros (or other values)" \ + " to represent the outside stimulus for the inhibition input state, and " \ + "for systems, put your inputs" + raise RunError(err_msg) + elif check_spec_type == "homogeneous": + # np.atleast_2d will catch any single-input states specified without an outer list + # e.g. [2.0, 2.0] --> [[2.0, 2.0]] + adjusted_stimuli[mech].append(np.atleast_2d(stim)) + else: + adjusted_stimuli[mech].append(stim) + + # verify that all mechanisms have provided the same number of inputs + if num_input_sets == -1: + num_input_sets = len(stimuli[mech]) + elif num_input_sets != len(stimuli[mech]): + raise RunError("Input specification for {} is not valid. The number of inputs ({}) provided for {}" + "conflicts with at least one other mechanism's input specification." 
+ .format(self.name, (stimuli[mech]), mech.name)) + + return adjusted_stimuli, num_input_sets \ No newline at end of file From 8307b333a51f1add5ccdfdde0d6dcf09637ad484 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 5 Apr 2018 17:48:57 -0400 Subject: [PATCH 008/200] adding pytests for new input/output/target value --> label option on mechanisms --- tests/mechanisms/test_input_output_labels.py | 66 ++++++++++++++++++++ 1 file changed, 66 insertions(+) create mode 100644 tests/mechanisms/test_input_output_labels.py diff --git a/tests/mechanisms/test_input_output_labels.py b/tests/mechanisms/test_input_output_labels.py new file mode 100644 index 00000000000..38dfc2d80c6 --- /dev/null +++ b/tests/mechanisms/test_input_output_labels.py @@ -0,0 +1,66 @@ +import numpy as np +import pytest + +from psyneulink.components.component import ComponentError +from psyneulink.components.functions.function import FunctionError +from psyneulink.components.functions.function import ConstantIntegrator, Exponential, Linear, Logistic, Reduce, Reinforcement, SoftMax +from psyneulink.components.functions.function import ExponentialDist, GammaDist, NormalDist, UniformDist, WaldDist, UniformToNormalDist +from psyneulink.components.mechanisms.mechanism import MechanismError +from psyneulink.components.mechanisms.processing.processingmechanism import ProcessingMechanism +from psyneulink.components.mechanisms.processing.transfermechanism import TransferMechanism +from psyneulink.globals.keywords import INPUT_LABELS_DICT +from psyneulink.components.process import Process +from psyneulink.components.system import System + +class TestMechanismInputLabels: + def test_dict_of_floats(self): + input_labels_dict = {"red": 1, + "green":0} + M = ProcessingMechanism(params={INPUT_LABELS_DICT:input_labels_dict}) + P = Process(pathway=[M]) + S = System(processes=[P]) + + S.run(inputs=['red', 'green', 'green', 'red']) + assert np.allclose(S.results, [[[1.]], [[0.]], [[0.]], [[1.]]]) + + def test_dict_of_arrays(self): + input_labels_dict = {"red": [1, 0, 0], + "green": [0, 1, 0], + "blue": [0, 0, 1]} + M = ProcessingMechanism(default_variable=[[0, 0, 0]], + params={INPUT_LABELS_DICT: input_labels_dict}) + P = Process(pathway=[M]) + S = System(processes=[P]) + + S.run(inputs=['red', 'green', 'blue', 'red']) + assert np.allclose(S.results, [[[1, 0, 0]], [[0, 1, 0]], [[0, 0, 1]], [[1, 0, 0]]]) + + def test_dict_of_2d_arrays(self): + input_labels_dict = {"red": [[1, 0], [1, 0]], + "green": [[0, 1], [0, 1]], + "blue": [[0, 1], [1, 0]]} + M = TransferMechanism(default_variable=[[0, 0], [0, 0]], + params={INPUT_LABELS_DICT: input_labels_dict}) + P = Process(pathway=[M]) + S = System(processes=[P]) + + S.run(inputs=['red', 'green', 'blue']) + assert np.allclose(S.results, [[[1, 0], [1, 0]], [[0, 1], [0, 1]], [[0, 1], [1, 0]]]) + + def test_dict_of_dicts(self): + input_labels_dict = {0: {"red": [1, 0], + "green": [0, 1]}, + 1: {"red": [0, 1], + "green": [1, 0]}} + + + M = TransferMechanism(default_variable=[[0, 0], [0, 0]], + params={INPUT_LABELS_DICT: input_labels_dict}) + P = Process(pathway=[M]) + S = System(processes=[P]) + + S.run(inputs=[['red', 'green'], ['green', 'red'], ['green', 'green']]) + assert np.allclose(S.results, [[[1, 0], [1, 0]], [[0, 1], [0, 1]], [[0, 1], [1, 0]]]) + + # class TestTargetLabels: +# class TestMechanismOutputLabels: \ No newline at end of file From 1c3290b26311c3c52b906c290450f421c207c15c Mon Sep 17 00:00:00 2001 From: KristenManning Date: Mon, 9 Apr 2018 17:23:53 -0400 Subject: [PATCH 009/200] 
refactoring parsing of input labels to cover case of multiple input states (and nested dictionaries of label to value mappings) --- psyneulink/globals/environment.py | 158 +++++++++---------- tests/mechanisms/test_input_output_labels.py | 13 ++ 2 files changed, 88 insertions(+), 83 deletions(-) diff --git a/psyneulink/globals/environment.py b/psyneulink/globals/environment.py index e1d5f7a811a..347b4a9061a 100644 --- a/psyneulink/globals/environment.py +++ b/psyneulink/globals/environment.py @@ -989,98 +989,90 @@ def _adjust_target_dict(component, target_dict): adjusted_targets[mech] = target_list return adjusted_targets, num_targets - @tc.typecheck def _parse_input_labels(obj, stimuli:dict): - from psyneulink.components.states.inputstate import InputState - - # def get_input_for_label(mech, key, input_array=None): - def get_input_for_label(mech, key, subdicts, input_array=None): - """check mech.input_labels_dict for key - If input_array is passed, need to check for subdicts (should be one for each InputState of mech)""" - - # FIX: FOR SOME REASON dict IN TEST BELOW IS TREATED AS AN UNBOUND LOCAL VARIABLE - # subdicts = isinstance(list(mech.input_labels_dict.keys())[0], dict) - - if input_array is None: - if subdicts: - raise RunError("Attempt to reference a label for a stimulus at top level of {} for {}," - "which contains subdictionaries for each of its {}s". - format(INPUT_LABELS_DICT, mech.name, InputState)) - try: - return mech.input_labels_dict[key] - except KeyError: - raise RunError("No entry \'{}\' found for input to {} in {} for mech.name". - format(key, obj.name, INPUT_LABELS_DICT, mech.name)) - else: - if not subdicts: - try: - return mech.input_labels_dict[key] - except KeyError: - raise RunError("No entry \'{}\' found for input to {} in {} for mech.name". - format(key, obj.name, INPUT_LABELS_DICT, mech.name)) - else: - # if subdicts, look exhaustively for any instances of the label in keys of all subdicts - name_value_pairs = [] - for name, dict in mech.input_labels.items(): - if key in dict: - name_value_pairs.append((name,dict[key])) - if len(name_value_pairs)==1: - # if only one found, use its value - return name_value_pairs[0][1] - else: - # if more than one is found, now know that "convenience notation" has not been used - # check that number of items in input_array == number of states - if len(input_array) != len(mech.input_states): - raise RunError("Number of items in input for {} of {} ({}) " - "does not match the number of its {}s ({})". - format(mech.name, obj.name, len(input_array), - InputState, len(mech.input_states))) - # use index of item in outer array and key (int or name of state) to determine which subdict to use - input_index = input_array.index(key) - - # try to match input_index against index in name_value_pairs[0]; - value = [item[1] for item in name_value_pairs if item[0]==input_index] - if value: - return value[0] - else: - # otherwise, match against index associated with name of state in name_value_pairs - value = [item[1] for item in name_value_pairs if mech.input_states.index(item[0])==input_index] - if value: - return value[0] - else: - raise RunError("Unable to find value for label ({}) in {} for {} of {}". 
- format(key, INPUT_LABELS_DICT, mech.name, obj.name)) - for mech, inputs in stimuli.items(): + def get_input_for_label(mech, key): + """check mech.input_labels_dict for key""" - subdicts = isinstance(list(mech.input_labels_dict.keys())[0], dict) + try: + return mech.input_labels_dict[key] + except KeyError: + raise RunError("No entry \'{}\' found for input to {} in {} for mech.name". + format(key, obj.name, INPUT_LABELS_DICT, mech.name)) + + for mech, inputs in stimuli.items(): if any(isinstance(input, str) for input in inputs) and not mech.input_labels_dict: raise RunError("Labels can not be used to specify the inputs to {} since it does not have an {}". format(mech.name, INPUT_LABELS_DICT)) - for i, stim in enumerate(inputs): - # "Burrow" down to determine whether there's a number at the "bottom"; - # if so, leave as is; otherwise, check if its a string and, if so, get value for label - if isinstance(stim, (list, np.ndarray)): # format of stimuli dict is at least: [[???]...?] - for j, item in enumerate(stim): - if isinstance(item, (Number, list, np.ndarray)): # format of stimuli dict is [[int or []...?]] - continue # leave input item as is - elif isinstance(item, str): # format of stimuli dict is [[label]...] - # inputs[i][j] = get_input_for_label(mech, item, stim) - inputs[i][j] = get_input_for_label(mech, item, subdicts, stim) - else: - raise RunError("Unrecognized specification ({}) in stimulus {} of entry " - "for {} in inputs dictionary specified for {}". - format(item, i, mech.name, obj.name)) - elif isinstance(stim, str): - # Don't pass input_array as no need to check for subdicts - # inputs[i] = get_input_for_label(mech, stim) - inputs[i] = get_input_for_label(mech, stim, subdicts) + + # Check for subdicts + subdicts = False + for k in mech.input_labels_dict: + value = mech.input_labels_dict[k] + if isinstance(value, dict): + subdicts = True + break + + if subdicts: # If there are subdicts, validate + if len(mech.input_labels_dict) != len(mech.input_states): + raise RunError("If input labels are specified at the level of input states, then one input state label " + "sub-dictionary must be provided for each input state. {} has {} input state label " + "sub-dictionaries, but {} input states.".format(mech.name, + len(mech.input_labels_dict), + len(mech.input_states))) + for k in mech.input_labels_dict: + value = mech.input_labels_dict[k] + if not isinstance(value, dict): + raise RunError("If input labels are specified at the level of input states, then one input state " + "label sub-dictionary must be provided for each input state. A sub-dictionary was " + "not specified for the input state {} of {}".format(k, mech.name)) + + # If there is only one subdict, then we already know that we are in the correct input state + num_input_labels = len(mech.input_labels_dict) + if num_input_labels == 1: + # there is only one key, but we don't know what it is + for k in mech.input_labels_dict: + for i in range(len(inputs)): + # we can use [0] because we know that there is only one input state + if isinstance(inputs[i][0], str): + inputs[i][0] = mech.input_labels_dict[k][inputs[i][0]] + else: - raise RunError("Unrecognized specification ({}) for stimulus {} in entry " - "for {} of inputs dictionary specified for {}". 
- format(stim, i, mech.name, obj.name)) + for trial_stimulus in inputs: + for input_state_index in range(len(trial_stimulus)): + if isinstance(trial_stimulus[input_state_index], str): + label_to_parse = trial_stimulus[input_state_index] + input_state_name = mech.input_states[input_state_index].name + if input_state_index in mech.input_labels_dict: + trial_stimulus[input_state_index] = \ + mech.input_labels_dict[input_state_index][label_to_parse] + elif input_state_name in mech.input_labels_dict: + trial_stimulus[input_state_index] = \ + mech.input_labels_dict[input_state_name][label_to_parse] + + else: + for i, stim in enumerate(inputs): + # "Burrow" down to determine whether there's a number at the "bottom"; + # if so, leave as is; otherwise, check if its a string and, if so, get value for label + if isinstance(stim, (list, np.ndarray)): # format of stimuli dict is at least: [[???]...?] + for j, item in enumerate(stim): + if isinstance(item, (Number, list, np.ndarray)): # format of stimuli dict is [[int or []...?]] + continue # leave input item as is + elif isinstance(item, str): # format of stimuli dict is [[label]...] + # inputs[i][j] = get_input_for_label(mech, item, stim) + inputs[i][j] = get_input_for_label(mech, item) + else: + raise RunError("Unrecognized specification ({}) in stimulus {} of entry " + "for {} in inputs dictionary specified for {}". + format(item, i, mech.name, obj.name)) + elif isinstance(stim, str): + inputs[i] = get_input_for_label(mech, stim) + else: + raise RunError("Unrecognized specification ({}) for stimulus {} in entry " + "for {} of inputs dictionary specified for {}". + format(stim, i, mech.name, obj.name)) def _validate_target_function(target_function, target_mechanism, sample_mechanism): diff --git a/tests/mechanisms/test_input_output_labels.py b/tests/mechanisms/test_input_output_labels.py index 38dfc2d80c6..7c518b7e80d 100644 --- a/tests/mechanisms/test_input_output_labels.py +++ b/tests/mechanisms/test_input_output_labels.py @@ -47,6 +47,19 @@ def test_dict_of_2d_arrays(self): S.run(inputs=['red', 'green', 'blue']) assert np.allclose(S.results, [[[1, 0], [1, 0]], [[0, 1], [0, 1]], [[0, 1], [1, 0]]]) + def test_dict_of_dicts_1_input_state(self): + input_labels_dict = {0: {"red": [1, 0], + "green": [0, 1]}} + + M = TransferMechanism(default_variable=[[0, 0]], + params={INPUT_LABELS_DICT: input_labels_dict}) + P = Process(pathway=[M]) + S = System(processes=[P]) + + S.run(inputs=[['red'], ['green'], ['green']]) + assert np.allclose(S.results, [[[1, 0]], [[0, 1]], [[0, 1]]]) + + # class TestMechanismOutputLabels: def test_dict_of_dicts(self): input_labels_dict = {0: {"red": [1, 0], "green": [0, 1]}, From a325e3911d2695d77dfa8cd600bc88ed05636126 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Tue, 10 Apr 2018 11:23:52 -0400 Subject: [PATCH 010/200] fixing bug in _adjust_stimulus_dict that led to attr error - must check whether mechanisms have input_labels_dict attr --- psyneulink/globals/environment.py | 133 +++++++++++++++--------------- 1 file changed, 66 insertions(+), 67 deletions(-) diff --git a/psyneulink/globals/environment.py b/psyneulink/globals/environment.py index 347b4a9061a..e779c49fdaa 100644 --- a/psyneulink/globals/environment.py +++ b/psyneulink/globals/environment.py @@ -837,7 +837,7 @@ def _target_matches_input_state_variable(target, input_state_variable): def _adjust_stimulus_dict(obj, stimuli): # STEP 0: parse any labels into array entries - if any(mech.input_labels_dict for mech in obj.origin_mechanisms): + if any(hasattr(mech, 
"input_labels_dict") for mech in obj.origin_mechanisms): _parse_input_labels(obj, stimuli) # STEP 1: validate that there is a one-to-one mapping of input entries to origin mechanisms @@ -1002,78 +1002,77 @@ def get_input_for_label(mech, key): format(key, obj.name, INPUT_LABELS_DICT, mech.name)) for mech, inputs in stimuli.items(): - - if any(isinstance(input, str) for input in inputs) and not mech.input_labels_dict: - raise RunError("Labels can not be used to specify the inputs to {} since it does not have an {}". - format(mech.name, INPUT_LABELS_DICT)) - - # Check for subdicts - subdicts = False - for k in mech.input_labels_dict: - value = mech.input_labels_dict[k] - if isinstance(value, dict): - subdicts = True - break - - if subdicts: # If there are subdicts, validate - if len(mech.input_labels_dict) != len(mech.input_states): - raise RunError("If input labels are specified at the level of input states, then one input state label " - "sub-dictionary must be provided for each input state. {} has {} input state label " - "sub-dictionaries, but {} input states.".format(mech.name, - len(mech.input_labels_dict), - len(mech.input_states))) + if hasattr(mech, "input_labels_dict"): + # Check for subdicts + subdicts = False for k in mech.input_labels_dict: value = mech.input_labels_dict[k] - if not isinstance(value, dict): - raise RunError("If input labels are specified at the level of input states, then one input state " - "label sub-dictionary must be provided for each input state. A sub-dictionary was " - "not specified for the input state {} of {}".format(k, mech.name)) - - # If there is only one subdict, then we already know that we are in the correct input state - num_input_labels = len(mech.input_labels_dict) - if num_input_labels == 1: - # there is only one key, but we don't know what it is + if isinstance(value, dict): + subdicts = True + break + + if subdicts: # If there are subdicts, validate + if len(mech.input_labels_dict) != len(mech.input_states): + raise RunError("If input labels are specified at the level of input states, then one input state label " + "sub-dictionary must be provided for each input state. {} has {} input state label " + "sub-dictionaries, but {} input states.".format(mech.name, + len(mech.input_labels_dict), + len(mech.input_states))) for k in mech.input_labels_dict: - for i in range(len(inputs)): - # we can use [0] because we know that there is only one input state - if isinstance(inputs[i][0], str): - inputs[i][0] = mech.input_labels_dict[k][inputs[i][0]] - - else: - for trial_stimulus in inputs: - for input_state_index in range(len(trial_stimulus)): - if isinstance(trial_stimulus[input_state_index], str): - label_to_parse = trial_stimulus[input_state_index] - input_state_name = mech.input_states[input_state_index].name - if input_state_index in mech.input_labels_dict: - trial_stimulus[input_state_index] = \ - mech.input_labels_dict[input_state_index][label_to_parse] - elif input_state_name in mech.input_labels_dict: - trial_stimulus[input_state_index] = \ - mech.input_labels_dict[input_state_name][label_to_parse] + value = mech.input_labels_dict[k] + if not isinstance(value, dict): + raise RunError("If input labels are specified at the level of input states, then one input state " + "label sub-dictionary must be provided for each input state. 
A sub-dictionary was " + "not specified for the input state {} of {}".format(k, mech.name)) + + # If there is only one subdict, then we already know that we are in the correct input state + num_input_labels = len(mech.input_labels_dict) + if num_input_labels == 1: + # there is only one key, but we don't know what it is + for k in mech.input_labels_dict: + for i in range(len(inputs)): + # we can use [0] because we know that there is only one input state + if isinstance(inputs[i][0], str): + inputs[i][0] = mech.input_labels_dict[k][inputs[i][0]] - else: - for i, stim in enumerate(inputs): - # "Burrow" down to determine whether there's a number at the "bottom"; - # if so, leave as is; otherwise, check if its a string and, if so, get value for label - if isinstance(stim, (list, np.ndarray)): # format of stimuli dict is at least: [[???]...?] - for j, item in enumerate(stim): - if isinstance(item, (Number, list, np.ndarray)): # format of stimuli dict is [[int or []...?]] - continue # leave input item as is - elif isinstance(item, str): # format of stimuli dict is [[label]...] - # inputs[i][j] = get_input_for_label(mech, item, stim) - inputs[i][j] = get_input_for_label(mech, item) - else: - raise RunError("Unrecognized specification ({}) in stimulus {} of entry " - "for {} in inputs dictionary specified for {}". - format(item, i, mech.name, obj.name)) - elif isinstance(stim, str): - inputs[i] = get_input_for_label(mech, stim) else: - raise RunError("Unrecognized specification ({}) for stimulus {} in entry " - "for {} of inputs dictionary specified for {}". - format(stim, i, mech.name, obj.name)) + for trial_stimulus in inputs: + for input_state_index in range(len(trial_stimulus)): + if isinstance(trial_stimulus[input_state_index], str): + label_to_parse = trial_stimulus[input_state_index] + input_state_name = mech.input_states[input_state_index].name + if input_state_index in mech.input_labels_dict: + trial_stimulus[input_state_index] = \ + mech.input_labels_dict[input_state_index][label_to_parse] + elif input_state_name in mech.input_labels_dict: + trial_stimulus[input_state_index] = \ + mech.input_labels_dict[input_state_name][label_to_parse] + else: + for i, stim in enumerate(inputs): + # "Burrow" down to determine whether there's a number at the "bottom"; + # if so, leave as is; otherwise, check if its a string and, if so, get value for label + if isinstance(stim, (list, np.ndarray)): # format of stimuli dict is at least: [[???]...?] + for j, item in enumerate(stim): + if isinstance(item, (Number, list, np.ndarray)): # format of stimuli dict is [[int or []...?]] + continue # leave input item as is + elif isinstance(item, str): # format of stimuli dict is [[label]...] + # inputs[i][j] = get_input_for_label(mech, item, stim) + inputs[i][j] = get_input_for_label(mech, item) + else: + raise RunError("Unrecognized specification ({}) in stimulus {} of entry " + "for {} in inputs dictionary specified for {}". + format(item, i, mech.name, obj.name)) + elif isinstance(stim, str): + inputs[i] = get_input_for_label(mech, stim) + else: + raise RunError("Unrecognized specification ({}) for stimulus {} in entry " + "for {} of inputs dictionary specified for {}". + format(stim, i, mech.name, obj.name)) + else: + if any(isinstance(input, str) for input in inputs) and not mech.input_labels_dict: + raise RunError("Labels can not be used to specify the inputs to {} since it does not have an {}". 
+ format(mech.name, INPUT_LABELS_DICT)) def _validate_target_function(target_function, target_mechanism, sample_mechanism): generated_targets = np.atleast_1d(target_function()) From 95ca4d33eceb15942aad8b1bf3ea0ec8efc937ab Mon Sep 17 00:00:00 2001 From: KristenManning Date: Tue, 10 Apr 2018 17:16:37 -0400 Subject: [PATCH 011/200] small adjustments to parse_input_labels to allow for the same convenience notations as with value inputs, plus tests for these cases --- psyneulink/globals/environment.py | 160 ++++++++++--------- tests/mechanisms/test_input_output_labels.py | 68 ++++++-- 2 files changed, 145 insertions(+), 83 deletions(-) diff --git a/psyneulink/globals/environment.py b/psyneulink/globals/environment.py index e779c49fdaa..da3319936a2 100644 --- a/psyneulink/globals/environment.py +++ b/psyneulink/globals/environment.py @@ -627,7 +627,7 @@ def run(object, else: raise RunError("Inputs to {} must be specified in a dictionary with a key for each of its {} origin " "mechanisms.".format(object.name, len(object.origin_mechanisms))) - elif not isinstance(inputs, dict): + elif not isinstance(inputs, dict) and not isinstance(inputs, str): if len(object.origin_mechanisms) == 1: raise RunError("Inputs to {} must be specified in a list or in a dictionary with the origin mechanism({}) " "as its only key".format(object.name, object.origin_mechanisms[0].name)) @@ -837,8 +837,13 @@ def _target_matches_input_state_variable(target, input_state_variable): def _adjust_stimulus_dict(obj, stimuli): # STEP 0: parse any labels into array entries - if any(hasattr(mech, "input_labels_dict") for mech in obj.origin_mechanisms): - _parse_input_labels(obj, stimuli) + need_parse_input_labels = [] + for mech in obj.origin_mechanisms: + if hasattr(mech, "input_labels_dict"): + if mech.input_labels_dict is not None and mech.input_labels_dict != {}: + need_parse_input_labels.append(mech) + if len(need_parse_input_labels) > 0: + stimuli = _parse_input_labels(obj, stimuli, need_parse_input_labels) # STEP 1: validate that there is a one-to-one mapping of input entries to origin mechanisms @@ -919,8 +924,13 @@ def _adjust_stimulus_dict(obj, stimuli): def _adjust_target_dict(component, target_dict): # STEP 0: parse any labels into array entries - if any(mech.input_labels_dict for mech in component.target_mechanisms): - _parse_input_labels(component, target_dict) + need_parse_input_labels = [] + for mech in component.target_mechanisms: + if hasattr(mech, "input_labels_dict"): + if mech.input_labels_dict is not None and mech.input_labels_dict != {}: + need_parse_input_labels.append(mech) + if len(need_parse_input_labels) > 0: + _parse_input_labels(component, target_dict, need_parse_input_labels) # STEP 1: validate that there is a one-to-one mapping of target entries and target mechanisms for target_mechanism in component.target_mechanisms: @@ -990,7 +1000,7 @@ def _adjust_target_dict(component, target_dict): return adjusted_targets, num_targets @tc.typecheck -def _parse_input_labels(obj, stimuli:dict): +def _parse_input_labels(obj, stimuli, mechanisms_to_parse): def get_input_for_label(mech, key): """check mech.input_labels_dict for key""" @@ -1000,79 +1010,83 @@ def get_input_for_label(mech, key): except KeyError: raise RunError("No entry \'{}\' found for input to {} in {} for mech.name". 
format(key, obj.name, INPUT_LABELS_DICT, mech.name)) - - for mech, inputs in stimuli.items(): - if hasattr(mech, "input_labels_dict"): - # Check for subdicts - subdicts = False + if len(mechanisms_to_parse) == 1: + if isinstance(stimuli, float): + return stimuli + elif isinstance(stimuli, str): + stimuli = {mechanisms_to_parse[0]: [stimuli]} + # for mech, inputs in stimuli.items(): + for mech in mechanisms_to_parse: + inputs = stimuli[mech] + # Check for subdicts + subdicts = False + for k in mech.input_labels_dict: + value = mech.input_labels_dict[k] + if isinstance(value, dict): + subdicts = True + break + + if subdicts: # If there are subdicts, validate + if len(mech.input_labels_dict) != len(mech.input_states): + raise RunError("If input labels are specified at the level of input states, then one input state label " + "sub-dictionary must be provided for each input state. {} has {} input state label " + "sub-dictionaries, but {} input states.".format(mech.name, + len(mech.input_labels_dict), + len(mech.input_states))) for k in mech.input_labels_dict: value = mech.input_labels_dict[k] - if isinstance(value, dict): - subdicts = True - break - - if subdicts: # If there are subdicts, validate - if len(mech.input_labels_dict) != len(mech.input_states): - raise RunError("If input labels are specified at the level of input states, then one input state label " - "sub-dictionary must be provided for each input state. {} has {} input state label " - "sub-dictionaries, but {} input states.".format(mech.name, - len(mech.input_labels_dict), - len(mech.input_states))) + if not isinstance(value, dict): + raise RunError("If input labels are specified at the level of input states, then one input state " + "label sub-dictionary must be provided for each input state. A sub-dictionary was " + "not specified for the input state {} of {}".format(k, mech.name)) + + # If there is only one subdict, then we already know that we are in the correct input state + num_input_labels = len(mech.input_labels_dict) + if num_input_labels == 1: + # there is only one key, but we don't know what it is for k in mech.input_labels_dict: - value = mech.input_labels_dict[k] - if not isinstance(value, dict): - raise RunError("If input labels are specified at the level of input states, then one input state " - "label sub-dictionary must be provided for each input state. 
A sub-dictionary was " - "not specified for the input state {} of {}".format(k, mech.name)) - - # If there is only one subdict, then we already know that we are in the correct input state - num_input_labels = len(mech.input_labels_dict) - if num_input_labels == 1: - # there is only one key, but we don't know what it is - for k in mech.input_labels_dict: - for i in range(len(inputs)): - # we can use [0] because we know that there is only one input state - if isinstance(inputs[i][0], str): - inputs[i][0] = mech.input_labels_dict[k][inputs[i][0]] - - else: - for trial_stimulus in inputs: - for input_state_index in range(len(trial_stimulus)): - if isinstance(trial_stimulus[input_state_index], str): - label_to_parse = trial_stimulus[input_state_index] - input_state_name = mech.input_states[input_state_index].name - if input_state_index in mech.input_labels_dict: - trial_stimulus[input_state_index] = \ - mech.input_labels_dict[input_state_index][label_to_parse] - elif input_state_name in mech.input_labels_dict: - trial_stimulus[input_state_index] = \ - mech.input_labels_dict[input_state_name][label_to_parse] + for i in range(len(inputs)): + if isinstance(inputs[i], str): + inputs[i] = mech.input_labels_dict[k][inputs[i]] + # we can use [0] because we know that there is only one input state + elif isinstance(inputs[i][0], str): + inputs[i][0] = mech.input_labels_dict[k][inputs[i][0]] else: - for i, stim in enumerate(inputs): - # "Burrow" down to determine whether there's a number at the "bottom"; - # if so, leave as is; otherwise, check if its a string and, if so, get value for label - if isinstance(stim, (list, np.ndarray)): # format of stimuli dict is at least: [[???]...?] - for j, item in enumerate(stim): - if isinstance(item, (Number, list, np.ndarray)): # format of stimuli dict is [[int or []...?]] - continue # leave input item as is - elif isinstance(item, str): # format of stimuli dict is [[label]...] - # inputs[i][j] = get_input_for_label(mech, item, stim) - inputs[i][j] = get_input_for_label(mech, item) - else: - raise RunError("Unrecognized specification ({}) in stimulus {} of entry " - "for {} in inputs dictionary specified for {}". - format(item, i, mech.name, obj.name)) - elif isinstance(stim, str): - inputs[i] = get_input_for_label(mech, stim) - else: - raise RunError("Unrecognized specification ({}) for stimulus {} in entry " - "for {} of inputs dictionary specified for {}". - format(stim, i, mech.name, obj.name)) + for trial_stimulus in inputs: + for input_state_index in range(len(trial_stimulus)): + if isinstance(trial_stimulus[input_state_index], str): + label_to_parse = trial_stimulus[input_state_index] + input_state_name = mech.input_states[input_state_index].name + if input_state_index in mech.input_labels_dict: + trial_stimulus[input_state_index] = \ + mech.input_labels_dict[input_state_index][label_to_parse] + elif input_state_name in mech.input_labels_dict: + trial_stimulus[input_state_index] = \ + mech.input_labels_dict[input_state_name][label_to_parse] + else: - if any(isinstance(input, str) for input in inputs) and not mech.input_labels_dict: - raise RunError("Labels can not be used to specify the inputs to {} since it does not have an {}". - format(mech.name, INPUT_LABELS_DICT)) + for i, stim in enumerate(inputs): + # "Burrow" down to determine whether there's a number at the "bottom"; + # if so, leave as is; otherwise, check if its a string and, if so, get value for label + if isinstance(stim, (list, np.ndarray)): # format of stimuli dict is at least: [[???]...?] 
+ for j, item in enumerate(stim): + if isinstance(item, (Number, list, np.ndarray)): # format of stimuli dict is [[int or []...?]] + continue # leave input item as is + elif isinstance(item, str): # format of stimuli dict is [[label]...] + # inputs[i][j] = get_input_for_label(mech, item, stim) + inputs[i][j] = get_input_for_label(mech, item) + else: + raise RunError("Unrecognized specification ({}) in stimulus {} of entry " + "for {} in inputs dictionary specified for {}". + format(item, i, mech.name, obj.name)) + elif isinstance(stim, str): + inputs[i] = get_input_for_label(mech, stim) + else: + raise RunError("Unrecognized specification ({}) for stimulus {} in entry " + "for {} of inputs dictionary specified for {}". + format(stim, i, mech.name, obj.name)) + return stimuli def _validate_target_function(target_function, target_mechanism, sample_mechanism): generated_targets = np.atleast_1d(target_function()) diff --git a/tests/mechanisms/test_input_output_labels.py b/tests/mechanisms/test_input_output_labels.py index 7c518b7e80d..17641b5b1df 100644 --- a/tests/mechanisms/test_input_output_labels.py +++ b/tests/mechanisms/test_input_output_labels.py @@ -1,14 +1,9 @@ import numpy as np import pytest -from psyneulink.components.component import ComponentError -from psyneulink.components.functions.function import FunctionError -from psyneulink.components.functions.function import ConstantIntegrator, Exponential, Linear, Logistic, Reduce, Reinforcement, SoftMax -from psyneulink.components.functions.function import ExponentialDist, GammaDist, NormalDist, UniformDist, WaldDist, UniformToNormalDist -from psyneulink.components.mechanisms.mechanism import MechanismError from psyneulink.components.mechanisms.processing.processingmechanism import ProcessingMechanism from psyneulink.components.mechanisms.processing.transfermechanism import TransferMechanism -from psyneulink.globals.keywords import INPUT_LABELS_DICT +from psyneulink.globals.keywords import INPUT_LABELS_DICT, ENABLED from psyneulink.components.process import Process from psyneulink.components.system import System @@ -16,7 +11,7 @@ class TestMechanismInputLabels: def test_dict_of_floats(self): input_labels_dict = {"red": 1, "green":0} - M = ProcessingMechanism(params={INPUT_LABELS_DICT:input_labels_dict}) + M = ProcessingMechanism(params={INPUT_LABELS_DICT: input_labels_dict}) P = Process(pathway=[M]) S = System(processes=[P]) @@ -35,6 +30,29 @@ def test_dict_of_arrays(self): S.run(inputs=['red', 'green', 'blue', 'red']) assert np.allclose(S.results, [[[1, 0, 0]], [[0, 1, 0]], [[0, 0, 1]], [[1, 0, 0]]]) + S.run(inputs='red') + assert np.allclose(S.results, [[[1, 0, 0]], [[0, 1, 0]], [[0, 0, 1]], [[1, 0, 0]], [[1, 0, 0]]]) + + S.run(inputs=['red']) + assert np.allclose(S.results, [[[1, 0, 0]], [[0, 1, 0]], [[0, 0, 1]], [[1, 0, 0]], [[1, 0, 0]], [[1, 0, 0]]]) + + def test_dict_of_arrays_2_input_states(self): + input_labels_dict = {"red": [0], + "green": [1]} + + M = ProcessingMechanism(default_variable=[[0], [0]], + params={INPUT_LABELS_DICT: input_labels_dict}) + P = Process(pathway=[M]) + S = System(processes=[P]) + + M_output = [] + def call_after_trial(): + M_output.append(M.value) + S.run(inputs=[['red', 'green'], ['green', 'red']], + call_after_trial=call_after_trial) + + assert np.allclose(M_output, [[[0], [1]], [[1], [0]]]) + def test_dict_of_2d_arrays(self): input_labels_dict = {"red": [[1, 0], [1, 0]], "green": [[0, 1], [0, 1]], @@ -47,6 +65,9 @@ def test_dict_of_2d_arrays(self): S.run(inputs=['red', 'green', 'blue']) assert 
np.allclose(S.results, [[[1, 0], [1, 0]], [[0, 1], [0, 1]], [[0, 1], [1, 0]]]) + S.run(inputs='red') + assert np.allclose(S.results, [[[1, 0], [1, 0]], [[0, 1], [0, 1]], [[0, 1], [1, 0]], [[1, 0], [1, 0]]]) + def test_dict_of_dicts_1_input_state(self): input_labels_dict = {0: {"red": [1, 0], "green": [0, 1]}} @@ -59,7 +80,12 @@ def test_dict_of_dicts_1_input_state(self): S.run(inputs=[['red'], ['green'], ['green']]) assert np.allclose(S.results, [[[1, 0]], [[0, 1]], [[0, 1]]]) - # class TestMechanismOutputLabels: + S.run(inputs='red') + assert np.allclose(S.results, [[[1, 0]], [[0, 1]], [[0, 1]], [[1, 0]]]) + + S.run(inputs=['red']) + assert np.allclose(S.results, [[[1, 0]], [[0, 1]], [[0, 1]], [[1, 0]], [[1, 0]]]) + def test_dict_of_dicts(self): input_labels_dict = {0: {"red": [1, 0], "green": [0, 1]}, @@ -75,5 +101,27 @@ def test_dict_of_dicts(self): S.run(inputs=[['red', 'green'], ['green', 'red'], ['green', 'green']]) assert np.allclose(S.results, [[[1, 0], [1, 0]], [[0, 1], [0, 1]], [[0, 1], [1, 0]]]) - # class TestTargetLabels: -# class TestMechanismOutputLabels: \ No newline at end of file +# class TestMechanismTargetLabels: +# def test_dict_of_floats(self): +# input_labels_dict_M1 = {"red": 1, +# "green": 0} +# input_labels_dict_M2 = {"red": 0, +# "green": 1} +# +# M1 = ProcessingMechanism(params={INPUT_LABELS_DICT: input_labels_dict_M1}) +# M2 = ProcessingMechanism(params={INPUT_LABELS_DICT: input_labels_dict_M2}) +# P = Process(pathway=[M1, M2], +# learning=ENABLED, +# learning_rate=0.25) +# S = System(processes=[P]) +# +# learned_matrix = [] +# def record_matrix_after_trial(): +# learned_matrix.append(M2.path_afferents[0].mod_matrix) +# S.run(inputs=['red', 'green', 'green', 'red'], +# # targets=['red', 'green', 'green', 'red'], +# call_after_trial=record_matrix_after_trial, +# targets=['red', 'green', 'green', 'red']) +# +# assert np.allclose(S.results, [[[1.]], [[0.]], [[0.]], [[0.75]]]) +# assert np.allclose(learned_matrix, [[[0.75]], [[0.75]], [[0.75]], [[0.5625]]]) From 31da536ea64578f5110d14ea9b7df124dcd0eb04 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 13 Apr 2018 17:08:01 -0400 Subject: [PATCH 012/200] continuing to refactor adjust_target_dicts so that it can accurately parse output state labels on the terminal mechanism of a learned process into target values --- psyneulink/globals/environment.py | 80 +++++++++++-- tests/mechanisms/test_input_output_labels.py | 120 +++++++++++++++---- 2 files changed, 168 insertions(+), 32 deletions(-) diff --git a/psyneulink/globals/environment.py b/psyneulink/globals/environment.py index da3319936a2..ce679a9a522 100644 --- a/psyneulink/globals/environment.py +++ b/psyneulink/globals/environment.py @@ -924,13 +924,13 @@ def _adjust_stimulus_dict(obj, stimuli): def _adjust_target_dict(component, target_dict): # STEP 0: parse any labels into array entries - need_parse_input_labels = [] - for mech in component.target_mechanisms: - if hasattr(mech, "input_labels_dict"): - if mech.input_labels_dict is not None and mech.input_labels_dict != {}: - need_parse_input_labels.append(mech) - if len(need_parse_input_labels) > 0: - _parse_input_labels(component, target_dict, need_parse_input_labels) + need_parse_target_labels = [] + for mech in target_dict: + if hasattr(mech, "output_labels_dict"): + if mech.output_labels_dict is not None and mech.output_labels_dict != {}: + need_parse_target_labels.append(mech) + if len(need_parse_target_labels) > 0: + target_dict = _parse_target_labels(component, target_dict, need_parse_target_labels) # 
STEP 1: validate that there is a one-to-one mapping of target entries and target mechanisms for target_mechanism in component.target_mechanisms: @@ -1087,6 +1087,72 @@ def get_input_for_label(mech, key): "for {} of inputs dictionary specified for {}". format(stim, i, mech.name, obj.name)) return stimuli + +def _parse_target_labels(obj, target_dict, mechanisms_to_parse): + if len(mechanisms_to_parse) == 1: + if isinstance(target_dict, float): + return target_dict + elif isinstance(target_dict, str): + target_dict= {mechanisms_to_parse[0]: [target_dict]} + elif isinstance(target_dict, (list, np.ndarray)): + target_dict = {mechanisms_to_parse[0]: target_dict} + def get_target_for_label(mech, key): + """check mech.input_labels_dict for key""" + + try: + return mech.output_labels_dict[key] + except KeyError: + raise RunError("No entry \'{}\' found for input to {} in {} for mech.name". + format(key, obj.name, OUTPUT_LABELS_DICT, mech.name)) + + for mech in mechanisms_to_parse: + targets = target_dict[mech] + # Check for subdicts + subdicts = False + for k in mech.output_labels_dict: + value = mech.output_labels_dict[k] + if isinstance(value, dict): + subdicts = True + break + + if subdicts: # If there are subdicts, validate + for key in mech.output_labels_dict: + output_state = mech.output_states[key] + for proj in output_state.efferents: + if proj.receiver.name == SAMPLE: + output_state_index = mech.output_states.index(output_state) + output_state_name = output_state.name + + for i in range(len(targets)): + trial_target = targets[i] + if isinstance(trial_target, str): + if output_state_index in mech.output_labels_dict: + targets[i] = mech.output_labels_dict[output_state_index][trial_target] + elif output_state_name in mech.output_labels_dict: + targets[i] = mech.output_labels_dict[output_state_name][trial_target] + + else: + for i, stim in enumerate(targets): + # "Burrow" down to determine whether there's a number at the "bottom"; + # if so, leave as is; otherwise, check if its a string and, if so, get value for label + if isinstance(stim, (list, np.ndarray)): # format of stimuli dict is at least: [[???]...?] + for j, item in enumerate(stim): + if isinstance(item, (Number, list, np.ndarray)): # format of stimuli dict is [[int or []...?]] + continue # leave input item as is + elif isinstance(item, str): # format of stimuli dict is [[label]...] + # targets[i][j] = get_input_for_label(mech, item, stim) + targets[i][j] = get_target_for_label(mech, item) + else: + raise RunError("Unrecognized specification ({}) in stimulus {} of entry " + "for {} in targets dictionary specified for {}". + format(item, i, mech.name, obj.name)) + elif isinstance(stim, str): + targets[i] = get_target_for_label(mech, stim) + else: + raise RunError("Unrecognized specification ({}) for stimulus {} in entry " + "for {} of targets dictionary specified for {}". 
+ format(stim, i, mech.name, obj.name)) + return target_dict def _validate_target_function(target_function, target_mechanism, sample_mechanism): generated_targets = np.atleast_1d(target_function()) diff --git a/tests/mechanisms/test_input_output_labels.py b/tests/mechanisms/test_input_output_labels.py index 17641b5b1df..461f5547ea7 100644 --- a/tests/mechanisms/test_input_output_labels.py +++ b/tests/mechanisms/test_input_output_labels.py @@ -3,7 +3,7 @@ from psyneulink.components.mechanisms.processing.processingmechanism import ProcessingMechanism from psyneulink.components.mechanisms.processing.transfermechanism import TransferMechanism -from psyneulink.globals.keywords import INPUT_LABELS_DICT, ENABLED +from psyneulink.globals.keywords import INPUT_LABELS_DICT, OUTPUT_LABELS_DICT, ENABLED from psyneulink.components.process import Process from psyneulink.components.system import System @@ -101,27 +101,97 @@ def test_dict_of_dicts(self): S.run(inputs=[['red', 'green'], ['green', 'red'], ['green', 'green']]) assert np.allclose(S.results, [[[1, 0], [1, 0]], [[0, 1], [0, 1]], [[0, 1], [1, 0]]]) -# class TestMechanismTargetLabels: -# def test_dict_of_floats(self): -# input_labels_dict_M1 = {"red": 1, -# "green": 0} -# input_labels_dict_M2 = {"red": 0, -# "green": 1} -# -# M1 = ProcessingMechanism(params={INPUT_LABELS_DICT: input_labels_dict_M1}) -# M2 = ProcessingMechanism(params={INPUT_LABELS_DICT: input_labels_dict_M2}) -# P = Process(pathway=[M1, M2], -# learning=ENABLED, -# learning_rate=0.25) -# S = System(processes=[P]) -# -# learned_matrix = [] -# def record_matrix_after_trial(): -# learned_matrix.append(M2.path_afferents[0].mod_matrix) -# S.run(inputs=['red', 'green', 'green', 'red'], -# # targets=['red', 'green', 'green', 'red'], -# call_after_trial=record_matrix_after_trial, -# targets=['red', 'green', 'green', 'red']) -# -# assert np.allclose(S.results, [[[1.]], [[0.]], [[0.]], [[0.75]]]) -# assert np.allclose(learned_matrix, [[[0.75]], [[0.75]], [[0.75]], [[0.5625]]]) +class TestMechanismTargetLabels: + def test_dict_of_floats(self): + input_labels_dict_M1 = {"red": 1, + "green": 0} + output_labels_dict_M2 = {"red": 0, + "green": 1} + + M1 = ProcessingMechanism(params={INPUT_LABELS_DICT: input_labels_dict_M1}) + M2 = ProcessingMechanism(params={OUTPUT_LABELS_DICT: output_labels_dict_M2}) + P = Process(pathway=[M1, M2], + learning=ENABLED, + learning_rate=0.25) + S = System(processes=[P]) + + learned_matrix = [] + def record_matrix_after_trial(): + learned_matrix.append(M2.path_afferents[0].mod_matrix) + S.run(inputs=['red', 'green', 'green', 'red'], + targets=['red', 'green', 'green', 'red'], + call_after_trial=record_matrix_after_trial) + + assert np.allclose(S.results, [[[1.]], [[0.]], [[0.]], [[0.75]]]) + assert np.allclose(learned_matrix, [[[0.75]], [[0.75]], [[0.75]], [[0.5625]]]) + + def test_dict_of_arrays(self): + input_labels_dict_M1 = {"red": [1, 1], + "green": [0, 0]} + output_labels_dict_M2 = {"red": [0, 0], + "green": [1, 1]} + + M1 = ProcessingMechanism(size=2, + params={INPUT_LABELS_DICT: input_labels_dict_M1}) + M2 = ProcessingMechanism(size=2, + params={OUTPUT_LABELS_DICT: output_labels_dict_M2}) + P = Process(pathway=[M1, M2], + learning=ENABLED, + learning_rate=0.25) + S = System(processes=[P]) + + learned_matrix = [] + count = [] + def record_matrix_after_trial(): + learned_matrix.append(M2.path_afferents[0].mod_matrix) + count.append(1) + + + S.run(inputs=['red', 'green', 'green', 'red'], + targets=['red', 'green', 'green', 'red'], + # inputs=[[1, 1], [0, 0], 
[0, 0], [1, 1]], + # targets=[[0, 0], [1, 1], [1, 1], [0, 0]], + call_after_trial=record_matrix_after_trial) + print(S.results) + print(learned_matrix) + assert np.allclose(S.results, [[[1, 1]], [[0., 0.]], [[0., 0.]], [[0.5, 0.5]]]) + assert np.allclose(learned_matrix, [np.array([[0.75, -0.25], [-0.25, 0.75]]), + np.array([[0.75, -0.25], [-0.25, 0.75]]), + np.array([[0.75, -0.25], [-0.25, 0.75]]), + np.array([[0.625, -0.375], [-0.375, 0.625]])]) + + def test_dict_of_subdicts(self): + input_labels_dict_M1 = {"red": [1, 1], + "green": [0, 0]} + output_labels_dict_M2 = {0: {"red": [0, 0], + "green": [1, 1]} + } + + M1 = ProcessingMechanism(size=2, + params={INPUT_LABELS_DICT: input_labels_dict_M1}) + M2 = ProcessingMechanism(size=2, + params={OUTPUT_LABELS_DICT: output_labels_dict_M2}) + P = Process(pathway=[M1, M2], + learning=ENABLED, + learning_rate=0.25) + S = System(processes=[P]) + + learned_matrix = [] + count = [] + def record_matrix_after_trial(): + learned_matrix.append(M2.path_afferents[0].mod_matrix) + count.append(1) + + + S.run(inputs=['red', 'green', 'green', 'red'], + targets=['red', 'green', 'green', 'red'], + # inputs=[[1, 1], [0, 0], [0, 0], [1, 1]], + # targets=[[0, 0], [1, 1], [1, 1], [0, 0]], + call_after_trial=record_matrix_after_trial) + print(S.results) + print(learned_matrix) + assert np.allclose(S.results, [[[1, 1]], [[0., 0.]], [[0., 0.]], [[0.5, 0.5]]]) + assert np.allclose(learned_matrix, [np.array([[0.75, -0.25], [-0.25, 0.75]]), + np.array([[0.75, -0.25], [-0.25, 0.75]]), + np.array([[0.75, -0.25], [-0.25, 0.75]]), + np.array([[0.625, -0.375], [-0.375, 0.625]])]) From 7bc9b7e171a8df5fbcf4f9b8c9c56b6e7365e3ea Mon Sep 17 00:00:00 2001 From: KristenManning Date: Mon, 16 Apr 2018 13:43:11 -0400 Subject: [PATCH 013/200] removing case from _parse_input_labels that rejected valid numeric input specification (_parse_input_labels should translate strings, but ignore numeric values) --- psyneulink/globals/environment.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/psyneulink/globals/environment.py b/psyneulink/globals/environment.py index ce679a9a522..54f72f43fc8 100644 --- a/psyneulink/globals/environment.py +++ b/psyneulink/globals/environment.py @@ -1082,10 +1082,7 @@ def get_input_for_label(mech, key): format(item, i, mech.name, obj.name)) elif isinstance(stim, str): inputs[i] = get_input_for_label(mech, stim) - else: - raise RunError("Unrecognized specification ({}) for stimulus {} in entry " - "for {} of inputs dictionary specified for {}". 
- format(stim, i, mech.name, obj.name)) + return stimuli def _parse_target_labels(obj, target_dict, mechanisms_to_parse): From fe75b16f4b335622284cef102a2bde4cbf451660 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Mon, 16 Apr 2018 13:47:33 -0400 Subject: [PATCH 014/200] adding pytests for input arrays that include both string labels and values --- tests/mechanisms/test_input_output_labels.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/tests/mechanisms/test_input_output_labels.py b/tests/mechanisms/test_input_output_labels.py index 461f5547ea7..17269cc969c 100644 --- a/tests/mechanisms/test_input_output_labels.py +++ b/tests/mechanisms/test_input_output_labels.py @@ -18,6 +18,10 @@ def test_dict_of_floats(self): S.run(inputs=['red', 'green', 'green', 'red']) assert np.allclose(S.results, [[[1.]], [[0.]], [[0.]], [[1.]]]) + S.run(inputs=[1, 'green', 0, 'red']) + assert np.allclose(S.results, [[[1.]], [[0.]], [[0.]], [[1.]], [[1.]], [[0.]], [[0.]], [[1.]]]) + + def test_dict_of_arrays(self): input_labels_dict = {"red": [1, 0, 0], "green": [0, 1, 0], @@ -53,6 +57,11 @@ def call_after_trial(): assert np.allclose(M_output, [[[0], [1]], [[1], [0]]]) + S.run(inputs=[[[0], 'green'], [[1], 'red']], + call_after_trial=call_after_trial) + + assert np.allclose(M_output, [[[0], [1]], [[1], [0]], [[0], [1]], [[1], [0]]]) + def test_dict_of_2d_arrays(self): input_labels_dict = {"red": [[1, 0], [1, 0]], "green": [[0, 1], [0, 1]], @@ -101,6 +110,9 @@ def test_dict_of_dicts(self): S.run(inputs=[['red', 'green'], ['green', 'red'], ['green', 'green']]) assert np.allclose(S.results, [[[1, 0], [1, 0]], [[0, 1], [0, 1]], [[0, 1], [1, 0]]]) + S.run(inputs=[['red', [1, 0]], ['green', 'red'], [[0,1], 'green']]) + assert np.allclose(S.results, [[[1, 0], [1, 0]], [[0, 1], [0, 1]], [[0, 1], [1, 0]], [[1, 0], [1, 0]], [[0, 1], [0, 1]], [[0, 1], [1, 0]]]) + class TestMechanismTargetLabels: def test_dict_of_floats(self): input_labels_dict_M1 = {"red": 1, @@ -149,11 +161,7 @@ def record_matrix_after_trial(): S.run(inputs=['red', 'green', 'green', 'red'], targets=['red', 'green', 'green', 'red'], - # inputs=[[1, 1], [0, 0], [0, 0], [1, 1]], - # targets=[[0, 0], [1, 1], [1, 1], [0, 0]], call_after_trial=record_matrix_after_trial) - print(S.results) - print(learned_matrix) assert np.allclose(S.results, [[[1, 1]], [[0., 0.]], [[0., 0.]], [[0.5, 0.5]]]) assert np.allclose(learned_matrix, [np.array([[0.75, -0.25], [-0.25, 0.75]]), np.array([[0.75, -0.25], [-0.25, 0.75]]), @@ -185,8 +193,6 @@ def record_matrix_after_trial(): S.run(inputs=['red', 'green', 'green', 'red'], targets=['red', 'green', 'green', 'red'], - # inputs=[[1, 1], [0, 0], [0, 0], [1, 1]], - # targets=[[0, 0], [1, 1], [1, 1], [0, 0]], call_after_trial=record_matrix_after_trial) print(S.results) print(learned_matrix) From 2bb1b783237258e1041da98e10ad6403de94969e Mon Sep 17 00:00:00 2001 From: KristenManning Date: Tue, 17 Apr 2018 15:44:31 -0400 Subject: [PATCH 015/200] adding tests for mechanism output labels --- tests/mechanisms/test_input_output_labels.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/tests/mechanisms/test_input_output_labels.py b/tests/mechanisms/test_input_output_labels.py index 17269cc969c..be5fc41120b 100644 --- a/tests/mechanisms/test_input_output_labels.py +++ b/tests/mechanisms/test_input_output_labels.py @@ -201,3 +201,22 @@ def record_matrix_after_trial(): np.array([[0.75, -0.25], [-0.25, 0.75]]), np.array([[0.75, -0.25], [-0.25, 0.75]]), np.array([[0.625, -0.375], 
[-0.375, 0.625]])]) + +class TestMechanismOutputLabels: + def test_dict_of_floats(self): + input_labels_dict = {"red": 1, + "green": 0} + output_labels_dict = {"red": 1, + "green":0} + M = ProcessingMechanism(params={INPUT_LABELS_DICT: input_labels_dict, + OUTPUT_LABELS_DICT: output_labels_dict}) + P = Process(pathway=[M]) + S = System(processes=[P]) + + S.run(inputs=['red', 'green', 'green', 'red']) + assert np.allclose(S.results, [[[1.]], [[0.]], [[0.]], [[1.]]]) + + S.run(inputs=[1, 'green', 0, 'red']) + assert np.allclose(S.results, [[[1.]], [[0.]], [[0.]], [[1.]], [[1.]], [[0.]], [[0.]], [[1.]]]) + print(M.output_labels) + # S.show_graph(show_values=True) From ecd25473acd61ad537c75270c7075a950bb6a16c Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 19 Apr 2018 17:14:44 -0400 Subject: [PATCH 016/200] fixing various small bugs /outdated code related to composition in the process of getting composition working on the devel branch --- psyneulink/compositions/composition.py | 52 ++++++++++++----------- psyneulink/globals/keywords.py | 2 + tests/composition/test_composition.py | 59 +++++++++----------------- 3 files changed, 50 insertions(+), 63 deletions(-) diff --git a/psyneulink/compositions/composition.py b/psyneulink/compositions/composition.py index 9164366addd..b31b11ee8f2 100644 --- a/psyneulink/compositions/composition.py +++ b/psyneulink/compositions/composition.py @@ -58,7 +58,7 @@ from psyneulink.components.projections.pathway.mappingprojection import MappingProjection from psyneulink.components.states.outputstate import OutputState from psyneulink.components.shellclasses import Mechanism, Projection -from psyneulink.globals.keywords import SYSTEM, EXECUTING, SOFT_CLAMP, IDENTITY_MATRIX +from psyneulink.globals.keywords import SYSTEM, EXECUTING, SOFT_CLAMP, HARD_CLAMP, PULSE_CLAMP, NO_CLAMP, IDENTITY_MATRIX from psyneulink.scheduling.scheduler import Scheduler from psyneulink.scheduling.time import TimeScale @@ -155,6 +155,13 @@ def __init__(self, error_value): def __str__(self): return repr(self.error_value) +class RunError(Exception): + + def __init__(self, error_value): + self.error_value = error_value + + def __str__(self): + return repr(self.error_value) class Vertex(object): ''' @@ -968,15 +975,15 @@ def execute( for next_execution_set in execution_scheduler.run(termination_conds=termination_processing, execution_id=execution_id): if call_after_pass: - if next_pass_after == execution_scheduler.times[execution_id][TimeScale.TRIAL][TimeScale.PASS]: - logger.debug('next_pass_after {0}\tscheduler pass {1}'.format(next_pass_after, execution_scheduler.times[execution_id][TimeScale.TRIAL][TimeScale.PASS])) + if next_pass_after == execution_scheduler.clocks[execution_id].get_total_times_relative(TimeScale.PASS, TimeScale.TRIAL): + logger.debug('next_pass_after {0}\tscheduler pass {1}'.format(next_pass_after, execution_scheduler.clocks[execution_id].get_total_times_relative(TimeScale.PASS, TimeScale.TRIAL))) call_after_pass() next_pass_after += 1 if call_before_pass: - if next_pass_before == execution_scheduler.times[execution_id][TimeScale.TRIAL][TimeScale.PASS]: + if next_pass_before == execution_scheduler.clocks[execution_id].get_total_times_relative(TimeScale.PASS, TimeScale.TRIAL): call_before_pass() - logger.debug('next_pass_before {0}\tscheduler pass {1}'.format(next_pass_before, execution_scheduler.times[execution_id][TimeScale.TRIAL][TimeScale.PASS])) + logger.debug('next_pass_before {0}\tscheduler pass {1}'.format(next_pass_before, 
execution_scheduler.clocks[execution_id].get_total_times_relative(TimeScale.PASS, TimeScale.TRIAL))) next_pass_before += 1 if call_before_time_step: @@ -1006,13 +1013,8 @@ def execute( if isinstance(mechanism, Mechanism): current_context = EXECUTING + "composition " - # if isinstance(mechanism, LearningMechanism) or isinstance(mechanism, ComparatorMechanism): - # current_context += "LEARNING " - if any(isinstance(m, LearningMechanism) for m in self.mechanisms): - current_context += " LEARNING " num = mechanism.execute(context=current_context) - if mechanism in origin_mechanisms: if clamp_input: if mechanism in pulse_clamp_inputs: @@ -1117,6 +1119,7 @@ def run( # ------------------------------------ FROM DEVEL START ------------------------------------ origin_mechanisms = self.get_mechanisms_by_role(MechanismRole.ORIGIN) + # if there is only one origin mechanism, allow inputs to be specified in a list if isinstance(inputs, (list, np.ndarray)): if len(origin_mechanisms) == 1: @@ -1125,6 +1128,7 @@ def run( raise CompositionError("Inputs to {} must be specified in a dictionary with a key for each of its {} origin " "mechanisms.".format(self.name, len(origin_mechanisms))) elif not isinstance(inputs, dict): + print(inputs) if len(origin_mechanisms) == 1: raise CompositionError( "Inputs to {} must be specified in a list or in a dictionary with the origin mechanism({}) " @@ -1133,7 +1137,7 @@ def run( raise CompositionError("Inputs to {} must be specified in a dictionary with a key for each of its {} origin " "mechanisms.".format(self.name, len(origin_mechanisms))) - inputs, num_inputs_sets = self._adjust_stimulus_dict(self, inputs) + inputs, num_inputs_sets = self._adjust_stimulus_dict(inputs) if num_trials is not None: num_trials = num_trials @@ -1209,18 +1213,18 @@ def run( # self.target = execution_targets # self.current_targets = execution_targets - # execute learning + # TBI execute learning # pass along the targets for this trial - self.learning_composition.execute(execution_targets, - scheduler_processing, - scheduler_learning, - call_before_time_step, - call_before_pass, - call_after_time_step, - call_after_pass, - execution_id, - clamp_input, - ) + # self.learning_composition.execute(execution_targets, + # scheduler_processing, + # scheduler_learning, + # call_before_time_step, + # call_before_pass, + # call_after_time_step, + # call_after_pass, + # execution_id, + # clamp_input, + # ) if call_after_trial: call_after_trial() @@ -1250,7 +1254,7 @@ def _adjust_stimulus_dict(self, stimuli): # Check that all of the mechanisms listed in the inputs dict are ORIGIN mechanisms in the self origin_mechanisms = self.get_mechanisms_by_role(MechanismRole.ORIGIN) for mech in stimuli.keys(): - if not mech in origin_mechanisms.mechanisms: + if not mech in origin_mechanisms: raise CompositionError("{} in inputs dict for {} is not one of its ORIGIN mechanisms". 
format(mech.name, self.name)) # Check that all of the ORIGIN mechanisms in the self are represented by entries in the inputs dict @@ -1293,7 +1297,7 @@ def _adjust_stimulus_dict(self, stimuli): else: adjusted_stimuli[mech] = [] for stim in stimuli[mech]: - check_spec_type = _input_matches_variable(stim, mech.instance_defaults.variable) + check_spec_type = self._input_matches_variable(stim, mech.instance_defaults.variable) # loop over each input to verify that it matches variable if check_spec_type == False: err_msg = "Input stimulus ({}) for {} is incompatible with its variable ({}).".\ diff --git a/psyneulink/globals/keywords.py b/psyneulink/globals/keywords.py index c20f19bebf4..dd168da0707 100644 --- a/psyneulink/globals/keywords.py +++ b/psyneulink/globals/keywords.py @@ -592,6 +592,8 @@ def _is_metric(metric): CLAMP_INPUT = "clamp_input" SOFT_CLAMP = "soft_clamp" HARD_CLAMP = "hard_clamp" +PULSE_CLAMP = "pulse_clamp" +NO_CLAMP = "no_clamp" LEARNING = 'LEARNING' LEARNING_RATE = "learning_rate" CONTROL = 'CONTROL' diff --git a/tests/composition/test_composition.py b/tests/composition/test_composition.py index 347428d6350..a12d6894e1f 100644 --- a/tests/composition/test_composition.py +++ b/tests/composition/test_composition.py @@ -734,19 +734,19 @@ def test_cycle_x_multiple_incoming(self): class TestRun: - def test_run_2_mechanisms_default_input_1(self): - comp = Composition() - A = IntegratorMechanism(default_variable=1.0, function=Linear(slope=5.0)) - B = TransferMechanism(function=Linear(slope=5.0)) - comp.add_mechanism(A) - comp.add_mechanism(B) - comp.add_projection(A, MappingProjection(sender=A, receiver=B), B) - comp._analyze_graph() - sched = Scheduler(composition=comp) - output = comp.run( - scheduler_processing=sched - ) - assert 25 == output[0][0] + # def test_run_2_mechanisms_default_input_1(self): + # comp = Composition() + # A = IntegratorMechanism(default_variable=1.0, function=Linear(slope=5.0)) + # B = TransferMechanism(function=Linear(slope=5.0)) + # comp.add_mechanism(A) + # comp.add_mechanism(B) + # comp.add_projection(A, MappingProjection(sender=A, receiver=B), B) + # comp._analyze_graph() + # sched = Scheduler(composition=comp) + # output = comp.run( + # scheduler_processing=sched + # ) + # assert 25 == output[0][0] def test_run_2_mechanisms_input_5(self): comp = Composition() @@ -778,8 +778,7 @@ def test_projection_assignment_mistake_swap(self): comp.add_projection(A, MappingProjection(sender=A, receiver=C), C) with pytest.raises(CompositionError) as error_text: comp.add_projection(B, MappingProjection(sender=B, receiver=D), C) - - assert "is incompatible with the positions of these components in their composition" in str(error_text.value) + assert "is incompatible with the positions of these Components in their Composition" in str(error_text.value) def test_projection_assignment_mistake_swap2(self): # A ----> C -- @@ -800,7 +799,7 @@ def test_projection_assignment_mistake_swap2(self): with pytest.raises(CompositionError) as error_text: comp.add_projection(B, MappingProjection(sender=B, receiver=C), D) - assert "is incompatible with the positions of these components in their composition" in str(error_text.value) + assert "is incompatible with the positions of these Components in their Composition" in str(error_text.value) def test_run_5_mechanisms_2_origins_1_terminal(self): # A ----> C -- @@ -932,24 +931,6 @@ def test_run_2_mechanisms_reuse_input(self): ) assert 125 == output[0][0] - def test_run_2_mechanisms_incorrect_trial_spec(self): - comp = Composition() - A 
= IntegratorMechanism(default_variable=1.0, function=Linear(slope=5.0)) - B = TransferMechanism(function=Linear(slope=5.0)) - comp.add_mechanism(A) - comp.add_mechanism(B) - comp.add_projection(A, MappingProjection(sender=A, receiver=B), B) - comp._analyze_graph() - inputs_dict = {A: [[5], [4], [3]]} - sched = Scheduler(composition=comp) - with pytest.raises(CompositionError) as error_text: - comp.run( - inputs=inputs_dict, - scheduler_processing=sched, - num_trials=5 - ) - assert "number of trials" in str(error_text.value) and "does not match the length" in str(error_text.value) - def test_run_2_mechanisms_double_trial_specs(self): comp = Composition() A = IntegratorMechanism(default_variable=1.0, function=Linear(slope=5.0)) @@ -1044,7 +1025,7 @@ def test_LPP_two_projections_in_a_row(self): with pytest.raises(CompositionError) as error_text: comp.add_linear_processing_pathway([A, B_to_C, A_to_B, B, C]) - assert "A projection in a linear processing pathway must be preceded by a Mechanism and followed by a " \ + assert "A Projection in a linear processing pathway must be preceded by a Mechanism and followed by a " \ "Mechanism" \ in str(error_text.value) @@ -1067,7 +1048,7 @@ def test_LPP_wrong_component(self): with pytest.raises(CompositionError) as error_text: comp.add_linear_processing_pathway([A, Nonsense, B]) - assert "A linear processing pathway must be made up of projections and mechanisms." in str( + assert "A linear processing pathway must be made up of Projections and Mechanisms." in str( error_text.value) def test_LPP_two_origins_one_terminal(self): @@ -1141,7 +1122,7 @@ def record_trial(): comp.run( inputs=inputs_dict, scheduler_processing=sched, - call_before_time_step=cb_timestep(sched, time_step_array), + call_after_time_step=cb_timestep(sched, time_step_array), call_before_trial=cb_trial(sched, trial_array), call_before_pass=cb_pass(sched, pass_array) ) @@ -1161,7 +1142,7 @@ def record_values(d, time_scale, *mechs): if mech.value is None: d[time_scale][mech].append(np.nan) else: - d[time_scale][mech].append(mech.value) + d[time_scale][mech].append(mech.value[0]) comp = Composition() @@ -1243,7 +1224,7 @@ def record_values(d, time_scale, *mechs): if mech.value is None: d[time_scale][mech].append(np.nan) else: - d[time_scale][mech].append(mech.value) + d[time_scale][mech].append(mech.value[0]) comp = Composition() From 3987d2565ea06bd6276a238f55920f81c4cae6c4 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Tue, 24 Apr 2018 16:44:19 -0400 Subject: [PATCH 017/200] setting new context object to 'processing' on each mechanism that is called by composition's run method - resolves bug where variable = 0 everywhere --- psyneulink/compositions/composition.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/psyneulink/compositions/composition.py b/psyneulink/compositions/composition.py index f5b9b1033ae..a9f9cafde5c 100644 --- a/psyneulink/compositions/composition.py +++ b/psyneulink/compositions/composition.py @@ -60,6 +60,7 @@ from psyneulink.globals.keywords import SYSTEM, EXECUTING, SOFT_CLAMP, HARD_CLAMP, PULSE_CLAMP, NO_CLAMP, IDENTITY_MATRIX from psyneulink.scheduling.scheduler import Scheduler from psyneulink.scheduling.time import TimeScale +from psyneulink.globals.context import ContextFlags, ContextStatus __all__ = [ 'Composition', 'CompositionError', 'MechanismRole', @@ -1012,6 +1013,8 @@ def execute( if isinstance(mechanism, Mechanism): current_context = EXECUTING + "composition " + mechanism.context.execution_phase = ContextFlags.PROCESSING + num = 
mechanism.execute(context=EXECUTING + "composition") num = mechanism.execute(context=current_context) if mechanism in origin_mechanisms: From d585ff1b16db4e9cef5d7ac2cca86a24034633c1 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Tue, 24 Apr 2018 16:51:04 -0400 Subject: [PATCH 018/200] another context fix during composition run method (setting deferred init for initialization_status on projection) --- psyneulink/compositions/composition.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/psyneulink/compositions/composition.py b/psyneulink/compositions/composition.py index a9f9cafde5c..ed2988b5bbb 100644 --- a/psyneulink/compositions/composition.py +++ b/psyneulink/compositions/composition.py @@ -573,6 +573,7 @@ def _validate_projection(self, sender, projection, receiver): # assign them based on the sender and receiver passed into add_projection() projection.init_args['sender'] = sender projection.init_args['receiver'] = receiver + projection.context.initialization_status = ContextFlags.DEFERRED_INIT projection._deferred_init(context=" INITIALIZING ") if projection.sender.owner != sender: @@ -1014,7 +1015,6 @@ def execute( if isinstance(mechanism, Mechanism): current_context = EXECUTING + "composition " mechanism.context.execution_phase = ContextFlags.PROCESSING - num = mechanism.execute(context=EXECUTING + "composition") num = mechanism.execute(context=current_context) if mechanism in origin_mechanisms: From 3aefa1273ff40dfbc6b13209c0a626d3364408f2 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Tue, 24 Apr 2018 17:22:51 -0400 Subject: [PATCH 019/200] fixing broken pytest: need execution_id in order to look up correct time values from scheduler --- tests/composition/test_composition.py | 35 +++++++++++++++------------ 1 file changed, 19 insertions(+), 16 deletions(-) diff --git a/tests/composition/test_composition.py b/tests/composition/test_composition.py index a12d6894e1f..fdd0758f775 100644 --- a/tests/composition/test_composition.py +++ b/tests/composition/test_composition.py @@ -1081,14 +1081,27 @@ def test_LPP_two_origins_one_terminal(self): class TestCallBeforeAfterTimescale: def test_call_before_record_timescale(self): + + comp = Composition() + + A = TransferMechanism(name="A [transfer]", function=Linear(slope=2.0)) + B = TransferMechanism(name="B [transfer]", function=Linear(slope=5.0)) + comp.add_mechanism(A) + comp.add_mechanism(B) + comp.add_projection(A, MappingProjection(sender=A, receiver=B), B) + comp._analyze_graph() + inputs_dict = {A: [1, 2, 3, 4]} + sched = Scheduler(composition=comp) + time_step_array = [] trial_array = [] pass_array = [] def cb_timestep(scheduler, arr): - def record_timestep(): - arr.append(scheduler.clock.get_total_times_relative(TimeScale.TIME_STEP, TimeScale.TRIAL)) + def record_timestep(): + + arr.append(scheduler.clocks[comp._execution_id].get_total_times_relative(TimeScale.TIME_STEP, TimeScale.TRIAL)) return record_timestep @@ -1096,7 +1109,7 @@ def cb_pass(scheduler, arr): def record_pass(): - arr.append(scheduler.clock.get_total_times_relative(TimeScale.PASS, TimeScale.RUN)) + arr.append(scheduler.clocks[comp._execution_id].get_total_times_relative(TimeScale.PASS, TimeScale.RUN)) return record_pass @@ -1104,21 +1117,10 @@ def cb_trial(scheduler, arr): def record_trial(): - arr.append(scheduler.clock.get_total_times_relative(TimeScale.TRIAL, TimeScale.LIFE)) + arr.append(scheduler.clocks[comp._execution_id].get_total_times_relative(TimeScale.TRIAL, TimeScale.LIFE)) return record_trial - comp = Composition() - - A = 
TransferMechanism(name="A [transfer]", function=Linear(slope=2.0)) - B = TransferMechanism(name="B [transfer]", function=Linear(slope=5.0)) - comp.add_mechanism(A) - comp.add_mechanism(B) - comp.add_projection(A, MappingProjection(sender=A, receiver=B), B) - comp._analyze_graph() - inputs_dict = {A: [1, 2, 3, 4]} - sched = Scheduler(composition=comp) - comp.run( inputs=inputs_dict, scheduler_processing=sched, @@ -1126,7 +1128,8 @@ def record_trial(): call_before_trial=cb_trial(sched, trial_array), call_before_pass=cb_pass(sched, pass_array) ) - + from pprint import pprint + pprint(comp.scheduler_processing.clocks) assert time_step_array == [0, 1, 0, 1, 0, 1, 0, 1] assert trial_array == [0, 1, 2, 3] assert pass_array == [0, 1, 2, 3] From 74ea34fc69fc2e24da8f5589e4f4cfb94c993ea5 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Tue, 24 Apr 2018 17:23:32 -0400 Subject: [PATCH 020/200] fixing broken pytest: need execution_id in order to look up correct time values from scheduler --- tests/composition/test_composition.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/composition/test_composition.py b/tests/composition/test_composition.py index fdd0758f775..c515f7e9cad 100644 --- a/tests/composition/test_composition.py +++ b/tests/composition/test_composition.py @@ -1100,7 +1100,7 @@ def test_call_before_record_timescale(self): def cb_timestep(scheduler, arr): def record_timestep(): - + arr.append(scheduler.clocks[comp._execution_id].get_total_times_relative(TimeScale.TIME_STEP, TimeScale.TRIAL)) return record_timestep @@ -1128,8 +1128,6 @@ def record_trial(): call_before_trial=cb_trial(sched, trial_array), call_before_pass=cb_pass(sched, pass_array) ) - from pprint import pprint - pprint(comp.scheduler_processing.clocks) assert time_step_array == [0, 1, 0, 1, 0, 1, 0, 1] assert trial_array == [0, 1, 2, 3] assert pass_array == [0, 1, 2, 3] From 181f4084d2444b515c325885514274ee856f6afa Mon Sep 17 00:00:00 2001 From: KristenManning Date: Tue, 24 Apr 2018 17:48:00 -0400 Subject: [PATCH 021/200] renaming pytest mechanisms more specifically in order to avoid name registry problems (in doctests) --- tests/composition/test_composition.py | 238 +++++++++++++------------- tests/scheduling/test_scheduler.py | 170 +++++++++--------- 2 files changed, 204 insertions(+), 204 deletions(-) diff --git a/tests/composition/test_composition.py b/tests/composition/test_composition.py index c515f7e9cad..70c25e3f2a1 100644 --- a/tests/composition/test_composition.py +++ b/tests/composition/test_composition.py @@ -109,16 +109,16 @@ class TestAddProjection: def test_add_once(self): comp = Composition() - A = TransferMechanism(name='A') - B = TransferMechanism(name='B') + A = TransferMechanism(name='composition-pytests-A') + B = TransferMechanism(name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_projection(A, MappingProjection(), B) def test_add_twice(self): comp = Composition() - A = TransferMechanism(name='A') - B = TransferMechanism(name='B') + A = TransferMechanism(name='composition-pytests-A') + B = TransferMechanism(name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_projection(A, MappingProjection(), B) @@ -126,8 +126,8 @@ def test_add_twice(self): def test_add_same_twice(self): comp = Composition() - A = TransferMechanism(name='A') - B = TransferMechanism(name='B') + A = TransferMechanism(name='composition-pytests-A') + B = TransferMechanism(name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) proj = 
MappingProjection() @@ -149,8 +149,8 @@ def test_timing_stress(self, count): from psyneulink.compositions.composition import Composition comp = Composition() -A = TransferMechanism(name='A') -B = TransferMechanism(name='B') +A = TransferMechanism(name='composition-pytests-A') +B = TransferMechanism(name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) ''', @@ -172,8 +172,8 @@ def test_timing_stress(self, count): from psyneulink.components.projections.pathway.mappingprojection import MappingProjection from psyneulink.composition import Composition comp = Composition() -A = TransferMechanism(name='A') -B = TransferMechanism(name='B') +A = TransferMechanism(name='composition-pytests-A') +B = TransferMechanism(name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) ''', @@ -192,7 +192,7 @@ def test_empty_call(self): def test_singleton(self): comp = Composition() - A = TransferMechanism(name='A') + A = TransferMechanism(name='composition-pytests-A') comp.add_mechanism(A) comp._analyze_graph() assert A in comp.get_mechanisms_by_role(MechanismRole.ORIGIN) @@ -200,8 +200,8 @@ def test_singleton(self): def test_two_independent(self): comp = Composition() - A = TransferMechanism(name='A') - B = TransferMechanism(name='B') + A = TransferMechanism(name='composition-pytests-A') + B = TransferMechanism(name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) comp._analyze_graph() @@ -212,8 +212,8 @@ def test_two_independent(self): def test_two_in_a_row(self): comp = Composition() - A = TransferMechanism(name='A') - B = TransferMechanism(name='B') + A = TransferMechanism(name='composition-pytests-A') + B = TransferMechanism(name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_projection(A, MappingProjection(), B) @@ -226,8 +226,8 @@ def test_two_in_a_row(self): # (A)<->(B) def test_two_recursive(self): comp = Composition() - A = TransferMechanism(name='A') - B = TransferMechanism(name='B') + A = TransferMechanism(name='composition-pytests-A') + B = TransferMechanism(name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_projection(A, MappingProjection(), B) @@ -243,10 +243,10 @@ def test_two_recursive(self): # (A)->(B)<->(C)<-(D) def test_two_origins_pointing_to_recursive_pair(self): comp = Composition() - A = TransferMechanism(name='A') - B = TransferMechanism(name='B') - C = TransferMechanism(name='C') - D = TransferMechanism(name='D') + A = TransferMechanism(name='composition-pytests-A') + B = TransferMechanism(name='composition-pytests-B') + C = TransferMechanism(name='composition-pytests-C') + D = TransferMechanism(name='composition-pytests-D') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_mechanism(C) @@ -265,8 +265,8 @@ class TestValidateFeedDict: def test_empty_feed_dicts(self): comp = Composition() - A = TransferMechanism(name='A') - B = TransferMechanism(name='B') + A = TransferMechanism(name='composition-pytests-A') + B = TransferMechanism(name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_projection(A, MappingProjection(), B) @@ -278,8 +278,8 @@ def test_empty_feed_dicts(self): def test_origin_and_terminal_with_mapping(self): comp = Composition() - A = TransferMechanism(name='A') - B = TransferMechanism(name='B') + A = TransferMechanism(name='composition-pytests-A') + B = TransferMechanism(name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_projection(A, MappingProjection(), B) @@ -291,8 +291,8 @@ def 
test_origin_and_terminal_with_mapping(self): def test_origin_and_terminal_with_swapped_feed_dicts_1(self): comp = Composition() - A = TransferMechanism(name='A') - B = TransferMechanism(name='B') + A = TransferMechanism(name='composition-pytests-A') + B = TransferMechanism(name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_projection(A, MappingProjection(), B) @@ -304,8 +304,8 @@ def test_origin_and_terminal_with_swapped_feed_dicts_1(self): def test_origin_and_terminal_with_swapped_feed_dicts_2(self): comp = Composition() - A = TransferMechanism(name='A') - B = TransferMechanism(name='B') + A = TransferMechanism(name='composition-pytests-A') + B = TransferMechanism(name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_projection(A, MappingProjection(), B) @@ -317,9 +317,9 @@ def test_origin_and_terminal_with_swapped_feed_dicts_2(self): def test_multiple_origin_mechs(self): comp = Composition() - A = TransferMechanism(name='A') - B = TransferMechanism(name='B') - C = TransferMechanism(name='C') + A = TransferMechanism(name='composition-pytests-A') + B = TransferMechanism(name='composition-pytests-B') + C = TransferMechanism(name='composition-pytests-C') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_mechanism(C) @@ -333,9 +333,9 @@ def test_multiple_origin_mechs(self): def test_multiple_origin_mechs_only_one_in_feed_dict(self): comp = Composition() - A = TransferMechanism(name='A') - B = TransferMechanism(name='B') - C = TransferMechanism(name='C') + A = TransferMechanism(name='composition-pytests-A') + B = TransferMechanism(name='composition-pytests-B') + C = TransferMechanism(name='composition-pytests-C') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_mechanism(C) @@ -349,8 +349,8 @@ def test_multiple_origin_mechs_only_one_in_feed_dict(self): def test_input_state_len_3(self): comp = Composition() - A = TransferMechanism(default_variable=[0, 1, 2], name='A') - B = TransferMechanism(default_variable=[0, 1, 2], name='B') + A = TransferMechanism(default_variable=[0, 1, 2], name='composition-pytests-A') + B = TransferMechanism(default_variable=[0, 1, 2], name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_projection(A, MappingProjection(), B) @@ -362,8 +362,8 @@ def test_input_state_len_3(self): def test_input_state_len_3_feed_dict_len_2(self): comp = Composition() - A = TransferMechanism(default_variable=[0, 1, 2], name='A') - B = TransferMechanism(default_variable=[0, 1, 2], name='B') + A = TransferMechanism(default_variable=[0, 1, 2], name='composition-pytests-A') + B = TransferMechanism(default_variable=[0, 1, 2], name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_projection(A, MappingProjection(), B) @@ -375,8 +375,8 @@ def test_input_state_len_3_feed_dict_len_2(self): def test_input_state_len_2_feed_dict_len_3(self): comp = Composition() - A = TransferMechanism(default_variable=[0, 1], name='A') - B = TransferMechanism(default_variable=[0, 1], name='B') + A = TransferMechanism(default_variable=[0, 1], name='composition-pytests-A') + B = TransferMechanism(default_variable=[0, 1], name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_projection(A, MappingProjection(), B) @@ -388,8 +388,8 @@ def test_input_state_len_2_feed_dict_len_3(self): def test_feed_dict_includes_mechs_of_correct_and_incorrect_types(self): comp = Composition() - A = TransferMechanism(default_variable=[0], name='A') - B = 
TransferMechanism(default_variable=[0], name='B') + A = TransferMechanism(default_variable=[0], name='composition-pytests-A') + B = TransferMechanism(default_variable=[0], name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_projection(A, MappingProjection(), B) @@ -400,8 +400,8 @@ def test_feed_dict_includes_mechs_of_correct_and_incorrect_types(self): def test_input_state_len_3_brackets_extra_1(self): comp = Composition() - A = TransferMechanism(default_variable=[0, 1, 2], name='A') - B = TransferMechanism(default_variable=[0, 1, 2], name='B') + A = TransferMechanism(default_variable=[0, 1, 2], name='composition-pytests-A') + B = TransferMechanism(default_variable=[0, 1, 2], name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_projection(A, MappingProjection(), B) @@ -413,8 +413,8 @@ def test_input_state_len_3_brackets_extra_1(self): def test_input_state_len_3_brackets_missing_1(self): comp = Composition() - A = TransferMechanism(default_variable=[0, 1, 2], name='A') - B = TransferMechanism(default_variable=[0, 1, 2], name='B') + A = TransferMechanism(default_variable=[0, 1, 2], name='composition-pytests-A') + B = TransferMechanism(default_variable=[0, 1, 2], name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_projection(A, MappingProjection(), B) @@ -426,8 +426,8 @@ def test_input_state_len_3_brackets_missing_1(self): def test_empty_feed_dict_for_empty_type(self): comp = Composition() - A = TransferMechanism(default_variable=[0], name='A') - B = TransferMechanism(default_variable=[0], name='B') + A = TransferMechanism(default_variable=[0], name='composition-pytests-A') + B = TransferMechanism(default_variable=[0], name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_projection(A, MappingProjection(), B) @@ -439,7 +439,7 @@ def test_empty_feed_dict_for_empty_type(self): def test_mech_in_feed_dict_for_empty_type(self): comp = Composition() A = TransferMechanism(default_variable=[0]) - B = TransferMechanism(name='B') + B = TransferMechanism(name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_projection(A, MappingProjection(), B) @@ -469,8 +469,8 @@ def test_one_mech_2(self): def test_multiple_time_steps_1(self): comp = Composition() - A = TransferMechanism(default_variable=[[0, 1, 2]], name='A') - B = TransferMechanism(default_variable=[[0, 1, 2]], name='B') + A = TransferMechanism(default_variable=[[0, 1, 2]], name='composition-pytests-A') + B = TransferMechanism(default_variable=[[0, 1, 2]], name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_projection(A, MappingProjection(), B) @@ -482,8 +482,8 @@ def test_multiple_time_steps_1(self): def test_multiple_time_steps_2(self): comp = Composition() - A = TransferMechanism(default_variable=[[0, 1, 2]], name='A') - B = TransferMechanism(default_variable=[[0, 1, 2]], name='B') + A = TransferMechanism(default_variable=[[0, 1, 2]], name='composition-pytests-A') + B = TransferMechanism(default_variable=[[0, 1, 2]], name='composition-pytests-B') comp.add_mechanism(A) comp.add_mechanism(B) comp.add_projection(A, MappingProjection(), B) @@ -531,9 +531,9 @@ class TestProcessingGraph: def test_all_mechanisms(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') - C = TransferMechanism(function=Linear(intercept=1.5), name='C') + A = 
TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='composition-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='composition-pytests-B') + C = TransferMechanism(function=Linear(intercept=1.5), name='composition-pytests-C') mechs = [A, B, C] for m in mechs: comp.add_mechanism(m) @@ -553,9 +553,9 @@ def test_all_mechanisms(self): def test_triangle(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') - C = TransferMechanism(function=Linear(intercept=1.5), name='C') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='composition-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='composition-pytests-B') + C = TransferMechanism(function=Linear(intercept=1.5), name='composition-pytests-C') mechs = [A, B, C] for m in mechs: comp.add_mechanism(m) @@ -577,11 +577,11 @@ def test_triangle(self): def test_x(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') - C = TransferMechanism(function=Linear(intercept=1.5), name='C') - D = TransferMechanism(function=Linear(intercept=1.5), name='D') - E = TransferMechanism(function=Linear(intercept=1.5), name='E') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='composition-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='composition-pytests-B') + C = TransferMechanism(function=Linear(intercept=1.5), name='composition-pytests-C') + D = TransferMechanism(function=Linear(intercept=1.5), name='composition-pytests-D') + E = TransferMechanism(function=Linear(intercept=1.5), name='composition-pytests-E') mechs = [A, B, C, D, E] for m in mechs: comp.add_mechanism(m) @@ -615,9 +615,9 @@ def test_x(self): def test_cycle_linear(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') - C = TransferMechanism(function=Linear(intercept=1.5), name='C') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='composition-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='composition-pytests-B') + C = TransferMechanism(function=Linear(intercept=1.5), name='composition-pytests-C') mechs = [A, B, C] for m in mechs: comp.add_mechanism(m) @@ -640,11 +640,11 @@ def test_cycle_linear(self): def test_cycle_x(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') - C = TransferMechanism(function=Linear(intercept=1.5), name='C') - D = TransferMechanism(function=Linear(intercept=1.5), name='D') - E = TransferMechanism(function=Linear(intercept=1.5), name='E') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='composition-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='composition-pytests-B') + C = TransferMechanism(function=Linear(intercept=1.5), name='composition-pytests-C') + D = TransferMechanism(function=Linear(intercept=1.5), name='composition-pytests-D') + E = TransferMechanism(function=Linear(intercept=1.5), name='composition-pytests-E') mechs = [A, B, C, D, E] for m in mechs: comp.add_mechanism(m) @@ -680,11 +680,11 @@ def test_cycle_x(self): def test_cycle_x_multiple_incoming(self): comp = Composition() - A 
= TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') - C = TransferMechanism(function=Linear(intercept=1.5), name='C') - D = TransferMechanism(function=Linear(intercept=1.5), name='D') - E = TransferMechanism(function=Linear(intercept=1.5), name='E') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='composition-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='composition-pytests-B') + C = TransferMechanism(function=Linear(intercept=1.5), name='composition-pytests-C') + D = TransferMechanism(function=Linear(intercept=1.5), name='composition-pytests-D') + E = TransferMechanism(function=Linear(intercept=1.5), name='composition-pytests-E') mechs = [A, B, C, D, E] for m in mechs: comp.add_mechanism(m) @@ -767,10 +767,10 @@ def test_run_2_mechanisms_input_5(self): def test_projection_assignment_mistake_swap(self): comp = Composition() - A = TransferMechanism(name="A", function=Linear(slope=1.0)) - B = TransferMechanism(name="B", function=Linear(slope=1.0)) - C = TransferMechanism(name="C", function=Linear(slope=5.0)) - D = TransferMechanism(name="D", function=Linear(slope=5.0)) + A = TransferMechanism(name="composition-pytests-A", function=Linear(slope=1.0)) + B = TransferMechanism(name="composition-pytests-B", function=Linear(slope=1.0)) + C = TransferMechanism(name="composition-pytests-C", function=Linear(slope=5.0)) + D = TransferMechanism(name="composition-pytests-D", function=Linear(slope=5.0)) comp.add_mechanism(A) comp.add_mechanism(B) comp.add_mechanism(C) @@ -786,11 +786,11 @@ def test_projection_assignment_mistake_swap2(self): # B ----> D -- comp = Composition() - A = TransferMechanism(name="A", function=Linear(slope=1.0)) - B = TransferMechanism(name="B", function=Linear(slope=1.0)) - C = TransferMechanism(name="C", function=Linear(slope=5.0)) - D = TransferMechanism(name="D", function=Linear(slope=5.0)) - E = TransferMechanism(name="E", function=Linear(slope=5.0)) + A = TransferMechanism(name="composition-pytests-A", function=Linear(slope=1.0)) + B = TransferMechanism(name="composition-pytests-B", function=Linear(slope=1.0)) + C = TransferMechanism(name="composition-pytests-C", function=Linear(slope=5.0)) + D = TransferMechanism(name="composition-pytests-D", function=Linear(slope=5.0)) + E = TransferMechanism(name="composition-pytests-E", function=Linear(slope=5.0)) comp.add_mechanism(A) comp.add_mechanism(B) comp.add_mechanism(C) @@ -811,11 +811,11 @@ def test_run_5_mechanisms_2_origins_1_terminal(self): # 5 * 1 = 5 ----> 5 x 5 = 25 -- comp = Composition() - A = TransferMechanism(name="A", function=Linear(slope=1.0)) - B = TransferMechanism(name="B", function=Linear(slope=1.0)) - C = TransferMechanism(name="C", function=Linear(slope=5.0)) - D = TransferMechanism(name="D", function=Linear(slope=5.0)) - E = TransferMechanism(name="E", function=Linear(slope=5.0)) + A = TransferMechanism(name="composition-pytests-A", function=Linear(slope=1.0)) + B = TransferMechanism(name="composition-pytests-B", function=Linear(slope=1.0)) + C = TransferMechanism(name="composition-pytests-C", function=Linear(slope=5.0)) + D = TransferMechanism(name="composition-pytests-D", function=Linear(slope=5.0)) + E = TransferMechanism(name="composition-pytests-E", function=Linear(slope=5.0)) comp.add_mechanism(A) comp.add_mechanism(B) comp.add_mechanism(C) @@ -967,11 +967,11 @@ def test_execute_composition(self): def test_LPP(self): comp = Composition() - A = 
TransferMechanism(name="A", function=Linear(slope=2.0)) # 1 x 2 = 2 - B = TransferMechanism(name="B", function=Linear(slope=2.0)) # 2 x 2 = 4 - C = TransferMechanism(name="C", function=Linear(slope=2.0)) # 4 x 2 = 8 - D = TransferMechanism(name="D", function=Linear(slope=2.0)) # 8 x 2 = 16 - E = TransferMechanism(name="E", function=Linear(slope=2.0)) # 16 x 2 = 32 + A = TransferMechanism(name="composition-pytests-A", function=Linear(slope=2.0)) # 1 x 2 = 2 + B = TransferMechanism(name="composition-pytests-B", function=Linear(slope=2.0)) # 2 x 2 = 4 + C = TransferMechanism(name="composition-pytests-C", function=Linear(slope=2.0)) # 4 x 2 = 8 + D = TransferMechanism(name="composition-pytests-D", function=Linear(slope=2.0)) # 8 x 2 = 16 + E = TransferMechanism(name="composition-pytests-E", function=Linear(slope=2.0)) # 16 x 2 = 32 comp.add_linear_processing_pathway([A, B, C, D, E]) comp._analyze_graph() inputs_dict = {A: [[1]]} @@ -984,11 +984,11 @@ def test_LPP(self): def test_LPP_with_projections(self): comp = Composition() - A = TransferMechanism(name="A", function=Linear(slope=2.0)) # 1 x 2 = 2 - B = TransferMechanism(name="B", function=Linear(slope=2.0)) # 2 x 2 = 4 - C = TransferMechanism(name="C", function=Linear(slope=2.0)) # 4 x 2 = 8 - D = TransferMechanism(name="D", function=Linear(slope=2.0)) # 8 x 2 = 16 - E = TransferMechanism(name="E", function=Linear(slope=2.0)) # 16 x 2 = 32 + A = TransferMechanism(name="composition-pytests-A", function=Linear(slope=2.0)) # 1 x 2 = 2 + B = TransferMechanism(name="composition-pytests-B", function=Linear(slope=2.0)) # 2 x 2 = 4 + C = TransferMechanism(name="composition-pytests-C", function=Linear(slope=2.0)) # 4 x 2 = 8 + D = TransferMechanism(name="composition-pytests-D", function=Linear(slope=2.0)) # 8 x 2 = 16 + E = TransferMechanism(name="composition-pytests-E", function=Linear(slope=2.0)) # 16 x 2 = 32 A_to_B = MappingProjection(sender=A, receiver=B) D_to_E = MappingProjection(sender=D, receiver=E) comp.add_linear_processing_pathway([A, A_to_B, B, C, D, D_to_E, E]) @@ -1003,11 +1003,11 @@ def test_LPP_with_projections(self): def test_LPP_end_with_projection(self): comp = Composition() - A = TransferMechanism(name="A", function=Linear(slope=2.0)) - B = TransferMechanism(name="B", function=Linear(slope=2.0)) - C = TransferMechanism(name="C", function=Linear(slope=2.0)) - D = TransferMechanism(name="D", function=Linear(slope=2.0)) - E = TransferMechanism(name="E", function=Linear(slope=2.0)) + A = TransferMechanism(name="composition-pytests-A", function=Linear(slope=2.0)) + B = TransferMechanism(name="composition-pytests-B", function=Linear(slope=2.0)) + C = TransferMechanism(name="composition-pytests-C", function=Linear(slope=2.0)) + D = TransferMechanism(name="composition-pytests-D", function=Linear(slope=2.0)) + E = TransferMechanism(name="composition-pytests-E", function=Linear(slope=2.0)) A_to_B = MappingProjection(sender=A, receiver=B) D_to_E = MappingProjection(sender=D, receiver=E) with pytest.raises(CompositionError) as error_text: @@ -1017,9 +1017,9 @@ def test_LPP_end_with_projection(self): def test_LPP_two_projections_in_a_row(self): comp = Composition() - A = TransferMechanism(name="A", function=Linear(slope=2.0)) - B = TransferMechanism(name="B", function=Linear(slope=2.0)) - C = TransferMechanism(name="C", function=Linear(slope=2.0)) + A = TransferMechanism(name="composition-pytests-A", function=Linear(slope=2.0)) + B = TransferMechanism(name="composition-pytests-B", function=Linear(slope=2.0)) + C = 
TransferMechanism(name="composition-pytests-C", function=Linear(slope=2.0)) A_to_B = MappingProjection(sender=A, receiver=B) B_to_C = MappingProjection(sender=B, receiver=C) with pytest.raises(CompositionError) as error_text: @@ -1032,8 +1032,8 @@ def test_LPP_two_projections_in_a_row(self): def test_LPP_start_with_projection(self): comp = Composition() Nonsense_Projection = MappingProjection() - A = TransferMechanism(name="A", function=Linear(slope=2.0)) - B = TransferMechanism(name="B", function=Linear(slope=2.0)) + A = TransferMechanism(name="composition-pytests-A", function=Linear(slope=2.0)) + B = TransferMechanism(name="composition-pytests-B", function=Linear(slope=2.0)) with pytest.raises(CompositionError) as error_text: comp.add_linear_processing_pathway([Nonsense_Projection, A, B]) @@ -1043,8 +1043,8 @@ def test_LPP_start_with_projection(self): def test_LPP_wrong_component(self): comp = Composition() Nonsense = "string" - A = TransferMechanism(name="A", function=Linear(slope=2.0)) - B = TransferMechanism(name="B", function=Linear(slope=2.0)) + A = TransferMechanism(name="composition-pytests-A", function=Linear(slope=2.0)) + B = TransferMechanism(name="composition-pytests-B", function=Linear(slope=2.0)) with pytest.raises(CompositionError) as error_text: comp.add_linear_processing_pathway([A, Nonsense, B]) @@ -1061,11 +1061,11 @@ def test_LPP_two_origins_one_terminal(self): # 5 * 1 = 5 ----> 5 x 5 = 25 -- comp = Composition() - A = TransferMechanism(name="A", function=Linear(slope=1.0)) - B = TransferMechanism(name="B", function=Linear(slope=1.0)) - C = TransferMechanism(name="C", function=Linear(slope=5.0)) - D = TransferMechanism(name="D", function=Linear(slope=5.0)) - E = TransferMechanism(name="E", function=Linear(slope=5.0)) + A = TransferMechanism(name="composition-pytests-A", function=Linear(slope=1.0)) + B = TransferMechanism(name="composition-pytests-B", function=Linear(slope=1.0)) + C = TransferMechanism(name="composition-pytests-C", function=Linear(slope=5.0)) + D = TransferMechanism(name="composition-pytests-D", function=Linear(slope=5.0)) + E = TransferMechanism(name="composition-pytests-E", function=Linear(slope=5.0)) comp.add_linear_processing_pathway([A, C, E]) comp.add_linear_processing_pathway([B, D, E]) comp._analyze_graph() diff --git a/tests/scheduling/test_scheduler.py b/tests/scheduling/test_scheduler.py index 604f5d6a840..e5f3f531797 100644 --- a/tests/scheduling/test_scheduler.py +++ b/tests/scheduling/test_scheduler.py @@ -24,7 +24,7 @@ def test_deepcopy(self): def test_create_multiple_contexts(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') comp.add_mechanism(A) comp.scheduler_processing.clock._increment_time(TimeScale.TRIAL) @@ -46,7 +46,7 @@ def test_create_multiple_contexts(self): def test_two_compositions_one_scheduler(self): comp1 = Composition() comp2 = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') comp1.add_mechanism(A) comp2.add_mechanism(A) @@ -85,7 +85,7 @@ def test_two_compositions_one_scheduler(self): def test_one_composition_two_contexts(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') comp.add_mechanism(A) 
sched = Scheduler(composition=comp) @@ -144,9 +144,9 @@ class TestLinear: def test_no_termination_conds(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') - C = TransferMechanism(function=Linear(intercept=1.5), name='C') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') + C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_mechanism(m) comp.add_projection(A, MappingProjection(), B) @@ -169,9 +169,9 @@ def test_no_termination_conds(self): # tests below are copied from old scheduler, need renaming def test_1(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') - C = TransferMechanism(function=Linear(intercept=1.5), name='C') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') + C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_mechanism(m) comp.add_projection(A, MappingProjection(), B) @@ -199,9 +199,9 @@ def test_1(self): def test_1b(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') - C = TransferMechanism(function=Linear(intercept=1.5), name='C') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') + C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_mechanism(m) comp.add_projection(A, MappingProjection(), B) @@ -229,9 +229,9 @@ def test_1b(self): def test_2(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') - C = TransferMechanism(function=Linear(intercept=1.5), name='C') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') + C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_mechanism(m) comp.add_projection(A, MappingProjection(), B) @@ -253,9 +253,9 @@ def test_2(self): def test_3(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') - C = TransferMechanism(function=Linear(intercept=1.5), name='C') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') + C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_mechanism(m) comp.add_projection(A, MappingProjection(), B) @@ -280,9 +280,9 @@ def test_3(self): def test_6(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') - C = 
TransferMechanism(function=Linear(intercept=1.5), name='C') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') + C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_mechanism(m) comp.add_projection(A, MappingProjection(), B) @@ -307,9 +307,9 @@ def test_6(self): def test_6_two_trials(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') - C = TransferMechanism(function=Linear(intercept=1.5), name='C') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') + C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_mechanism(m) comp.add_projection(A, MappingProjection(), B) @@ -340,8 +340,8 @@ def test_6_two_trials(self): def test_7(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_mechanism(m) comp.add_projection(A, MappingProjection(), B) @@ -361,8 +361,8 @@ def test_7(self): def test_8(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_mechanism(m) comp.add_projection(A, MappingProjection(), B) @@ -382,8 +382,8 @@ def test_8(self): def test_9(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_mechanism(m) comp.add_projection(A, MappingProjection(), B) @@ -410,9 +410,9 @@ def test_9(self): def test_9b(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') A.is_finished = False - B = TransferMechanism(function=Linear(intercept=4.0), name='B') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_mechanism(m) comp.add_projection(A, MappingProjection(), B) @@ -432,9 +432,9 @@ def test_9b(self): def test_10(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') A.is_finished = True - B = TransferMechanism(function=Linear(intercept=4.0), name='B') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_mechanism(m) @@ -455,9 +455,9 @@ def test_10(self): def test_10b(self): 
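        # Descriptive note: variant of test_10 with A.is_finished = False instead of True;
        # the test_10/10b/10c/10d family appears to probe how the scheduling conditions
        # behave when a mechanism does or does not report having finished.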
comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') A.is_finished = False - B = TransferMechanism(function=Linear(intercept=4.0), name='B') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_mechanism(m) @@ -478,9 +478,9 @@ def test_10b(self): def test_10c(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') A.is_finished = True - B = TransferMechanism(function=Linear(intercept=4.0), name='B') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_mechanism(m) @@ -501,9 +501,9 @@ def test_10c(self): def test_10d(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') A.is_finished = False - B = TransferMechanism(function=Linear(intercept=4.0), name='B') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_mechanism(m) @@ -527,8 +527,8 @@ def test_10d(self): ######################################## def test_linear_AAB(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_mechanism(m) comp.add_projection(A, MappingProjection(), B) @@ -548,8 +548,8 @@ def test_linear_AAB(self): def test_linear_ABB(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') for m in [A, B]: comp.add_mechanism(m) comp.add_projection(A, MappingProjection(), B) @@ -569,9 +569,9 @@ def test_linear_ABB(self): def test_linear_ABBCC(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') - C = TransferMechanism(function=Linear(intercept=1.5), name='C') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') + C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_mechanism(m) comp.add_projection(A, MappingProjection(), B) @@ -593,9 +593,9 @@ def test_linear_ABBCC(self): def test_linear_ABCBC(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') - C = TransferMechanism(function=Linear(intercept=1.5), name='C') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') + C = 
TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_mechanism(m) comp.add_projection(A, MappingProjection(), B) @@ -627,9 +627,9 @@ class TestBranching: def test_triangle_1(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') - C = TransferMechanism(function=Linear(intercept=1.5), name='C') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') + C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_mechanism(m) comp.add_projection(A, MappingProjection(), B) @@ -656,9 +656,9 @@ def test_triangle_1(self): def test_triangle_2(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') - C = TransferMechanism(function=Linear(intercept=1.5), name='C') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') + C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_mechanism(m) comp.add_projection(A, MappingProjection(), B) @@ -688,9 +688,9 @@ def test_triangle_2(self): def test_triangle_3(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') - C = TransferMechanism(function=Linear(intercept=1.5), name='C') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') + C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_mechanism(m) comp.add_projection(A, MappingProjection(), B) @@ -716,9 +716,9 @@ def test_triangle_3(self): # this is test 11 of original constraint_scheduler.py def test_triangle_4(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') - C = TransferMechanism(function=Linear(intercept=1.5), name='C') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') + C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_mechanism(m) @@ -748,9 +748,9 @@ def test_triangle_4(self): def test_triangle_4b(self): comp = Composition() - A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='A') - B = TransferMechanism(function=Linear(intercept=4.0), name='B') - C = TransferMechanism(function=Linear(intercept=1.5), name='C') + A = TransferMechanism(function=Linear(slope=5.0, intercept=2.0), name='scheduler-pytests-A') + B = TransferMechanism(function=Linear(intercept=4.0), name='scheduler-pytests-B') + C = TransferMechanism(function=Linear(intercept=1.5), name='scheduler-pytests-C') for m in [A, B, C]: comp.add_mechanism(m) @@ -786,9 +786,9 @@ def test_triangle_4b(self): # this test has an implicit priority set of A Date: Tue, 24 Apr 2018 18:00:15 -0400 Subject: [PATCH 
022/200] reintroducing composition's 'AnalyzeGraph' pytests - only skipping the two origin mechanism test with unstable output --- tests/composition/test_composition.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/composition/test_composition.py b/tests/composition/test_composition.py index 70c25e3f2a1..4e693b3ca86 100644 --- a/tests/composition/test_composition.py +++ b/tests/composition/test_composition.py @@ -183,7 +183,6 @@ def test_timing_stress(self, count): logger.info('completed {0} addition{2} of a projection to a composition in {1:.8f}s'.format(count, t, 's' if count != 1 else '')) -@pytest.mark.skip class TestAnalyzeGraph: def test_empty_call(self): @@ -241,6 +240,7 @@ def test_two_recursive(self): assert B in comp.get_mechanisms_by_role(MechanismRole.RECURRENT_INIT) # (A)->(B)<->(C)<-(D) + @pytest.mark.skip def test_two_origins_pointing_to_recursive_pair(self): comp = Composition() A = TransferMechanism(name='composition-pytests-A') From 9e12f7e123317829b2dc8f7051c9b0751d6c539f Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 25 Apr 2018 12:30:48 -0400 Subject: [PATCH 023/200] adding pytests from old CIM branch --- tests/composition/test_composition.py | 1744 +++++++++++++++++++++++++ 1 file changed, 1744 insertions(+) diff --git a/tests/composition/test_composition.py b/tests/composition/test_composition.py index 4e693b3ca86..de49d3ed15b 100644 --- a/tests/composition/test_composition.py +++ b/tests/composition/test_composition.py @@ -8,11 +8,17 @@ from psyneulink.components.functions.function import Linear, SimpleIntegrator from psyneulink.components.mechanisms.processing.integratormechanism import IntegratorMechanism from psyneulink.components.mechanisms.processing.transfermechanism import TransferMechanism +from psyneulink.components.mechanisms.processing.processingmechanism import ProcessingMechanism +from psyneulink.library.mechanisms.processing.transfer.recurrenttransfermechanism import RecurrentTransferMechanism from psyneulink.components.projections.pathway.mappingprojection import MappingProjection +from psyneulink.components.states.inputstate import InputState from psyneulink.compositions.composition import Composition, CompositionError, MechanismRole +from psyneulink.compositions.pathwaycomposition import PathwayComposition +from psyneulink.compositions.systemcomposition import SystemComposition from psyneulink.scheduling.condition import EveryNCalls from psyneulink.scheduling.scheduler import Scheduler from psyneulink.scheduling.time import TimeScale +from psyneulink.globals.keywords import NAME logger = logging.getLogger(__name__) @@ -1420,3 +1426,1741 @@ def record_values(d, time_scale, *mechs): # expected_Output_Layer_output = [np.array([0.97988347, 0.97988347, 0.97988347])] # # np.testing.assert_allclose(expected_Output_Layer_output, Output_Layer.output_values) + + +class TestClampInput: + + def test_run_5_mechanisms_2_origins_1_terminal_hard_clamp(self): + # HARD_CLAMP + + # recurrent projection ignored on the second execution of A + # __ + # | | + # 5 -#2-> x | + # 5 -#1-> A -^--> C -- + # ==> E + # 5 ----> B ----> D -- + + # 5 x 1 = 5 ----> 5 x 5 = 25 -- + # 25 + 25 = 50 ==> 50 * 5 = 250 + # 5 * 1 = 5 ----> 5 x 5 = 25 -- + + comp = Composition() + A = RecurrentTransferMechanism(name="A", function=Linear(slope=1.0)) + B = TransferMechanism(name="B", function=Linear(slope=1.0)) + C = TransferMechanism(name="C", function=Linear(slope=5.0)) + D = TransferMechanism(name="D", function=Linear(slope=5.0)) + E = 
TransferMechanism(name="E", function=Linear(slope=5.0)) + comp.add_mechanism(A) + comp.add_mechanism(B) + comp.add_mechanism(C) + comp.add_mechanism(D) + comp.add_projection(A, MappingProjection(sender=A, receiver=C), C) + comp.add_projection(B, MappingProjection(sender=B, receiver=D), D) + comp.add_mechanism(E) + comp.add_projection(C, MappingProjection(sender=C, receiver=E), E) + comp.add_projection(D, MappingProjection(sender=D, receiver=E), E) + comp._analyze_graph() + inputs_dict = { + A: [[5]], + B: [[5]] + } + sched = Scheduler(composition=comp) + sched.add_condition(A, EveryNPasses(1)) + sched.add_condition(B, EveryNCalls(A, 2)) + sched.add_condition(C, AfterNCalls(A, 2)) + sched.add_condition(D, AfterNCalls(A, 2)) + sched.add_condition(E, AfterNCalls(C, 1)) + sched.add_condition(E, AfterNCalls(D, 1)) + output = comp.run( + inputs=inputs_dict, + scheduler_processing=sched, + clamp_input=HARD_CLAMP + ) + assert 250 == output[0][0] + + def test_run_5_mechanisms_2_origins_1_terminal_soft_clamp(self): + # recurrent projection combines with input on the second execution of A + # _r_ + # | | + # 5 -#2-> V | + # 5 -#1-> A --^ --> C -- + # ==> E + # 5 ----> B ------> D -- + + # 5 x 1 = 5 ----> 5 x 5 = 25 -- + # 25 + 25 = 50 ==> 50 * 5 = 250 + # 5 * 1 = 5 ----> 5 x 5 = 25 -- + + comp = Composition() + A = RecurrentTransferMechanism(name="A", function=Linear(slope=1.0)) + B = TransferMechanism(name="B", function=Linear(slope=1.0)) + C = TransferMechanism(name="C", function=Linear(slope=5.0)) + D = TransferMechanism(name="D", function=Linear(slope=5.0)) + E = TransferMechanism(name="E", function=Linear(slope=5.0)) + comp.add_mechanism(A) + comp.add_mechanism(B) + comp.add_mechanism(C) + comp.add_mechanism(D) + comp.add_projection(A, MappingProjection(sender=A, receiver=C), C) + comp.add_projection(B, MappingProjection(sender=B, receiver=D), D) + comp.add_mechanism(E) + comp.add_projection(C, MappingProjection(sender=C, receiver=E), E) + comp.add_projection(D, MappingProjection(sender=D, receiver=E), E) + comp._analyze_graph() + inputs_dict = { + A: [[5.]], + B: [[5.]] + } + sched = Scheduler(composition=comp) + sched.add_condition(A, EveryNPasses(1)) + sched.add_condition(B, EveryNCalls(A, 2)) + sched.add_condition(C, AfterNCalls(A, 2)) + sched.add_condition(D, AfterNCalls(A, 2)) + sched.add_condition(E, AfterNCalls(C, 1)) + sched.add_condition(E, AfterNCalls(D, 1)) + output = comp.run( + inputs=inputs_dict, + scheduler_processing=sched, + clamp_input=SOFT_CLAMP + ) + assert 375 == output[0][0] + + def test_run_5_mechanisms_2_origins_1_terminal_pulse_clamp(self): + # input ignored on the second execution of A + # __ + # | | + # V | + # 5 -#1-> A -^--> C -- + # ==> E + # 5 ----> B ----> D -- + + # 5 x 1 = 5 ----> 5 x 5 = 25 -- + # 25 + 25 = 50 ==> 50 * 5 = 250 + # 5 * 1 = 5 ----> 5 x 5 = 25 -- + + comp = Composition() + A = RecurrentTransferMechanism(name="A", function=Linear(slope=2.0)) + B = TransferMechanism(name="B", function=Linear(slope=1.0)) + C = TransferMechanism(name="C", function=Linear(slope=5.0)) + D = TransferMechanism(name="D", function=Linear(slope=5.0)) + E = TransferMechanism(name="E", function=Linear(slope=5.0)) + comp.add_mechanism(A) + comp.add_mechanism(B) + comp.add_mechanism(C) + comp.add_mechanism(D) + comp.add_projection(A, MappingProjection(sender=A, receiver=C), C) + comp.add_projection(B, MappingProjection(sender=B, receiver=D), D) + comp.add_mechanism(E) + comp.add_projection(C, MappingProjection(sender=C, receiver=E), E) + comp.add_projection(D, 
MappingProjection(sender=D, receiver=E), E) + comp._analyze_graph() + inputs_dict = { + A: [[5]], + B: [[5]] + } + sched = Scheduler(composition=comp) + sched.add_condition(A, EveryNPasses(1)) + sched.add_condition(B, EveryNCalls(A, 2)) + sched.add_condition(C, AfterNCalls(A, 2)) + sched.add_condition(D, AfterNCalls(A, 2)) + sched.add_condition(E, AfterNCalls(C, 1)) + sched.add_condition(E, AfterNCalls(D, 1)) + output = comp.run( + inputs=inputs_dict, + scheduler_processing=sched, + clamp_input=PULSE_CLAMP + ) + assert 625 == output[0][0] + + def test_run_5_mechanisms_2_origins_1_hard_clamp_1_soft_clamp(self): + + # __ + # | | + # V | + # 5 -#1-> A -^--> C -- + # ==> E + # 5 ----> B ----> D -- + + # v Recurrent + # 5 * 1 = (5 + 5) x 1 = 10 + # 5 x 1 = 5 ----> 10 x 5 = 50 -- + # 50 + 25 = 75 ==> 75 * 5 = 375 + # 5 * 1 = 5 ----> 5 x 5 = 25 -- + + comp = Composition() + A = RecurrentTransferMechanism(name="A", function=Linear(slope=1.0)) + B = RecurrentTransferMechanism(name="B", function=Linear(slope=1.0)) + C = TransferMechanism(name="C", function=Linear(slope=5.0)) + D = TransferMechanism(name="D", function=Linear(slope=5.0)) + E = TransferMechanism(name="E", function=Linear(slope=5.0)) + comp.add_mechanism(A) + comp.add_mechanism(B) + comp.add_mechanism(C) + comp.add_mechanism(D) + comp.add_projection(A, MappingProjection(sender=A, receiver=C), C) + comp.add_projection(B, MappingProjection(sender=B, receiver=D), D) + comp.add_mechanism(E) + comp.add_projection(C, MappingProjection(sender=C, receiver=E), E) + comp.add_projection(D, MappingProjection(sender=D, receiver=E), E) + comp._analyze_graph() + inputs_dict = { + A: [[5]], + B: [[5]] + } + sched = Scheduler(composition=comp) + sched.add_condition(A, EveryNPasses(1)) + sched.add_condition(B, EveryNPasses(1)) + sched.add_condition(B, EveryNCalls(A, 1)) + sched.add_condition(C, AfterNCalls(A, 2)) + sched.add_condition(D, AfterNCalls(A, 2)) + sched.add_condition(E, AfterNCalls(C, 1)) + sched.add_condition(E, AfterNCalls(D, 1)) + output = comp.run( + inputs=inputs_dict, + scheduler_processing=sched, + clamp_input={A: SOFT_CLAMP, + B: HARD_CLAMP} + ) + assert 375 == output[0][0] + + def test_run_5_mechanisms_2_origins_1_terminal_no_clamp(self): + # input ignored on all executions + # _r_ + # | | + # 0 -#2-> V | + # 0 -#1-> A -^--> C -- + # ==> E + # 0 ----> B ----> D -- + + # 1 * 2 + 1 = 3 + # 0 x 2 + 1 = 1 ----> 4 x 5 = 20 -- + # 20 + 5 = 25 ==> 25 * 5 = 125 + # 0 x 1 + 1 = 1 ----> 1 x 5 = 5 -- + + comp = Composition() + + A = RecurrentTransferMechanism(name="A", function=Linear(slope=2.0, intercept=5.0)) + B = RecurrentTransferMechanism(name="B", function=Linear(slope=1.0, intercept=1.0)) + C = TransferMechanism(name="C", function=Linear(slope=5.0)) + D = TransferMechanism(name="D", function=Linear(slope=5.0)) + E = TransferMechanism(name="E", function=Linear(slope=5.0)) + comp.add_mechanism(A) + comp.add_mechanism(B) + comp.add_mechanism(C) + comp.add_mechanism(D) + comp.add_projection(A, MappingProjection(sender=A, receiver=C), C) + comp.add_projection(B, MappingProjection(sender=B, receiver=D), D) + comp.add_mechanism(E) + comp.add_projection(C, MappingProjection(sender=C, receiver=E), E) + comp.add_projection(D, MappingProjection(sender=D, receiver=E), E) + comp._analyze_graph() + inputs_dict = { + A: [[100.0]], + B: [[500.0]] + } + sched = Scheduler(composition=comp) + sched.add_condition(A, EveryNPasses(1)) + sched.add_condition(B, EveryNCalls(A, 2)) + sched.add_condition(C, AfterNCalls(A, 2)) + sched.add_condition(D, AfterNCalls(A, 
2)) + sched.add_condition(E, AfterNCalls(C, 1)) + sched.add_condition(E, AfterNCalls(D, 1)) + output = comp.run( + inputs=inputs_dict, + scheduler_processing=sched, + clamp_input=NO_CLAMP + ) + # FIX: This value is correct given that there is a BUG in Recurrent Transfer Mech -- + # Recurrent projection BEGINS with a value leftover from initialization + # (only shows up if the function has an additive component or default variable is not zero) + assert 925 == output[0][0] + + +class TestCallBeforeAfterTimescale: + + def test_call_before_record_timescale(self): + time_step_array = [] + trial_array = [] + pass_array = [] + + def cb_timestep(composition, scheduler, arr): + def record_timestep(): + + arr.append(scheduler.times[composition._execution_id][TimeScale.TIME_STEP][TimeScale.TIME_STEP]) + + return record_timestep + + def cb_pass(composition, scheduler, arr): + + def record_pass(): + + arr.append(scheduler.times[composition._execution_id][TimeScale.RUN][TimeScale.PASS]) + + return record_pass + + def cb_trial(composition, scheduler, arr): + + def record_trial(): + + arr.append(scheduler.times[composition._execution_id][TimeScale.LIFE][TimeScale.TRIAL]) + + return record_trial + + comp = Composition() + + A = TransferMechanism(name="A [transfer]", function=Linear(slope=2.0)) + B = TransferMechanism(name="B [transfer]", function=Linear(slope=5.0)) + comp.add_mechanism(A) + comp.add_mechanism(B) + comp.add_projection(A, MappingProjection(sender=A, receiver=B), B) + comp._analyze_graph() + inputs_dict = {A: [[1], [2], [3], [4]]} + sched = Scheduler(composition=comp) + + comp.run( + inputs=inputs_dict, + scheduler_processing=sched, + call_before_time_step=cb_timestep(comp, sched, time_step_array), + call_before_trial=cb_trial(comp, sched, trial_array), + call_before_pass=cb_pass(comp, sched, pass_array) + ) + + assert time_step_array == [0, 1, 0, 1, 0, 1, 0, 1] + assert trial_array == [0, 1, 2, 3] + assert pass_array == [0, 1, 2, 3] + + +class TestSystemComposition: + + def test_run_2_mechanisms_default_input_1(self): + sys = SystemComposition() + A = IntegratorMechanism(default_variable=1.0, function=Linear(slope=5.0)) + B = TransferMechanism(function=Linear(slope=5.0)) + sys.add_mechanism(A) + sys.add_mechanism(B) + sys.add_projection(A, MappingProjection(sender=A, receiver=B), B) + sys._analyze_graph() + sched = Scheduler(composition=sys) + output = sys.run( + scheduler_processing=sched + ) + assert 25 == output[0][0] + + def test_run_2_mechanisms_input_5(self): + sys = SystemComposition() + A = IntegratorMechanism(default_variable=1.0, function=Linear(slope=5.0)) + B = TransferMechanism(function=Linear(slope=5.0)) + sys.add_mechanism(A) + sys.add_mechanism(B) + sys.add_projection(A, MappingProjection(sender=A, receiver=B), B) + sys._analyze_graph() + inputs_dict = {A: [[5]]} + sched = Scheduler(composition=sys) + output = sys.run( + inputs=inputs_dict, + scheduler_processing=sched + ) + assert 125 == output[0][0] + + def test_call_beforeafter_values_onepass(self): + + def record_values(d, time_scale, *mechs): + if time_scale not in d: + d[time_scale] = {} + for mech in mechs: + if mech not in d[time_scale]: + d[time_scale][mech] = [] + if mech.value is None: + d[time_scale][mech].append(np.nan) + else: + d[time_scale][mech].append(mech.value) + + comp = Composition() + + A = TransferMechanism(name="A [transfer]", function=Linear(slope=2.0)) + B = TransferMechanism(name="B [transfer]", function=Linear(slope=5.0)) + comp.add_mechanism(A) + comp.add_mechanism(B) + comp.add_projection(A, 
MappingProjection(sender=A, receiver=B), B) + comp._analyze_graph() + inputs_dict = {A: [[1],[ 2], [3], [4]]} + sched = Scheduler(composition=comp) + + before = {} + after = {} + + before_expected = { + TimeScale.TIME_STEP: { + A: [np.nan, 2, 2, 4, 4, 6, 6, 8], + B: [np.nan, np.nan, 10, 10, 20, 20, 30, 30] + }, + TimeScale.PASS: { + A: [np.nan, 2, 4, 6], + B: [np.nan, 10, 20, 30] + }, + TimeScale.TRIAL: { + A: [np.nan, 2, 4, 6], + B: [np.nan, 10, 20, 30] + }, + } + + after_expected = { + TimeScale.TIME_STEP: { + A: [2, 2, 4, 4, 6, 6, 8, 8], + B: [np.nan, 10, 10, 20, 20, 30, 30, 40] + }, + TimeScale.PASS: { + A: [2, 4, 6, 8], + B: [10, 20, 30, 40] + }, + TimeScale.TRIAL: { + A: [2, 4, 6, 8], + B: [10, 20, 30, 40] + }, + } + + comp.run( + inputs=inputs_dict, + scheduler_processing=sched, + call_before_time_step=functools.partial(record_values, before, TimeScale.TIME_STEP, A, B), + call_before_pass=functools.partial(record_values, before, TimeScale.PASS, A, B), + call_before_trial=functools.partial(record_values, before, TimeScale.TRIAL, A, B), + call_after_time_step=functools.partial(record_values, after, TimeScale.TIME_STEP, A, B), + call_after_pass=functools.partial(record_values, after, TimeScale.PASS, A, B), + call_after_trial=functools.partial(record_values, after, TimeScale.TRIAL, A, B), + ) + + for ts in before_expected: + for mech in before_expected[ts]: + # extra brackets around 'before_expected[ts][mech]' were needed for np assert to work + np.testing.assert_allclose([before[ts][mech]], [before_expected[ts][mech]], err_msg='Failed on before[{0}][{1}]'.format(ts, mech)) + + for ts in after_expected: + for mech in after_expected[ts]: + comp = [] + for x in after[ts][mech]: + try: + comp.append(x[0][0]) + except TypeError: + comp.append(x) + np.testing.assert_allclose(comp, after_expected[ts][mech], err_msg='Failed on after[{0}][{1}]'.format(ts, mech)) + + def test_call_beforeafter_values_twopass(self): + + def record_values(d, time_scale, *mechs): + if time_scale not in d: + d[time_scale] = {} + for mech in mechs: + if mech not in d[time_scale]: + d[time_scale][mech] = [] + if mech.value is None: + d[time_scale][mech].append(np.nan) + else: + d[time_scale][mech].append(mech.value) + + comp = Composition() + + A = IntegratorMechanism(name="A [transfer]", function=SimpleIntegrator(rate=1)) + B = IntegratorMechanism(name="B [transfer]", function=SimpleIntegrator(rate=2)) + comp.add_mechanism(A) + comp.add_mechanism(B) + comp.add_projection(A, MappingProjection(sender=A, receiver=B), B) + comp._analyze_graph() + inputs_dict = {A: [[1], [2]]} + sched = Scheduler(composition=comp) + sched.add_condition(B, EveryNCalls(A, 2)) + + before = {} + after = {} + + before_expected = { + TimeScale.TIME_STEP: { + A: [ + np.nan, 1, 2, + 2, 4, 6, + ], + B: [ + np.nan, np.nan, np.nan, + 4, 4, 4, + ] + }, + TimeScale.PASS: { + A: [ + np.nan, 1, + 2, 4, + ], + B: [ + np.nan, np.nan, + 4, 4, + ] + }, + TimeScale.TRIAL: { + A: [np.nan, 2], + B: [np.nan, 4] + }, + } + + after_expected = { + TimeScale.TIME_STEP: { + A: [ + 1, 2, 2, + 4, 6, 6, + ], + B: [ + np.nan, np.nan, 4, + 4, 4, 16, + ] + }, + TimeScale.PASS: { + A: [ + 1, 2, + 4, 6, + ], + B: [ + np.nan, 4, + 4, 16, + ] + }, + TimeScale.TRIAL: { + A: [2, 6], + B: [4, 16] + }, + } + + comp.run( + inputs=inputs_dict, + scheduler_processing=sched, + call_before_time_step=functools.partial(record_values, before, TimeScale.TIME_STEP, A, B), + call_before_pass=functools.partial(record_values, before, TimeScale.PASS, A, B), + 
call_before_trial=functools.partial(record_values, before, TimeScale.TRIAL, A, B), + call_after_time_step=functools.partial(record_values, after, TimeScale.TIME_STEP, A, B), + call_after_pass=functools.partial(record_values, after, TimeScale.PASS, A, B), + call_after_trial=functools.partial(record_values, after, TimeScale.TRIAL, A, B), + ) + + for ts in before_expected: + for mech in before_expected[ts]: + np.testing.assert_allclose(before[ts][mech], before_expected[ts][mech], err_msg='Failed on before[{0}][{1}]'.format(ts, mech)) + + for ts in after_expected: + for mech in after_expected[ts]: + comp = [] + for x in after[ts][mech]: + try: + comp.append(x[0][0]) + except TypeError: + comp.append(x) + np.testing.assert_allclose(comp, after_expected[ts][mech], err_msg='Failed on after[{0}][{1}]'.format(ts, mech)) + + # when self.sched is ready: + # def test_run_default_scheduler(self): + # comp = Composition() + # A = IntegratorMechanism(default_variable=1.0, function=Linear(slope=5.0)) + # B = TransferMechanism(function=Linear(slope=5.0)) + # comp.add_mechanism(A) + # comp.add_mechanism(B) + # comp.add_projection(A, MappingProjection(sender=A, receiver=B), B) + # comp._analyze_graph() + # inputs_dict = {A: [[5], [4], [3]]} + # output = comp.run( + # inputs=inputs_dict, + # num_trials=3 + # ) + # assert 75 == output[0][0] + + # def test_multilayer_no_learning(self): + # Input_Layer = TransferMechanism( + # name='Input Layer', + # function=Logistic, + # default_variable=np.zeros((2,)), + # ) + # + # Hidden_Layer_1 = TransferMechanism( + # name='Hidden Layer_1', + # function=Logistic(), + # default_variable=np.zeros((5,)), + # ) + # + # Hidden_Layer_2 = TransferMechanism( + # name='Hidden Layer_2', + # function=Logistic(), + # default_variable=[0, 0, 0, 0], + # ) + # + # Output_Layer = TransferMechanism( + # name='Output Layer', + # function=Logistic, + # default_variable=[0, 0, 0], + # ) + # + # Input_Weights_matrix = (np.arange(2 * 5).reshape((2, 5)) + 1) / (2 * 5) + # + # Input_Weights = MappingProjection( + # name='Input Weights', + # matrix=Input_Weights_matrix, + # ) + # + # comp = Composition() + # comp.add_mechanism(Input_Layer) + # comp.add_mechanism(Hidden_Layer_1) + # comp.add_mechanism(Hidden_Layer_2) + # comp.add_mechanism(Output_Layer) + # + # comp.add_projection(Input_Layer, Input_Weights, Hidden_Layer_1) + # comp.add_projection(Hidden_Layer_1, MappingProjection(), Hidden_Layer_2) + # comp.add_projection(Hidden_Layer_2, MappingProjection(), Output_Layer) + # + # comp._analyze_graph() + # stim_list = {Input_Layer: [[-1, 30]]} + # sched = Scheduler(composition=comp) + # output = comp.run( + # inputs=stim_list, + # scheduler_processing=sched, + # num_trials=10 + # ) + # + # # p = process( + # # default_variable=[0, 0], + # # pathway=[ + # # Input_Layer, + # # # The following reference to Input_Weights is needed to use it in the pathway + # # # since it's sender and receiver args are not specified in its declaration above + # # Input_Weights, + # # Hidden_Layer_1, + # # # No projection specification is needed here since the sender arg for Middle_Weights + # # # is Hidden_Layer_1 and its receiver arg is Hidden_Layer_2 + # # # Middle_Weights, + # # Hidden_Layer_2, + # # # Output_Weights does not need to be listed for the same reason as Middle_Weights + # # # If Middle_Weights and/or Output_Weights is not declared above, then the process + # # # will assign a default for missing projection + # # # Output_Weights, + # # Output_Layer + # # ], + # # clamp_input=SOFT_CLAMP, + # # 
target=[0, 0, 1] + # # + # # + # # ) + # # + # # s.run( + # # num_executions=10, + # # inputs=stim_list, + # # ) + # + # expected_Output_Layer_output = [np.array([0.97988347, 0.97988347, 0.97988347])] + # + # np.testing.assert_allclose(expected_Output_Layer_output, Output_Layer.output_values) + + +# class TestOldSyntax: +# +# # new syntax pathway, old syntax system +# def test_one_pathway_inside_one_system_old_syntax(self): +# # create a PathwayComposition | blank slate for composition +# myPath = PathwayComposition() +# +# # create mechanisms to add to myPath +# myMech1 = TransferMechanism(function=Linear(slope=2.0)) # 1 x 2 = 2 +# myMech2 = TransferMechanism(function=Linear(slope=2.0)) # 2 x 2 = 4 +# myMech3 = TransferMechanism(function=Linear(slope=2.0)) # 4 x 2 = 8 +# +# # add mechanisms to myPath with default MappingProjections between them +# myPath.add_linear_processing_pathway([myMech1, myMech2, myMech3]) +# +# # analyze graph (assign roles) +# myPath._analyze_graph() +# +# # Create a system using the old factory method syntax +# sys = system(processes=[myPath]) +# +# # assign input to origin mech +# stimulus = {myMech1: [[1]]} +# +# # schedule = Scheduler(composition=sys) +# output = sys.run( +# inputs=stimulus, +# # scheduler_processing=schedule +# ) +# assert 8 == output[0][0] +# +# # old syntax pathway (process) +# def test_one_process_old_syntax(self): +# +# # create mechanisms to add to myPath +# myMech1 = TransferMechanism(function=Linear(slope=2.0)) # 1 x 2 = 2 +# myMech2 = TransferMechanism(function=Linear(slope=2.0)) # 2 x 2 = 4 +# myMech3 = TransferMechanism(function=Linear(slope=2.0)) # 4 x 2 = 8 +# +# # create a PathwayComposition | blank slate for composition +# myPath = process(pathway=[myMech1, myMech2, myMech3]) +# +# # assign input to origin mech +# stimulus = {myMech1: [[1]]} +# +# # schedule = Scheduler(composition=sys) +# output = myPath.run( +# inputs=stimulus, +# # scheduler_processing=schedule +# ) +# assert 8 == output[0][0] +# +# # old syntax pathway (process), old syntax system +# def test_one_process_inside_one_system_old_syntax(self): +# # create mechanisms to add to myPath +# myMech1 = TransferMechanism(function=Linear(slope=2.0)) # 1 x 2 = 2 +# myMech2 = TransferMechanism(function=Linear(slope=2.0)) # 2 x 2 = 4 +# myMech3 = TransferMechanism(function=Linear(slope=2.0)) # 4 x 2 = 8 +# +# # create a PathwayComposition | blank slate for composition +# myPath = process(pathway=[myMech1, myMech2, myMech3]) +# +# # Create a system using the old factory method syntax +# sys = system(processes=[myPath]) +# +# # assign input to origin mech +# stimulus = {myMech1: [[1]]} +# +# # schedule = Scheduler(composition=sys) +# output = sys.run( +# inputs=stimulus, +# # scheduler_processing=schedule +# ) +# assert 8 == output[0][0] +# +# # old syntax pathway (process), old syntax system; 2 processes in series +# def test_two_processes_in_series_in_system_old_syntax(self): +# +# # create mechanisms to add to myPath +# myMech1 = TransferMechanism(function=Linear(slope=2.0)) # 1 x 2 = 2 +# myMech2 = TransferMechanism(function=Linear(slope=2.0)) # 2 x 2 = 4 +# myMech3 = TransferMechanism(function=Linear(slope=2.0)) # 4 x 2 = 8 +# # create a PathwayComposition | blank slate for composition +# myPath = process(pathway=[myMech1, myMech2, myMech3]) +# +# # create a PathwayComposition | blank slate for composition +# myPath2 = PathwayComposition() +# +# # create mechanisms to add to myPath2 +# myMech4 = TransferMechanism(function=Linear(slope=2.0)) # 8 x 2 = 16 +# myMech5 = 
TransferMechanism(function=Linear(slope=2.0)) # 16 x 2 = 32 +# myMech6 = TransferMechanism(function=Linear(slope=2.0)) # 32 x 2 = 64 +# +# # add mechanisms to myPath2 with default MappingProjections between them +# myPath2.add_linear_processing_pathway([myMech4, myMech5, myMech6]) +# +# # analyze graph (assign roles) +# myPath2._analyze_graph() +# +# # Create a system using the old factory method syntax +# sys = system(processes=[myPath, myPath2]) +# +# # connect the two pathways in series +# sys.add_projection(sender=myMech3, +# projection=MappingProjection(sender=myMech3, receiver=myMech4), +# receiver=myMech4) +# # assign input to origin mech +# stimulus = {myMech1: [[1]]} +# +# # schedule = Scheduler(composition=sys) +# output = sys.run( +# inputs=stimulus, +# # scheduler_processing=schedule +# ) +# assert 64 == output[0][0] +# +# # old syntax pathway (process), old syntax system; 2 processes converge +# def test_two_processes_converge_in_system_old_syntax(self): +# # create a PathwayComposition | blank slate for composition +# myPath = PathwayComposition() +# +# # create mechanisms to add to myPath +# myMech1 = TransferMechanism(function=Linear(slope=2.0)) # 1 x 2 = 2 +# myMech2 = TransferMechanism(function=Linear(slope=2.0)) # 2 x 2 = 4 +# myMech3 = TransferMechanism(function=Linear(slope=2.0)) +# +# # add mechanisms to myPath with default MappingProjections between them +# myPath.add_linear_processing_pathway([myMech1, myMech2, myMech3]) +# +# # analyze graph (assign roles) +# myPath._analyze_graph() +# +# # create a PathwayComposition | blank slate for composition +# myPath2 = PathwayComposition() +# +# # create mechanisms to add to myPath2 +# myMech4 = TransferMechanism(function=Linear(slope=2.0)) # 1 x 2 = 2 +# myMech5 = TransferMechanism(function=Linear(slope=2.0)) # 2 x 2 = 4 +# +# # add mechanisms to myPath2 with default MappingProjections between them +# myPath2.add_linear_processing_pathway([myMech4, myMech5, myMech3]) +# +# # analyze graph (assign roles) +# myPath2._analyze_graph() +# +# # Create a system using the old factory method syntax +# sys = system(processes=[myPath, myPath2]) +# +# # assign input to origin mech +# stimulus = {myMech1: [[1]], +# myMech4: [[1]]} +# +# # schedule = Scheduler(composition=sys) +# output = sys.run( +# inputs=stimulus, +# # scheduler_processing=schedule +# ) +# assert 16 == output[0][0] +# + +class TestNestedCompositions: + def test_combine_two_disjunct_trees(self): + # Goal: + + # Mech1 -- + # --> Mech3 ----> Mech4 -- + # Mech2 -- --> Mech6 + # Mech5 -- + + # create first composition ----------------------------------------------- + + # Mech1 -- + # --> Mech3 + # Mech2 -- + + tree1 = Composition() + + myMech1 = TransferMechanism(name="myMech1") + myMech2 = TransferMechanism(name="myMech2") + myMech3 = TransferMechanism(name="myMech3") + myMech4 = TransferMechanism(name="myMech4") + myMech5 = TransferMechanism(name="myMech5") + myMech6 = TransferMechanism(name="myMech6") + + tree1.add_mechanism(myMech1) + tree1.add_mechanism(myMech2) + tree1.add_mechanism(myMech3) + tree1.add_projection(myMech1, MappingProjection(sender=myMech1, receiver=myMech3), myMech3) + tree1.add_projection(myMech2, MappingProjection(sender=myMech2, receiver=myMech3), myMech3) + + # validate first composition --------------------------------------------- + + tree1._analyze_graph() + origins = tree1.get_mechanisms_by_role(MechanismRole.ORIGIN) + assert len(origins) == 2 + assert myMech1 in origins + assert myMech2 in origins + terminals = 
tree1.get_mechanisms_by_role(MechanismRole.TERMINAL) + assert len(terminals) == 1 + assert myMech3 in terminals + + # create second composition ---------------------------------------------- + + # Mech4 -- + # --> Mech6 + # Mech5 -- + + tree2 = Composition() + tree2.add_mechanism(myMech4) + tree2.add_mechanism(myMech5) + tree2.add_mechanism(myMech6) + tree2.add_projection(myMech4, MappingProjection(sender=myMech4, receiver=myMech6), myMech6) + tree2.add_projection(myMech5, MappingProjection(sender=myMech5, receiver=myMech6), myMech6) + + # validate second composition ---------------------------------------------- + + tree2._analyze_graph() + origins = tree2.get_mechanisms_by_role(MechanismRole.ORIGIN) + assert len(origins) == 2 + assert myMech4 in origins + assert myMech5 in origins + terminals = tree2.get_mechanisms_by_role(MechanismRole.TERMINAL) + assert len(terminals) == 1 + assert myMech6 in terminals + + # combine the compositions ------------------------------------------------- + + tree1.add_pathway(tree2) + tree1._analyze_graph() + + # BEFORE linking via 3 --> 4 projection ------------------------------------ + # Mech1 -- + # --> Mech3 + # Mech2 -- + # Mech4 -- + # --> Mech6 + # Mech5 -- + + origins = tree1.get_mechanisms_by_role(MechanismRole.ORIGIN) + assert len(origins) == 4 + assert myMech1 in origins + assert myMech2 in origins + assert myMech4 in origins + assert myMech5 in origins + terminals = tree1.get_mechanisms_by_role(MechanismRole.TERMINAL) + assert len(terminals) == 2 + assert myMech3 in terminals + assert myMech6 in terminals + + # AFTER linking via 3 --> 4 projection ------------------------------------ + # Mech1 -- + # --> Mech3 ----> Mech4 -- + # Mech2 -- --> Mech6 + # Mech5 -- + + tree1.add_projection(myMech3, MappingProjection(sender=myMech3, receiver=myMech4), myMech4) + tree1._analyze_graph() + + origins = tree1.get_mechanisms_by_role(MechanismRole.ORIGIN) + assert len(origins) == 3 + assert myMech1 in origins + assert myMech2 in origins + assert myMech5 in origins + terminals = tree1.get_mechanisms_by_role(MechanismRole.TERMINAL) + assert len(terminals) == 1 + assert myMech6 in terminals + + def test_combine_two_overlapping_trees(self): + # Goal: + + # Mech1 -- + # --> Mech3 -- + # Mech2 -- --> Mech5 + # Mech4 -- + + # create first composition ----------------------------------------------- + + # Mech1 -- + # --> Mech3 + # Mech2 -- + + tree1 = Composition() + + myMech1 = TransferMechanism(name="myMech1") + myMech2 = TransferMechanism(name="myMech2") + myMech3 = TransferMechanism(name="myMech3") + myMech4 = TransferMechanism(name="myMech4") + myMech5 = TransferMechanism(name="myMech5") + + tree1.add_mechanism(myMech1) + tree1.add_mechanism(myMech2) + tree1.add_mechanism(myMech3) + tree1.add_projection(myMech1, MappingProjection(sender=myMech1, receiver=myMech3), myMech3) + tree1.add_projection(myMech2, MappingProjection(sender=myMech2, receiver=myMech3), myMech3) + + # validate first composition --------------------------------------------- + + tree1._analyze_graph() + origins = tree1.get_mechanisms_by_role(MechanismRole.ORIGIN) + assert len(origins) == 2 + assert myMech1 in origins + assert myMech2 in origins + terminals = tree1.get_mechanisms_by_role(MechanismRole.TERMINAL) + assert len(terminals) == 1 + assert myMech3 in terminals + + # create second composition ---------------------------------------------- + + # Mech3 -- + # --> Mech5 + # Mech4 -- + + tree2 = Composition() + tree2.add_mechanism(myMech3) + tree2.add_mechanism(myMech4) + 
tree2.add_mechanism(myMech5) + tree2.add_projection(myMech3, MappingProjection(sender=myMech3, receiver=myMech5), myMech5) + tree2.add_projection(myMech4, MappingProjection(sender=myMech4, receiver=myMech5), myMech5) + + # validate second composition ---------------------------------------------- + + tree2._analyze_graph() + origins = tree2.get_mechanisms_by_role(MechanismRole.ORIGIN) + assert len(origins) == 2 + assert myMech3 in origins + assert myMech4 in origins + terminals = tree2.get_mechanisms_by_role(MechanismRole.TERMINAL) + assert len(terminals) == 1 + assert myMech5 in terminals + + # combine the compositions ------------------------------------------------- + + tree1.add_pathway(tree2) + tree1._analyze_graph() + # no need for a projection connecting the two compositions because they share myMech3 + + origins = tree1.get_mechanisms_by_role(MechanismRole.ORIGIN) + assert len(origins) == 3 + assert myMech1 in origins + assert myMech2 in origins + assert myMech4 in origins + terminals = tree1.get_mechanisms_by_role(MechanismRole.TERMINAL) + assert len(terminals) == 1 + assert myMech5 in terminals + + def test_one_pathway_inside_one_system(self): + # create a PathwayComposition | blank slate for composition + myPath = PathwayComposition() + + # create mechanisms to add to myPath + myMech1 = TransferMechanism(function=Linear(slope=2.0)) # 1 x 2 = 2 + myMech2 = TransferMechanism(function=Linear(slope=2.0)) # 2 x 2 = 4 + myMech3 = TransferMechanism(function=Linear(slope=2.0)) # 4 x 2 = 8 + + # add mechanisms to myPath with default MappingProjections between them + myPath.add_linear_processing_pathway([myMech1, myMech2, myMech3]) + + # analyze graph (assign roles) + myPath._analyze_graph() + + # assign input to origin mech + stimulus = {myMech1: [[1]]} + + # execute path (just for comparison) + print("EXECUTING PATH: ") + myPath.run(inputs=stimulus) + + # create a SystemComposition | blank slate for composition + sys = SystemComposition() + + # add a PathwayComposition [myPath] to the SystemComposition [sys] + sys.add_pathway(myPath) + + # execute the SystemComposition + output = sys.run( + inputs=stimulus, + ) + assert 8 == output[0][0] + + def test_two_paths_converge_one_system(self): + + # mech1 ---> mech2 -- + # --> mech3 + # mech4 ---> mech5 -- + + # 1x2=2 ---> 2x2=4 -- + # --> (4+4)x2=16 + # 1x2=2 ---> 2x2=4 -- + + # create a PathwayComposition | blank slate for composition + myPath = PathwayComposition() + + # create mechanisms to add to myPath + myMech1 = TransferMechanism(function=Linear(slope=2.0)) # 1 x 2 = 2 + myMech2 = TransferMechanism(function=Linear(slope=2.0)) # 2 x 2 = 4 + myMech3 = TransferMechanism(function=Linear(slope=2.0)) # 4 x 2 = 8 + + # add mechanisms to myPath with default MappingProjections between them + myPath.add_linear_processing_pathway([myMech1, myMech2, myMech3]) + + # analyze graph (assign roles) + myPath._analyze_graph() + + myPath2 = PathwayComposition() + myMech4 = TransferMechanism(function=Linear(slope=2.0)) # 1 x 2 = 2 + myMech5 = TransferMechanism(function=Linear(slope=2.0)) # 2 x 2 = 4 + myPath.add_linear_processing_pathway([myMech4, myMech5, myMech3]) + myPath._analyze_graph() + + sys = SystemComposition() + sys.add_pathway(myPath) + sys.add_pathway(myPath2) + # assign input to origin mechs + stimulus = {myMech1: [[1]], myMech4: [[1]]} + + # schedule = Scheduler(composition=sys) + output = sys.run( + inputs=stimulus, + # scheduler_processing=schedule + ) + assert 16 == output[0][0] + + def test_two_paths_in_series_one_system(self): + + # [ 
mech1 --> mech2 --> mech3 ] --> [ mech4 --> mech5 --> mech6 ] + # 1x2=2 --> 2x2=4 --> 4x2=8 --> (8+1)x2=18 --> 18x2=36 --> 36*2=64 + # X + # | + # 1 + # (if mech4 were recognized as an origin mech, and used SOFT_CLAMP, we would expect the final result to be 72) + # create a PathwayComposition | blank slate for composition + myPath = PathwayComposition() + + # create mechanisms to add to myPath + myMech1 = TransferMechanism(function=Linear(slope=2.0)) # 1 x 2 = 2 + myMech2 = TransferMechanism(function=Linear(slope=2.0)) # 2 x 2 = 4 + myMech3 = TransferMechanism(function=Linear(slope=2.0)) # 4 x 2 = 8 + + # add mechanisms to myPath with default MappingProjections between them + myPath.add_linear_processing_pathway([myMech1, myMech2, myMech3]) + + # analyze graph (assign roles) + myPath._analyze_graph() + + myPath2 = PathwayComposition() + myMech4 = TransferMechanism(function=Linear(slope=2.0)) + myMech5 = TransferMechanism(function=Linear(slope=2.0)) + myMech6 = TransferMechanism(function=Linear(slope=2.0)) + myPath.add_linear_processing_pathway([myMech4, myMech5, myMech6]) + myPath._analyze_graph() + + sys = SystemComposition() + sys.add_pathway(myPath) + sys.add_pathway(myPath2) + sys.add_projection(sender=myMech3, projection=MappingProjection(sender=myMech3, + receiver=myMech4), receiver=myMech4) + # assign input to origin mechs + # myMech4 ignores its input from the outside world because it is no longer considered an origin! + stimulus = {myMech1: [[1]]} + sys._analyze_graph() + # schedule = Scheduler(composition=sys) + output = sys.run( + inputs=stimulus, + # scheduler_processing=schedule + ) + assert 64 == output[0][0] + + def test_two_paths_converge_one_system_scheduling_matters(self): + + # mech1 ---> mech2 -- + # --> mech3 + # mech4 ---> mech5 -- + + # 1x2=2 ---> 2x2=4 -- + # --> (4+4)x2=16 + # 1x2=2 ---> 2x2=4 -- + + # create a PathwayComposition | blank slate for composition + myPath = PathwayComposition() + + # create mechanisms to add to myPath + myMech1 = IntegratorMechanism(function=Linear(slope=2.0)) # 1 x 2 = 2 + myMech2 = TransferMechanism(function=Linear(slope=2.0)) # 2 x 2 = 4 + myMech3 = TransferMechanism(function=Linear(slope=2.0)) # 4 x 2 = 8 + + # add mechanisms to myPath with default MappingProjections between them + myPath.add_linear_processing_pathway([myMech1, myMech2, myMech3]) + + # analyze graph (assign roles) + myPath._analyze_graph() + myPathScheduler = Scheduler(composition=myPath) + myPathScheduler.add_condition(myMech2, AfterNCalls(myMech1, 2)) + + myPath.run(inputs={myMech1: [[1]]}, scheduler_processing=myPathScheduler) + myPath.run(inputs={myMech1: [[1]]}, scheduler_processing=myPathScheduler) + myPath2 = PathwayComposition() + myMech4 = TransferMechanism(function=Linear(slope=2.0)) # 1 x 2 = 2 + myMech5 = TransferMechanism(function=Linear(slope=2.0)) # 2 x 2 = 4 + myPath.add_linear_processing_pathway([myMech4, myMech5, myMech3]) + myPath._analyze_graph() + + sys = SystemComposition() + sys.add_pathway(myPath) + sys.add_pathway(myPath2) + # assign input to origin mechs + stimulus = {myMech1: [[1]], myMech4: [[1]]} + + # schedule = Scheduler(composition=sys) + output = sys.run( + inputs=stimulus, + # scheduler_processing=schedule + ) + assert 16 == output[0][0] + + +class TestCompositionInterface: + + def test_one_input_state_per_origin_two_origins(self): + + # 5 -#1-> A --^ --> C -- + # ==> E + # 5 ----> B ------> D -- + + # 5 x 1 = 5 ----> 5 x 5 = 25 -- + # 25 + 25 = 50 ==> 50 * 5 = 250 + # 5 * 1 = 5 ----> 5 x 5 = 25 -- + + comp = Composition() + A = 
TransferMechanism(name="A", + function=Linear(slope=1.0) + ) + + B = TransferMechanism(name="B", function=Linear(slope=1.0)) + C = TransferMechanism(name="C", function=Linear(slope=5.0)) + D = TransferMechanism(name="D", function=Linear(slope=5.0)) + E = TransferMechanism(name="E", function=Linear(slope=5.0)) + comp.add_mechanism(A) + comp.add_mechanism(B) + comp.add_mechanism(C) + comp.add_mechanism(D) + comp.add_projection(A, MappingProjection(sender=A, receiver=C), C) + comp.add_projection(B, MappingProjection(sender=B, receiver=D), D) + comp.add_mechanism(E) + comp.add_projection(C, MappingProjection(sender=C, receiver=E), E) + comp.add_projection(D, MappingProjection(sender=D, receiver=E), E) + comp._analyze_graph() + inputs_dict = { + A: [[5.]], + # two trials of one input state each + # TRIAL 1 TRIAL 2 + # A : [ [ [0,0] ] , [ [0, 0] ] ] + + # two trials of multiple input states each + # TRIAL 1 TRIAL 2 + + # TRIAL1 IS1 IS2 IS3 TRIAL2 IS1 IS2 + # A : [ [ [0,0], [0,0,0], [0,0,0,0] ] , [ [0, 0], [0] ] ] + B: [[5.]] + } + sched = Scheduler(composition=comp) + output = comp.run( + inputs=inputs_dict, + scheduler_processing=sched + ) + + assert 250 == output[0][0] + + def test_updating_input_values_for_second_execution(self): + # 5 -#1-> A --^ --> C -- + # ==> E + # 5 ----> B ------> D -- + + # 5 x 1 = 5 ----> 5 x 5 = 25 -- + # 25 + 25 = 50 ==> 50 * 5 = 250 + # 5 * 1 = 5 ----> 5 x 5 = 25 -- + + comp = Composition() + A = TransferMechanism(name="A", + function=Linear(slope=1.0) + ) + + B = TransferMechanism(name="B", function=Linear(slope=1.0)) + C = TransferMechanism(name="C", function=Linear(slope=5.0)) + D = TransferMechanism(name="D", function=Linear(slope=5.0)) + E = TransferMechanism(name="E", function=Linear(slope=5.0)) + comp.add_mechanism(A) + comp.add_mechanism(B) + comp.add_mechanism(C) + comp.add_mechanism(D) + comp.add_projection(A, MappingProjection(sender=A, receiver=C), C) + comp.add_projection(B, MappingProjection(sender=B, receiver=D), D) + comp.add_mechanism(E) + comp.add_projection(C, MappingProjection(sender=C, receiver=E), E) + comp.add_projection(D, MappingProjection(sender=D, receiver=E), E) + comp._analyze_graph() + inputs_dict = { + A: [[5.]], + B: [[5.]] + } + sched = Scheduler(composition=comp) + + output = comp.run( + inputs=inputs_dict, + scheduler_processing=sched + ) + + inputs_dict2 = { + A: [[2.]], + B: [[5.]], + # two trials of one input state each + # TRIAL 1 TRIAL 2 + # A : [ [ [0,0] ] , [ [0, 0] ] ] + + # two trials of multiple input states each + # TRIAL 1 TRIAL 2 + + # TRIAL1 IS1 IS2 IS3 TRIAL2 IS1 IS2 + # A : [ [ [0,0], [0,0,0], [0,0,0,0] ] , [ [0, 0], [0] ] ] + B: [[5.]] + } + sched = Scheduler(composition=comp) + + output = comp.run( + inputs=inputs_dict, + scheduler_processing=sched + ) + + # add a new branch to the composition + F = TransferMechanism(name="F", function=Linear(slope=2.0)) + G = TransferMechanism(name="G", function=Linear(slope=2.0)) + comp.add_mechanism(F) + comp.add_mechanism(G) + comp.add_projection(sender=F, projection=MappingProjection(sender=F, receiver=G), receiver=G) + comp.add_projection(sender=G, projection=MappingProjection(sender=G, receiver=E), receiver=E) + + # reassign roles + comp._analyze_graph() + + # execute the updated composition + inputs_dict2 = { + A: [[1.]], + B: [[2.]], + F: [[3.]] + } + + sched = Scheduler(composition=comp) + output2 = comp.run( + inputs=inputs_dict2, + scheduler_processing=sched + ) + + assert 250 == output[0][0] + assert 135 == output2[0][0] + + def 
test_changing_origin_for_second_execution(self): + + comp = Composition() + A = TransferMechanism(name="A", + function=Linear(slope=1.0) + ) + + B = TransferMechanism(name="B", function=Linear(slope=1.0)) + C = TransferMechanism(name="C", function=Linear(slope=5.0)) + comp.add_mechanism(A) + comp.add_mechanism(B) + comp.add_mechanism(C) + comp.add_projection(A, MappingProjection(sender=A, receiver=B), B) + comp.add_projection(B, MappingProjection(sender=B, receiver=C), C) + comp._analyze_graph() + inputs_dict = {A: [[5.]]} + sched = Scheduler(composition=comp) + + output = comp.run( + inputs=inputs_dict, + scheduler_processing=sched + ) + + assert 25 == output[0][0] + + # add a new origin to the composition + F = TransferMechanism(name="F", function=Linear(slope=2.0)) + comp.add_mechanism(F) + comp.add_projection(sender=F, projection=MappingProjection(sender=F, receiver=A), receiver=A) + + # reassign roles + comp._analyze_graph() + + # execute the updated composition + inputs_dict2 = {F: [[3.]]} + + sched = Scheduler(composition=comp) + output2 = comp.run( + inputs=inputs_dict2, + scheduler_processing=sched + ) + + projections_to_A = [] + expected_projections_to_A = [("(OutputState RESULT)", "(InputState Default_InputState)")] + for input_state in A.input_states: + for p_a in input_state.path_afferents: + projections_to_A.append((str(p_a.sender), str(p_a.receiver))) + + assert projections_to_A == expected_projections_to_A + assert 30 == output2[0][0] + + def test_two_input_states_new_inputs_second_trial(self): + + comp = Composition() + my_fun = Linear( + # default_variable=[[0], [0]], + # ^ setting default_variable on the function actually does not matter -- does the mechanism update it? + slope=1.0) + A = TransferMechanism(name="A", + default_variable=[[0], [0]], + input_states=[{NAME: "Input State 1", + }, + {NAME: "Input State 2", + }], + function=my_fun + ) + comp.add_mechanism(A) + comp._analyze_graph() + inputs_dict = {A: {A.input_states[0]: [[5.]], + A.input_states[1]: [[5.]]} + } + sched = Scheduler(composition=comp) + output = comp.run( + inputs=inputs_dict, + scheduler_processing=sched + ) + + inputs_dict2 = {A: {A.input_states[0]: [[2.]], + A.input_states[1]: [[4.]]} + } + output2 = comp.run( + inputs=inputs_dict2, + scheduler_processing=sched + ) + assert 2. == A.input_states[0].value + assert 4. == A.input_states[1].value + assert "Input State 1" == A.input_states[0].name + assert "Input State 2" == A.input_states[1].name + assert 2. == A.variable[0] + assert 4. == A.variable[1] + assert 5 == output[0][0] + assert 2 == output2[0][0] + + def test_two_input_states_new_origin_second_trial(self): + + # A --> B --> C + + comp = Composition() + my_fun = Linear( + # default_variable=[[0], [0]], + # ^ setting default_variable on the function actually does not matter -- does the mechanism update it? 
+ slope=1.0) + A = TransferMechanism( + name="A", + default_variable=[[0], [0]], + input_states=[ + {NAME: "Input State 1", }, + {NAME: "Input State 2", } + ], + function=my_fun + ) + + B = TransferMechanism(name="B", function=Linear(slope=2.0)) + C = TransferMechanism(name="C", function=Linear(slope=5.0)) + comp.add_mechanism(A) + comp.add_mechanism(B) + comp.add_mechanism(C) + comp.add_projection(A, MappingProjection(sender=A, receiver=B), B) + comp.add_projection(B, MappingProjection(sender=B, receiver=C), C) + comp._analyze_graph() + inputs_dict = {A: {A.input_states[0]: [[5.]], + A.input_states[1]: [[5.]]} + } + sched = Scheduler(composition=comp) + output = comp.run( + inputs=inputs_dict, + scheduler_processing=sched + ) + assert 5. == A.input_states[0].value + assert 5. == A.input_states[1].value + assert "Input State 1" == A.input_states[0].name + assert "Input State 2" == A.input_states[1].name + assert 5. == A.variable[0] + assert 5. == A.variable[1] + assert 50 == output[0][0] + + # A --> B --> C + # ^ + # D __| + + D = TransferMechanism( + name="D", + default_variable=[[0], [0]], + input_states=[ + {NAME: "Input State 1", }, + {NAME: "Input State 2", } + ], + function=my_fun + ) + comp.add_mechanism(D) + comp.add_projection(D, MappingProjection(sender=D, receiver=B), B) + # Need to analyze graph again (identify D as an origin so that we can assign input) AND create the scheduler + # again (sched, even though it is tied to comp, will not update according to changes in comp) + comp._analyze_graph() + sched = Scheduler(composition=comp) + inputs_dict2 = {A: {A.input_states[0]: [[2.]], + A.input_states[1]: [[4.]]}, + D: {D.input_states[0]: [[2.]], + D.input_states[1]: [[4.]]} } + output2 = comp.run( + inputs=inputs_dict2, + scheduler_processing=sched + ) + assert 2. == A.input_states[0].value + assert 4. == A.input_states[1].value + assert "Input State 1" == A.input_states[0].name + assert "Input State 2" == A.input_states[1].name + assert 2. == A.variable[0] + assert 4. == A.variable[1] + assert 2. == D.input_states[0].value + assert 4. == D.input_states[1].value + assert "Input State 1" == D.input_states[0].name + assert "Input State 2" == D.input_states[1].name + assert 2. == D.variable[0] + assert 4. == D.variable[1] + assert 40 == output2[0][0] + + +class TestInputStateSpecifications: + + def test_two_input_states_created_with_dictionaries(self): + + comp = Composition() + A = ProcessingMechanism( + name="A", + default_variable=[[0], [0]], + # input_states=[ + # {NAME: "Input State 1", }, + # {NAME: "Input State 2", } + # ], + function=Linear(slope=1.0) + # specifying default_variable on the function doesn't seem to matter? 
+ ) + + comp.add_mechanism(A) + + comp._analyze_graph() + + inputs_dict = {A: [[2.], [4.]]} + sched = Scheduler(composition=comp) + output = comp.run( + inputs=inputs_dict, + scheduler_processing=sched + ) + print(A.input_states) + print(A.input_states[0].value) + print(A.input_states[1].value) + assert np.allclose(A.input_states[0].value, [2.0]) + assert np.allclose(A.input_states[1].value, [4.0]) + assert np.allclose(A.variable, [[2.0], [4.0]]) + + def test_two_input_states_created_first_with_deferred_init(self): + comp = Composition() + + # create mechanism A + I1 = InputState( + name="Input State 1", + reference_value=[0] + ) + I2 = InputState( + name="Input State 2", + reference_value=[0] + ) + A = TransferMechanism( + name="A", + default_variable=[[0], [0]], + input_states=[I1, I2], + function=Linear(slope=1.0) + ) + + # add mech A to composition + comp.add_mechanism(A) + + # get comp ready to run (identify roles, create sched, assign inputs) + comp._analyze_graph() + inputs_dict = { A: { A.input_states[0]: [[2.]], + A.input_states[1]: [[4.]]} + } + sched = Scheduler(composition=comp) + output = comp.run( + inputs=inputs_dict, + scheduler_processing=sched + ) + + assert 2. == A.input_states[0].value + assert 4. == A.input_states[1].value + assert "Input State 1" == A.input_states[0].name + assert "Input State 2" == A.input_states[1].name + assert 2. == A.variable[0] + assert 4. == A.variable[1] + + assert 2 == output[0][0] + + def test_two_input_states_created_with_keyword(self): + comp = Composition() + + # create mechanism A + + A = TransferMechanism( + name="A", + default_variable=[[0], [0]], + input_states=[INPUT_STATE, INPUT_STATE], + function=Linear(slope=1.0) + ) + + # add mech A to composition + comp.add_mechanism(A) + + # get comp ready to run (identify roles, create sched, assign inputs) + comp._analyze_graph() + inputs_dict = { A: { A.input_states[0]: [[2.]], + A.input_states[1]: [[4.]]} + } + sched = Scheduler(composition=comp) + output = comp.run( + inputs=inputs_dict, + scheduler_processing=sched + ) + + assert 2. == A.input_states[0].value + assert 4. == A.input_states[1].value + assert "InputState" == A.input_states[0].name + assert "InputState-1" == A.input_states[1].name + assert 2. == A.variable[0] + assert 4. == A.variable[1] + + assert 2 == output[0][0] + + def test_two_input_states_created_with_strings(self): + comp = Composition() + + # create mechanism A + + A = TransferMechanism( + name="A", + default_variable=[[0], [0]], + input_states=["Input State 1", "Input State 2"], + function=Linear(slope=1.0) + ) + + # add mech A to composition + comp.add_mechanism(A) + + # get comp ready to run (identify roles, create sched, assign inputs) + comp._analyze_graph() + inputs_dict = { A: { A.input_states[0]: [[2.]], + A.input_states[1]: [[4.]]} + } + sched = Scheduler(composition=comp) + output = comp.run( + inputs=inputs_dict, + scheduler_processing=sched + ) + + assert 2. == A.input_states[0].value + assert 4. == A.input_states[1].value + assert "Input State 1" == A.input_states[0].name + assert "Input State 2" == A.input_states[1].name + assert 2. == A.variable[0] + assert 4. 
== A.variable[1] + + assert 2 == output[0][0] + + def test_two_input_states_created_with_values(self): + comp = Composition() + + # create mechanism A + + A = TransferMechanism( + name="A", + default_variable=[[0], [0]], + input_states=[[0.], [0.]], + function=Linear(slope=1.0) + ) + + # add mech A to composition + comp.add_mechanism(A) + + # get comp ready to run (identify roles, create sched, assign inputs) + comp._analyze_graph() + inputs_dict = { A: { A.input_states[0]: [[2.]], + A.input_states[1]: [[4.]]} + } + sched = Scheduler(composition=comp) + output = comp.run( + inputs=inputs_dict, + scheduler_processing=sched + ) + + assert 2. == A.input_states[0].value + assert 4. == A.input_states[1].value + assert "Default_InputState-1" == A.input_states[0].name + assert "Default_InputState-2" == A.input_states[1].name + assert 2. == A.variable[0] + assert 4. == A.variable[1] + + assert 2 == output[0][0] + +class TestInputSpecifications: + + def test_2_mechanisms_default_input_1(self): + comp = Composition() + A = IntegratorMechanism(default_variable=1.0, function=Linear(slope=5.0)) + B = TransferMechanism(function=Linear(slope=5.0)) + comp.add_mechanism(A) + comp.add_mechanism(B) + comp.add_projection(A, MappingProjection(sender=A, receiver=B), B) + comp._analyze_graph() + sched = Scheduler(composition=comp) + output = comp.run( + scheduler_processing=sched + ) + assert 25 == output[0][0] + + def test_3_origins(self): + comp = Composition() + I1 = InputState( + name="Input State 1", + reference_value=[0] + ) + I2 = InputState( + name="Input State 2", + reference_value=[0] + ) + A = TransferMechanism( + name="A", + default_variable=[[0], [0]], + input_states=[I1, I2], + function=Linear(slope=1.0) + ) + B = TransferMechanism( + name="B", + default_variable=[0,0], + function=Linear(slope=1.0)) + C = TransferMechanism( + name="C", + default_variable=[0, 0, 0], + function=Linear(slope=1.0)) + D = TransferMechanism( + name="D", + default_variable=[0], + function=Linear(slope=1.0)) + comp.add_mechanism(A) + comp.add_mechanism(B) + comp.add_mechanism(C) + comp.add_mechanism(D) + comp.add_projection(A, MappingProjection(sender=A, receiver=D), D) + comp.add_projection(B, MappingProjection(sender=B, receiver=D), D) + comp.add_projection(C, MappingProjection(sender=C, receiver=D), D) + comp._analyze_graph() + inputs = {A: {I1: [[0],[1],[2]], + I2: [[0],[1],[2]]}, + B: [[0,0], [1,1], [2,2]], + C: [[0,0,0], [1,1,1], [2,2,2]] + + } + sched = Scheduler(composition=comp) + output = comp.run( + inputs=inputs, + scheduler_processing=sched + ) + assert 12 == output[0][0] + + def test_2_mechanisms_input_5(self): + comp = Composition() + A = IntegratorMechanism(default_variable=1.0, function=Linear(slope=5.0)) + B = TransferMechanism(function=Linear(slope=5.0)) + comp.add_mechanism(A) + comp.add_mechanism(B) + comp.add_projection(A, MappingProjection(sender=A, receiver=B), B) + comp._analyze_graph() + inputs_dict = {A: [[5]]} + sched = Scheduler(composition=comp) + output = comp.run( + inputs=inputs_dict, + scheduler_processing=sched + ) + assert 125 == output[0][0] + + def test_run_2_mechanisms_reuse_input(self): + comp = Composition() + A = IntegratorMechanism(default_variable=1.0, function=Linear(slope=5.0)) + B = TransferMechanism(function=Linear(slope=5.0)) + comp.add_mechanism(A) + comp.add_mechanism(B) + comp.add_projection(A, MappingProjection(sender=A, receiver=B), B) + comp._analyze_graph() + inputs_dict = {A: [[5]]} + sched = Scheduler(composition=comp) + output = comp.run( + inputs=inputs_dict, + 
scheduler_processing=sched, + num_trials=5 + ) + assert 125 == output[0][0] + From 07c5ca38ae618293f5f9c7f08c287d627ffd0772 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 25 Apr 2018 14:24:04 -0400 Subject: [PATCH 024/200] modifying composition's execute method to properly handle new input specification (from devel) --- psyneulink/compositions/composition.py | 10 +++------- tests/composition/test_composition.py | 10 +++------- 2 files changed, 6 insertions(+), 14 deletions(-) diff --git a/psyneulink/compositions/composition.py b/psyneulink/compositions/composition.py index ed2988b5bbb..1fe52ae2bfc 100644 --- a/psyneulink/compositions/composition.py +++ b/psyneulink/compositions/composition.py @@ -832,12 +832,9 @@ def _create_CIM_output_states(self): def _assign_values_to_CIM_output_states(self, inputs): current_mechanisms = set() for key in inputs: - if isinstance(key, Mechanism): - self.CIM_output_states[key.input_state].value = inputs[key] - current_mechanisms.add(key) - else: - self.CIM_output_states[key].value = inputs[key] - current_mechanisms.add(key.owner) + for i in range(len(inputs[key])): + self.CIM_output_states[key.input_states[i]].value = inputs[key][i] + current_mechanisms.add(key) origins = self.get_mechanisms_by_role(MechanismRole.ORIGIN) @@ -1119,7 +1116,6 @@ def run( scheduler_processing.update_termination_conditions(termination_processing) scheduler_learning.update_termination_conditions(termination_learning) - # ------------------------------------ FROM DEVEL START ------------------------------------ origin_mechanisms = self.get_mechanisms_by_role(MechanismRole.ORIGIN) # if there is only one origin mechanism, allow inputs to be specified in a list diff --git a/tests/composition/test_composition.py b/tests/composition/test_composition.py index de49d3ed15b..ed00bb7f674 100644 --- a/tests/composition/test_composition.py +++ b/tests/composition/test_composition.py @@ -2908,13 +2908,9 @@ def test_two_input_states_created_with_dictionaries(self): inputs_dict = {A: [[2.], [4.]]} sched = Scheduler(composition=comp) - output = comp.run( - inputs=inputs_dict, - scheduler_processing=sched - ) - print(A.input_states) - print(A.input_states[0].value) - print(A.input_states[1].value) + comp.run(inputs=inputs_dict, + scheduler_processing=sched) + assert np.allclose(A.input_states[0].value, [2.0]) assert np.allclose(A.input_states[1].value, [4.0]) assert np.allclose(A.variable, [[2.0], [4.0]]) From 946201af4241bd0177ccbc053b8cf015cb216697 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 25 Apr 2018 14:33:21 -0400 Subject: [PATCH 025/200] fixing misspecification of inputs in input state pytests --- tests/composition/test_composition.py | 65 +++++++++------------------ 1 file changed, 22 insertions(+), 43 deletions(-) diff --git a/tests/composition/test_composition.py b/tests/composition/test_composition.py index ed00bb7f674..96fd5660225 100644 --- a/tests/composition/test_composition.py +++ b/tests/composition/test_composition.py @@ -18,7 +18,7 @@ from psyneulink.scheduling.condition import EveryNCalls from psyneulink.scheduling.scheduler import Scheduler from psyneulink.scheduling.time import TimeScale -from psyneulink.globals.keywords import NAME +from psyneulink.globals.keywords import NAME, INPUT_STATE logger = logging.getLogger(__name__) @@ -2939,23 +2939,17 @@ def test_two_input_states_created_first_with_deferred_init(self): # get comp ready to run (identify roles, create sched, assign inputs) comp._analyze_graph() - inputs_dict = { A: { A.input_states[0]: 
[[2.]], - A.input_states[1]: [[4.]]} - } + inputs_dict = { A: [[2.],[4.]]} + sched = Scheduler(composition=comp) output = comp.run( inputs=inputs_dict, scheduler_processing=sched ) - assert 2. == A.input_states[0].value - assert 4. == A.input_states[1].value - assert "Input State 1" == A.input_states[0].name - assert "Input State 2" == A.input_states[1].name - assert 2. == A.variable[0] - assert 4. == A.variable[1] - - assert 2 == output[0][0] + assert np.allclose(A.input_states[0].value, [2.0]) + assert np.allclose(A.input_states[1].value, [4.0]) + assert np.allclose(A.variable, [[2.0], [4.0]]) def test_two_input_states_created_with_keyword(self): comp = Composition() @@ -2974,21 +2968,17 @@ def test_two_input_states_created_with_keyword(self): # get comp ready to run (identify roles, create sched, assign inputs) comp._analyze_graph() - inputs_dict = { A: { A.input_states[0]: [[2.]], - A.input_states[1]: [[4.]]} - } + inputs_dict = {A: [[2.], [4.]]} + sched = Scheduler(composition=comp) output = comp.run( inputs=inputs_dict, scheduler_processing=sched ) - assert 2. == A.input_states[0].value - assert 4. == A.input_states[1].value - assert "InputState" == A.input_states[0].name - assert "InputState-1" == A.input_states[1].name - assert 2. == A.variable[0] - assert 4. == A.variable[1] + assert np.allclose(A.input_states[0].value, [2.0]) + assert np.allclose(A.input_states[1].value, [4.0]) + assert np.allclose(A.variable, [[2.0], [4.0]]) assert 2 == output[0][0] @@ -3009,23 +2999,18 @@ def test_two_input_states_created_with_strings(self): # get comp ready to run (identify roles, create sched, assign inputs) comp._analyze_graph() - inputs_dict = { A: { A.input_states[0]: [[2.]], - A.input_states[1]: [[4.]]} - } + + inputs_dict = {A: [[2.], [4.]]} + sched = Scheduler(composition=comp) output = comp.run( inputs=inputs_dict, scheduler_processing=sched ) - assert 2. == A.input_states[0].value - assert 4. == A.input_states[1].value - assert "Input State 1" == A.input_states[0].name - assert "Input State 2" == A.input_states[1].name - assert 2. == A.variable[0] - assert 4. == A.variable[1] - - assert 2 == output[0][0] + assert np.allclose(A.input_states[0].value, [2.0]) + assert np.allclose(A.input_states[1].value, [4.0]) + assert np.allclose(A.variable, [[2.0], [4.0]]) def test_two_input_states_created_with_values(self): comp = Composition() @@ -3044,23 +3029,17 @@ def test_two_input_states_created_with_values(self): # get comp ready to run (identify roles, create sched, assign inputs) comp._analyze_graph() - inputs_dict = { A: { A.input_states[0]: [[2.]], - A.input_states[1]: [[4.]]} - } + inputs_dict = {A: [[2.], [4.]]} + sched = Scheduler(composition=comp) output = comp.run( inputs=inputs_dict, scheduler_processing=sched ) - assert 2. == A.input_states[0].value - assert 4. == A.input_states[1].value - assert "Default_InputState-1" == A.input_states[0].name - assert "Default_InputState-2" == A.input_states[1].name - assert 2. == A.variable[0] - assert 4. 
== A.variable[1] - - assert 2 == output[0][0] + assert np.allclose(A.input_states[0].value, [2.0]) + assert np.allclose(A.input_states[1].value, [4.0]) + assert np.allclose(A.variable, [[2.0], [4.0]]) class TestInputSpecifications: From 7f316bdfffb547bb09970e5edb456973a7f25fe3 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 25 Apr 2018 16:06:44 -0400 Subject: [PATCH 026/200] bugs in how execute params were passed from PathwayComposition to super --- psyneulink/compositions/pathwaycomposition.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/psyneulink/compositions/pathwaycomposition.py b/psyneulink/compositions/pathwaycomposition.py index d579fac86a1..80afcf16f6e 100644 --- a/psyneulink/compositions/pathwaycomposition.py +++ b/psyneulink/compositions/pathwaycomposition.py @@ -82,11 +82,13 @@ def execute( inputs, scheduler_processing=None, scheduler_learning=None, - execution_id=None, + termination_processing=None, + termination_learning=None, call_before_time_step=None, call_before_pass=None, call_after_time_step=None, call_after_pass=None, + execution_id=None, clamp_input=SOFT_CLAMP, targets=None ): @@ -98,11 +100,14 @@ def execute( inputs, scheduler_processing, scheduler_learning, - execution_id, - call_after_time_step, + termination_processing, + termination_learning, + call_before_time_step, call_before_pass, call_after_time_step, call_after_pass, + execution_id, clamp_input, + targets ) return output From cdaf92dd36b6a1a62dc2c575e4b2f45c5b192a27 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 25 Apr 2018 17:15:52 -0400 Subject: [PATCH 027/200] cleaning up composition pytests: correcting input specifications and assert statements, making all mechanism names more specific, commenting out tests that should not be included yet --- psyneulink/compositions/composition.py | 28 +- tests/composition/test_composition.py | 725 +++++++++++-------------- 2 files changed, 330 insertions(+), 423 deletions(-) diff --git a/psyneulink/compositions/composition.py b/psyneulink/compositions/composition.py index 1fe52ae2bfc..eb0f043bf1d 100644 --- a/psyneulink/compositions/composition.py +++ b/psyneulink/compositions/composition.py @@ -832,6 +832,8 @@ def _create_CIM_output_states(self): def _assign_values_to_CIM_output_states(self, inputs): current_mechanisms = set() for key in inputs: + if isinstance(inputs[key], (float, int)): + inputs[key] = np.atleast_2d(inputs[key]) for i in range(len(inputs[key])): self.CIM_output_states[key.input_states[i]].value = inputs[key][i] current_mechanisms.add(key) @@ -1126,7 +1128,6 @@ def run( raise CompositionError("Inputs to {} must be specified in a dictionary with a key for each of its {} origin " "mechanisms.".format(self.name, len(origin_mechanisms))) elif not isinstance(inputs, dict): - print(inputs) if len(origin_mechanisms) == 1: raise CompositionError( "Inputs to {} must be specified in a list or in a dictionary with the origin mechanism({}) " @@ -1169,22 +1170,19 @@ def run( stimulus_index = trial_num % num_inputs_sets for mech in inputs: execution_stimuli[mech] = inputs[mech][stimulus_index] - # execute processing # pass along the stimuli for this trial - trial_output = self.execute( - execution_stimuli, - scheduler_processing, - scheduler_learning, - termination_processing, - termination_learning, - call_before_time_step, - call_before_pass, - call_after_time_step, - call_after_pass, - execution_id, - clamp_input, - ) + trial_output = self.execute(inputs=execution_stimuli, + 
scheduler_processing=scheduler_processing, + scheduler_learning=scheduler_learning, + termination_processing=termination_processing, + termination_learning=termination_learning, + call_before_time_step=call_before_time_step, + call_before_pass=call_before_pass, + call_after_time_step=call_after_time_step, + call_after_pass=call_after_pass, + execution_id=execution_id, + clamp_input=clamp_input) # --------------------------------------------------------------------------------- # store the result of this execute in case it will be the final result diff --git a/tests/composition/test_composition.py b/tests/composition/test_composition.py index 96fd5660225..8a2a595f957 100644 --- a/tests/composition/test_composition.py +++ b/tests/composition/test_composition.py @@ -17,8 +17,9 @@ from psyneulink.compositions.systemcomposition import SystemComposition from psyneulink.scheduling.condition import EveryNCalls from psyneulink.scheduling.scheduler import Scheduler +from psyneulink.scheduling.condition import EveryNPasses, AfterNCalls from psyneulink.scheduling.time import TimeScale -from psyneulink.globals.keywords import NAME, INPUT_STATE +from psyneulink.globals.keywords import NAME, INPUT_STATE, HARD_CLAMP, SOFT_CLAMP, NO_CLAMP, PULSE_CLAMP logger = logging.getLogger(__name__) @@ -28,6 +29,7 @@ # Unit tests for each function of the Composition class ####################### # Unit tests for Composition.Composition( + class TestConstructor: def test_no_args(self): @@ -52,6 +54,7 @@ def test_timing_no_args(self, count): print() logger.info('completed {0} creation{2} of Composition() in {1:.8f}s'.format(count, t, 's' if count != 1 else '')) + class TestAddMechanism: def test_add_once(self): @@ -111,6 +114,7 @@ def test_timing_stress(self, count): logger.info('completed {0} addition{2} of a Mechanism to a Composition in {1:.8f}s'. 
format(count, t, 's' if count != 1 else '')) + class TestAddProjection: def test_add_once(self): @@ -267,6 +271,7 @@ def test_two_origins_pointing_to_recursive_pair(self): assert B in comp.get_mechanisms_by_role(MechanismRole.CYCLE) assert C in comp.get_mechanisms_by_role(MechanismRole.RECURRENT_INIT) + class TestValidateFeedDict: def test_empty_feed_dicts(self): @@ -499,6 +504,7 @@ def test_multiple_time_steps_2(self): comp._validate_feed_dict(feed_dict_origin, comp.get_mechanisms_by_role(MechanismRole.ORIGIN), "origin") comp._validate_feed_dict(feed_dict_terminal, comp.get_mechanisms_by_role(MechanismRole.TERMINAL), "terminal") + class TestGetMechanismsByRole: def test_multiple_roles(self): @@ -531,6 +537,7 @@ def test_nonexistent_role(self): with pytest.raises(CompositionError): comp.get_mechanisms_by_role(None) + class TestGraph: class TestProcessingGraph: @@ -738,6 +745,7 @@ def test_cycle_x_multiple_incoming(self): comp.graph_processing.comp_to_vertex[B], ]) + class TestRun: # def test_run_2_mechanisms_default_input_1(self): @@ -1084,6 +1092,7 @@ def test_LPP_two_origins_one_terminal(self): ) assert 250 == output[0][0] + class TestCallBeforeAfterTimescale: def test_call_before_record_timescale(self): @@ -1215,7 +1224,7 @@ def record_values(d, time_scale, *mechs): comp = [] for x in after[ts][mech]: try: - comp.append(x[0][0]) + comp.append(x[0]) except TypeError: comp.append(x) np.testing.assert_allclose(comp, after_expected[ts][mech], err_msg='Failed on after[{0}][{1}]'.format(ts, mech)) @@ -1322,7 +1331,7 @@ def record_values(d, time_scale, *mechs): comp = [] for x in after[ts][mech]: try: - comp.append(x[0][0]) + comp.append(x[0]) except TypeError: comp.append(x) np.testing.assert_allclose(comp, after_expected[ts][mech], err_msg='Failed on after[{0}][{1}]'.format(ts, mech)) @@ -1427,325 +1436,237 @@ def record_values(d, time_scale, *mechs): # # np.testing.assert_allclose(expected_Output_Layer_output, Output_Layer.output_values) +# Waiting to reintroduce ClampInput tests until we decide how this feature interacts with input specification -class TestClampInput: - - def test_run_5_mechanisms_2_origins_1_terminal_hard_clamp(self): - # HARD_CLAMP - - # recurrent projection ignored on the second execution of A - # __ - # | | - # 5 -#2-> x | - # 5 -#1-> A -^--> C -- - # ==> E - # 5 ----> B ----> D -- - - # 5 x 1 = 5 ----> 5 x 5 = 25 -- - # 25 + 25 = 50 ==> 50 * 5 = 250 - # 5 * 1 = 5 ----> 5 x 5 = 25 -- - - comp = Composition() - A = RecurrentTransferMechanism(name="A", function=Linear(slope=1.0)) - B = TransferMechanism(name="B", function=Linear(slope=1.0)) - C = TransferMechanism(name="C", function=Linear(slope=5.0)) - D = TransferMechanism(name="D", function=Linear(slope=5.0)) - E = TransferMechanism(name="E", function=Linear(slope=5.0)) - comp.add_mechanism(A) - comp.add_mechanism(B) - comp.add_mechanism(C) - comp.add_mechanism(D) - comp.add_projection(A, MappingProjection(sender=A, receiver=C), C) - comp.add_projection(B, MappingProjection(sender=B, receiver=D), D) - comp.add_mechanism(E) - comp.add_projection(C, MappingProjection(sender=C, receiver=E), E) - comp.add_projection(D, MappingProjection(sender=D, receiver=E), E) - comp._analyze_graph() - inputs_dict = { - A: [[5]], - B: [[5]] - } - sched = Scheduler(composition=comp) - sched.add_condition(A, EveryNPasses(1)) - sched.add_condition(B, EveryNCalls(A, 2)) - sched.add_condition(C, AfterNCalls(A, 2)) - sched.add_condition(D, AfterNCalls(A, 2)) - sched.add_condition(E, AfterNCalls(C, 1)) - sched.add_condition(E, AfterNCalls(D, 
1)) - output = comp.run( - inputs=inputs_dict, - scheduler_processing=sched, - clamp_input=HARD_CLAMP - ) - assert 250 == output[0][0] - - def test_run_5_mechanisms_2_origins_1_terminal_soft_clamp(self): - # recurrent projection combines with input on the second execution of A - # _r_ - # | | - # 5 -#2-> V | - # 5 -#1-> A --^ --> C -- - # ==> E - # 5 ----> B ------> D -- - - # 5 x 1 = 5 ----> 5 x 5 = 25 -- - # 25 + 25 = 50 ==> 50 * 5 = 250 - # 5 * 1 = 5 ----> 5 x 5 = 25 -- - - comp = Composition() - A = RecurrentTransferMechanism(name="A", function=Linear(slope=1.0)) - B = TransferMechanism(name="B", function=Linear(slope=1.0)) - C = TransferMechanism(name="C", function=Linear(slope=5.0)) - D = TransferMechanism(name="D", function=Linear(slope=5.0)) - E = TransferMechanism(name="E", function=Linear(slope=5.0)) - comp.add_mechanism(A) - comp.add_mechanism(B) - comp.add_mechanism(C) - comp.add_mechanism(D) - comp.add_projection(A, MappingProjection(sender=A, receiver=C), C) - comp.add_projection(B, MappingProjection(sender=B, receiver=D), D) - comp.add_mechanism(E) - comp.add_projection(C, MappingProjection(sender=C, receiver=E), E) - comp.add_projection(D, MappingProjection(sender=D, receiver=E), E) - comp._analyze_graph() - inputs_dict = { - A: [[5.]], - B: [[5.]] - } - sched = Scheduler(composition=comp) - sched.add_condition(A, EveryNPasses(1)) - sched.add_condition(B, EveryNCalls(A, 2)) - sched.add_condition(C, AfterNCalls(A, 2)) - sched.add_condition(D, AfterNCalls(A, 2)) - sched.add_condition(E, AfterNCalls(C, 1)) - sched.add_condition(E, AfterNCalls(D, 1)) - output = comp.run( - inputs=inputs_dict, - scheduler_processing=sched, - clamp_input=SOFT_CLAMP - ) - assert 375 == output[0][0] - - def test_run_5_mechanisms_2_origins_1_terminal_pulse_clamp(self): - # input ignored on the second execution of A - # __ - # | | - # V | - # 5 -#1-> A -^--> C -- - # ==> E - # 5 ----> B ----> D -- - - # 5 x 1 = 5 ----> 5 x 5 = 25 -- - # 25 + 25 = 50 ==> 50 * 5 = 250 - # 5 * 1 = 5 ----> 5 x 5 = 25 -- - - comp = Composition() - A = RecurrentTransferMechanism(name="A", function=Linear(slope=2.0)) - B = TransferMechanism(name="B", function=Linear(slope=1.0)) - C = TransferMechanism(name="C", function=Linear(slope=5.0)) - D = TransferMechanism(name="D", function=Linear(slope=5.0)) - E = TransferMechanism(name="E", function=Linear(slope=5.0)) - comp.add_mechanism(A) - comp.add_mechanism(B) - comp.add_mechanism(C) - comp.add_mechanism(D) - comp.add_projection(A, MappingProjection(sender=A, receiver=C), C) - comp.add_projection(B, MappingProjection(sender=B, receiver=D), D) - comp.add_mechanism(E) - comp.add_projection(C, MappingProjection(sender=C, receiver=E), E) - comp.add_projection(D, MappingProjection(sender=D, receiver=E), E) - comp._analyze_graph() - inputs_dict = { - A: [[5]], - B: [[5]] - } - sched = Scheduler(composition=comp) - sched.add_condition(A, EveryNPasses(1)) - sched.add_condition(B, EveryNCalls(A, 2)) - sched.add_condition(C, AfterNCalls(A, 2)) - sched.add_condition(D, AfterNCalls(A, 2)) - sched.add_condition(E, AfterNCalls(C, 1)) - sched.add_condition(E, AfterNCalls(D, 1)) - output = comp.run( - inputs=inputs_dict, - scheduler_processing=sched, - clamp_input=PULSE_CLAMP - ) - assert 625 == output[0][0] - - def test_run_5_mechanisms_2_origins_1_hard_clamp_1_soft_clamp(self): - - # __ - # | | - # V | - # 5 -#1-> A -^--> C -- - # ==> E - # 5 ----> B ----> D -- - - # v Recurrent - # 5 * 1 = (5 + 5) x 1 = 10 - # 5 x 1 = 5 ----> 10 x 5 = 50 -- - # 50 + 25 = 75 ==> 75 * 5 = 375 - # 5 * 1 = 5 
----> 5 x 5 = 25 -- - - comp = Composition() - A = RecurrentTransferMechanism(name="A", function=Linear(slope=1.0)) - B = RecurrentTransferMechanism(name="B", function=Linear(slope=1.0)) - C = TransferMechanism(name="C", function=Linear(slope=5.0)) - D = TransferMechanism(name="D", function=Linear(slope=5.0)) - E = TransferMechanism(name="E", function=Linear(slope=5.0)) - comp.add_mechanism(A) - comp.add_mechanism(B) - comp.add_mechanism(C) - comp.add_mechanism(D) - comp.add_projection(A, MappingProjection(sender=A, receiver=C), C) - comp.add_projection(B, MappingProjection(sender=B, receiver=D), D) - comp.add_mechanism(E) - comp.add_projection(C, MappingProjection(sender=C, receiver=E), E) - comp.add_projection(D, MappingProjection(sender=D, receiver=E), E) - comp._analyze_graph() - inputs_dict = { - A: [[5]], - B: [[5]] - } - sched = Scheduler(composition=comp) - sched.add_condition(A, EveryNPasses(1)) - sched.add_condition(B, EveryNPasses(1)) - sched.add_condition(B, EveryNCalls(A, 1)) - sched.add_condition(C, AfterNCalls(A, 2)) - sched.add_condition(D, AfterNCalls(A, 2)) - sched.add_condition(E, AfterNCalls(C, 1)) - sched.add_condition(E, AfterNCalls(D, 1)) - output = comp.run( - inputs=inputs_dict, - scheduler_processing=sched, - clamp_input={A: SOFT_CLAMP, - B: HARD_CLAMP} - ) - assert 375 == output[0][0] - - def test_run_5_mechanisms_2_origins_1_terminal_no_clamp(self): - # input ignored on all executions - # _r_ - # | | - # 0 -#2-> V | - # 0 -#1-> A -^--> C -- - # ==> E - # 0 ----> B ----> D -- - - # 1 * 2 + 1 = 3 - # 0 x 2 + 1 = 1 ----> 4 x 5 = 20 -- - # 20 + 5 = 25 ==> 25 * 5 = 125 - # 0 x 1 + 1 = 1 ----> 1 x 5 = 5 -- - - comp = Composition() - - A = RecurrentTransferMechanism(name="A", function=Linear(slope=2.0, intercept=5.0)) - B = RecurrentTransferMechanism(name="B", function=Linear(slope=1.0, intercept=1.0)) - C = TransferMechanism(name="C", function=Linear(slope=5.0)) - D = TransferMechanism(name="D", function=Linear(slope=5.0)) - E = TransferMechanism(name="E", function=Linear(slope=5.0)) - comp.add_mechanism(A) - comp.add_mechanism(B) - comp.add_mechanism(C) - comp.add_mechanism(D) - comp.add_projection(A, MappingProjection(sender=A, receiver=C), C) - comp.add_projection(B, MappingProjection(sender=B, receiver=D), D) - comp.add_mechanism(E) - comp.add_projection(C, MappingProjection(sender=C, receiver=E), E) - comp.add_projection(D, MappingProjection(sender=D, receiver=E), E) - comp._analyze_graph() - inputs_dict = { - A: [[100.0]], - B: [[500.0]] - } - sched = Scheduler(composition=comp) - sched.add_condition(A, EveryNPasses(1)) - sched.add_condition(B, EveryNCalls(A, 2)) - sched.add_condition(C, AfterNCalls(A, 2)) - sched.add_condition(D, AfterNCalls(A, 2)) - sched.add_condition(E, AfterNCalls(C, 1)) - sched.add_condition(E, AfterNCalls(D, 1)) - output = comp.run( - inputs=inputs_dict, - scheduler_processing=sched, - clamp_input=NO_CLAMP - ) - # FIX: This value is correct given that there is a BUG in Recurrent Transfer Mech -- - # Recurrent projection BEGINS with a value leftover from initialization - # (only shows up if the function has an additive component or default variable is not zero) - assert 925 == output[0][0] - - -class TestCallBeforeAfterTimescale: - - def test_call_before_record_timescale(self): - time_step_array = [] - trial_array = [] - pass_array = [] - - def cb_timestep(composition, scheduler, arr): - def record_timestep(): - - arr.append(scheduler.times[composition._execution_id][TimeScale.TIME_STEP][TimeScale.TIME_STEP]) - - return record_timestep - - 
def cb_pass(composition, scheduler, arr): - - def record_pass(): - - arr.append(scheduler.times[composition._execution_id][TimeScale.RUN][TimeScale.PASS]) - - return record_pass - - def cb_trial(composition, scheduler, arr): - - def record_trial(): - - arr.append(scheduler.times[composition._execution_id][TimeScale.LIFE][TimeScale.TRIAL]) - - return record_trial - - comp = Composition() - - A = TransferMechanism(name="A [transfer]", function=Linear(slope=2.0)) - B = TransferMechanism(name="B [transfer]", function=Linear(slope=5.0)) - comp.add_mechanism(A) - comp.add_mechanism(B) - comp.add_projection(A, MappingProjection(sender=A, receiver=B), B) - comp._analyze_graph() - inputs_dict = {A: [[1], [2], [3], [4]]} - sched = Scheduler(composition=comp) - - comp.run( - inputs=inputs_dict, - scheduler_processing=sched, - call_before_time_step=cb_timestep(comp, sched, time_step_array), - call_before_trial=cb_trial(comp, sched, trial_array), - call_before_pass=cb_pass(comp, sched, pass_array) - ) - - assert time_step_array == [0, 1, 0, 1, 0, 1, 0, 1] - assert trial_array == [0, 1, 2, 3] - assert pass_array == [0, 1, 2, 3] +# class TestClampInput: +# +# def test_run_5_mechanisms_2_origins_1_terminal_hard_clamp(self): +# +# comp = Composition() +# A = RecurrentTransferMechanism(name="composition-pytests-A", function=Linear(slope=1.0)) +# B = TransferMechanism(name="composition-pytests-B", function=Linear(slope=1.0)) +# C = TransferMechanism(name="composition-pytests-C", function=Linear(slope=5.0)) +# D = TransferMechanism(name="composition-pytests-D", function=Linear(slope=5.0)) +# E = TransferMechanism(name="composition-pytests-E", function=Linear(slope=5.0)) +# comp.add_mechanism(A) +# comp.add_mechanism(B) +# comp.add_mechanism(C) +# comp.add_mechanism(D) +# comp.add_projection(A, MappingProjection(sender=A, receiver=C), C) +# comp.add_projection(B, MappingProjection(sender=B, receiver=D), D) +# comp.add_mechanism(E) +# comp.add_projection(C, MappingProjection(sender=C, receiver=E), E) +# comp.add_projection(D, MappingProjection(sender=D, receiver=E), E) +# comp._analyze_graph() +# inputs_dict = { +# A: [[5]], +# B: [[5]] +# } +# sched = Scheduler(composition=comp) +# sched.add_condition(A, EveryNPasses(1)) +# sched.add_condition(B, EveryNCalls(A, 2)) +# sched.add_condition(C, AfterNCalls(A, 2)) +# sched.add_condition(D, AfterNCalls(A, 2)) +# sched.add_condition(E, AfterNCalls(C, 1)) +# sched.add_condition(E, AfterNCalls(D, 1)) +# output = comp.run( +# inputs=inputs_dict, +# scheduler_processing=sched, +# # clamp_input=HARD_CLAMP +# ) +# assert 250 == output[0][0] +# +# def test_run_5_mechanisms_2_origins_1_terminal_soft_clamp(self): +# +# comp = Composition() +# A = RecurrentTransferMechanism(name="composition-pytests-A", function=Linear(slope=1.0)) +# B = TransferMechanism(name="composition-pytests-B", function=Linear(slope=1.0)) +# C = TransferMechanism(name="composition-pytests-C", function=Linear(slope=5.0)) +# D = TransferMechanism(name="composition-pytests-D", function=Linear(slope=5.0)) +# E = TransferMechanism(name="composition-pytests-E", function=Linear(slope=5.0)) +# comp.add_mechanism(A) +# comp.add_mechanism(B) +# comp.add_mechanism(C) +# comp.add_mechanism(D) +# comp.add_projection(A, MappingProjection(sender=A, receiver=C), C) +# comp.add_projection(B, MappingProjection(sender=B, receiver=D), D) +# comp.add_mechanism(E) +# comp.add_projection(C, MappingProjection(sender=C, receiver=E), E) +# comp.add_projection(D, MappingProjection(sender=D, receiver=E), E) +# 
comp._analyze_graph() +# inputs_dict = { +# A: [[5.]], +# B: [[5.]] +# } +# sched = Scheduler(composition=comp) +# sched.add_condition(A, EveryNPasses(1)) +# sched.add_condition(B, EveryNCalls(A, 2)) +# sched.add_condition(C, AfterNCalls(A, 2)) +# sched.add_condition(D, AfterNCalls(A, 2)) +# sched.add_condition(E, AfterNCalls(C, 1)) +# sched.add_condition(E, AfterNCalls(D, 1)) +# output = comp.run( +# inputs=inputs_dict, +# scheduler_processing=sched, +# clamp_input=SOFT_CLAMP +# ) +# assert 375 == output[0][0] +# +# def test_run_5_mechanisms_2_origins_1_terminal_pulse_clamp(self): +# +# comp = Composition() +# A = RecurrentTransferMechanism(name="composition-pytests-A", function=Linear(slope=2.0)) +# B = TransferMechanism(name="composition-pytests-B", function=Linear(slope=1.0)) +# C = TransferMechanism(name="composition-pytests-C", function=Linear(slope=5.0)) +# D = TransferMechanism(name="composition-pytests-D", function=Linear(slope=5.0)) +# E = TransferMechanism(name="composition-pytests-E", function=Linear(slope=5.0)) +# comp.add_mechanism(A) +# comp.add_mechanism(B) +# comp.add_mechanism(C) +# comp.add_mechanism(D) +# comp.add_projection(A, MappingProjection(sender=A, receiver=C), C) +# comp.add_projection(B, MappingProjection(sender=B, receiver=D), D) +# comp.add_mechanism(E) +# comp.add_projection(C, MappingProjection(sender=C, receiver=E), E) +# comp.add_projection(D, MappingProjection(sender=D, receiver=E), E) +# comp._analyze_graph() +# inputs_dict = { +# A: [[5]], +# B: [[5]] +# } +# sched = Scheduler(composition=comp) +# sched.add_condition(A, EveryNPasses(1)) +# sched.add_condition(B, EveryNCalls(A, 2)) +# sched.add_condition(C, AfterNCalls(A, 2)) +# sched.add_condition(D, AfterNCalls(A, 2)) +# sched.add_condition(E, AfterNCalls(C, 1)) +# sched.add_condition(E, AfterNCalls(D, 1)) +# output = comp.run( +# inputs=inputs_dict, +# scheduler_processing=sched, +# clamp_input=PULSE_CLAMP +# ) +# assert 625 == output[0][0] +# +# def test_run_5_mechanisms_2_origins_1_hard_clamp_1_soft_clamp(self): +# +# # __ +# # | | +# # V | +# # 5 -#1-> A -^--> C -- +# # ==> E +# # 5 ----> B ----> D -- +# +# # v Recurrent +# # 5 * 1 = (5 + 5) x 1 = 10 +# # 5 x 1 = 5 ----> 10 x 5 = 50 -- +# # 50 + 25 = 75 ==> 75 * 5 = 375 +# # 5 * 1 = 5 ----> 5 x 5 = 25 -- +# +# comp = Composition() +# A = RecurrentTransferMechanism(name="composition-pytests-A", function=Linear(slope=1.0)) +# B = RecurrentTransferMechanism(name="composition-pytests-B", function=Linear(slope=1.0)) +# C = TransferMechanism(name="composition-pytests-C", function=Linear(slope=5.0)) +# D = TransferMechanism(name="composition-pytests-D", function=Linear(slope=5.0)) +# E = TransferMechanism(name="composition-pytests-E", function=Linear(slope=5.0)) +# comp.add_mechanism(A) +# comp.add_mechanism(B) +# comp.add_mechanism(C) +# comp.add_mechanism(D) +# comp.add_projection(A, MappingProjection(sender=A, receiver=C), C) +# comp.add_projection(B, MappingProjection(sender=B, receiver=D), D) +# comp.add_mechanism(E) +# comp.add_projection(C, MappingProjection(sender=C, receiver=E), E) +# comp.add_projection(D, MappingProjection(sender=D, receiver=E), E) +# comp._analyze_graph() +# inputs_dict = { +# A: [[5]], +# B: [[5]] +# } +# sched = Scheduler(composition=comp) +# sched.add_condition(A, EveryNPasses(1)) +# sched.add_condition(B, EveryNPasses(1)) +# sched.add_condition(B, EveryNCalls(A, 1)) +# sched.add_condition(C, AfterNCalls(A, 2)) +# sched.add_condition(D, AfterNCalls(A, 2)) +# sched.add_condition(E, AfterNCalls(C, 1)) +# 
sched.add_condition(E, AfterNCalls(D, 1)) +# output = comp.run( +# inputs=inputs_dict, +# scheduler_processing=sched, +# clamp_input={A: SOFT_CLAMP, +# B: HARD_CLAMP} +# ) +# assert 375 == output[0][0] +# +# def test_run_5_mechanisms_2_origins_1_terminal_no_clamp(self): +# # input ignored on all executions +# # _r_ +# # | | +# # 0 -#2-> V | +# # 0 -#1-> A -^--> C -- +# # ==> E +# # 0 ----> B ----> D -- +# +# # 1 * 2 + 1 = 3 +# # 0 x 2 + 1 = 1 ----> 4 x 5 = 20 -- +# # 20 + 5 = 25 ==> 25 * 5 = 125 +# # 0 x 1 + 1 = 1 ----> 1 x 5 = 5 -- +# +# comp = Composition() +# +# A = RecurrentTransferMechanism(name="composition-pytests-A", function=Linear(slope=2.0, intercept=5.0)) +# B = RecurrentTransferMechanism(name="composition-pytests-B", function=Linear(slope=1.0, intercept=1.0)) +# C = TransferMechanism(name="composition-pytests-C", function=Linear(slope=5.0)) +# D = TransferMechanism(name="composition-pytests-D", function=Linear(slope=5.0)) +# E = TransferMechanism(name="composition-pytests-E", function=Linear(slope=5.0)) +# comp.add_mechanism(A) +# comp.add_mechanism(B) +# comp.add_mechanism(C) +# comp.add_mechanism(D) +# comp.add_projection(A, MappingProjection(sender=A, receiver=C), C) +# comp.add_projection(B, MappingProjection(sender=B, receiver=D), D) +# comp.add_mechanism(E) +# comp.add_projection(C, MappingProjection(sender=C, receiver=E), E) +# comp.add_projection(D, MappingProjection(sender=D, receiver=E), E) +# comp._analyze_graph() +# inputs_dict = { +# A: [[100.0]], +# B: [[500.0]] +# } +# sched = Scheduler(composition=comp) +# sched.add_condition(A, EveryNPasses(1)) +# sched.add_condition(B, EveryNCalls(A, 2)) +# sched.add_condition(C, AfterNCalls(A, 2)) +# sched.add_condition(D, AfterNCalls(A, 2)) +# sched.add_condition(E, AfterNCalls(C, 1)) +# sched.add_condition(E, AfterNCalls(D, 1)) +# output = comp.run( +# inputs=inputs_dict, +# scheduler_processing=sched, +# clamp_input=NO_CLAMP +# ) +# # FIX: This value is correct given that there is a BUG in Recurrent Transfer Mech -- +# # Recurrent projection BEGINS with a value leftover from initialization +# # (only shows up if the function has an additive component or default variable is not zero) +# assert 925 == output[0][0] class TestSystemComposition: - def test_run_2_mechanisms_default_input_1(self): - sys = SystemComposition() - A = IntegratorMechanism(default_variable=1.0, function=Linear(slope=5.0)) - B = TransferMechanism(function=Linear(slope=5.0)) - sys.add_mechanism(A) - sys.add_mechanism(B) - sys.add_projection(A, MappingProjection(sender=A, receiver=B), B) - sys._analyze_graph() - sched = Scheduler(composition=sys) - output = sys.run( - scheduler_processing=sched - ) - assert 25 == output[0][0] + # def test_run_2_mechanisms_default_input_1(self): + # sys = SystemComposition() + # A = IntegratorMechanism(default_variable=1.0, function=Linear(slope=5.0)) + # B = TransferMechanism(function=Linear(slope=5.0)) + # sys.add_mechanism(A) + # sys.add_mechanism(B) + # sys.add_projection(A, MappingProjection(sender=A, receiver=B), B) + # sys._analyze_graph() + # sched = Scheduler(composition=sys) + # output = sys.run( + # scheduler_processing=sched + # ) + # assert 25 == output[0][0] def test_run_2_mechanisms_input_5(self): sys = SystemComposition() @@ -2054,6 +1975,7 @@ def record_values(d, time_scale, *mechs): # np.testing.assert_allclose(expected_Output_Layer_output, Output_Layer.output_values) +# Cannot test old syntax until we are ready for the current System and Process classes to create compositions # class TestOldSyntax: # # 
# new syntax pathway, old syntax system @@ -2418,7 +2340,6 @@ def test_one_pathway_inside_one_system(self): stimulus = {myMech1: [[1]]} # execute path (just for comparison) - print("EXECUTING PATH: ") myPath.run(inputs=stimulus) # create a SystemComposition | blank slate for composition @@ -2582,14 +2503,14 @@ def test_one_input_state_per_origin_two_origins(self): # 5 * 1 = 5 ----> 5 x 5 = 25 -- comp = Composition() - A = TransferMechanism(name="A", + A = TransferMechanism(name="composition-pytests-A", function=Linear(slope=1.0) ) - B = TransferMechanism(name="B", function=Linear(slope=1.0)) - C = TransferMechanism(name="C", function=Linear(slope=5.0)) - D = TransferMechanism(name="D", function=Linear(slope=5.0)) - E = TransferMechanism(name="E", function=Linear(slope=5.0)) + B = TransferMechanism(name="composition-pytests-B", function=Linear(slope=1.0)) + C = TransferMechanism(name="composition-pytests-C", function=Linear(slope=5.0)) + D = TransferMechanism(name="composition-pytests-D", function=Linear(slope=5.0)) + E = TransferMechanism(name="composition-pytests-E", function=Linear(slope=5.0)) comp.add_mechanism(A) comp.add_mechanism(B) comp.add_mechanism(C) @@ -2631,14 +2552,14 @@ def test_updating_input_values_for_second_execution(self): # 5 * 1 = 5 ----> 5 x 5 = 25 -- comp = Composition() - A = TransferMechanism(name="A", + A = TransferMechanism(name="composition-pytests-A", function=Linear(slope=1.0) ) - B = TransferMechanism(name="B", function=Linear(slope=1.0)) - C = TransferMechanism(name="C", function=Linear(slope=5.0)) - D = TransferMechanism(name="D", function=Linear(slope=5.0)) - E = TransferMechanism(name="E", function=Linear(slope=5.0)) + B = TransferMechanism(name="composition-pytests-B", function=Linear(slope=1.0)) + C = TransferMechanism(name="composition-pytests-C", function=Linear(slope=5.0)) + D = TransferMechanism(name="composition-pytests-D", function=Linear(slope=5.0)) + E = TransferMechanism(name="composition-pytests-E", function=Linear(slope=5.0)) comp.add_mechanism(A) comp.add_mechanism(B) comp.add_mechanism(C) @@ -2682,8 +2603,8 @@ def test_updating_input_values_for_second_execution(self): ) # add a new branch to the composition - F = TransferMechanism(name="F", function=Linear(slope=2.0)) - G = TransferMechanism(name="G", function=Linear(slope=2.0)) + F = TransferMechanism(name="composition-pytests-F", function=Linear(slope=2.0)) + G = TransferMechanism(name="composition-pytests-G", function=Linear(slope=2.0)) comp.add_mechanism(F) comp.add_mechanism(G) comp.add_projection(sender=F, projection=MappingProjection(sender=F, receiver=G), receiver=G) @@ -2711,12 +2632,12 @@ def test_updating_input_values_for_second_execution(self): def test_changing_origin_for_second_execution(self): comp = Composition() - A = TransferMechanism(name="A", + A = TransferMechanism(name="composition-pytests-A", function=Linear(slope=1.0) ) - B = TransferMechanism(name="B", function=Linear(slope=1.0)) - C = TransferMechanism(name="C", function=Linear(slope=5.0)) + B = TransferMechanism(name="composition-pytests-B", function=Linear(slope=1.0)) + C = TransferMechanism(name="composition-pytests-C", function=Linear(slope=5.0)) comp.add_mechanism(A) comp.add_mechanism(B) comp.add_mechanism(C) @@ -2734,7 +2655,7 @@ def test_changing_origin_for_second_execution(self): assert 25 == output[0][0] # add a new origin to the composition - F = TransferMechanism(name="F", function=Linear(slope=2.0)) + F = TransferMechanism(name="composition-pytests-F", function=Linear(slope=2.0)) comp.add_mechanism(F) 
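        # composition-pytests-F (slope 2.0) is wired upstream of A by the add_projection call
        # below, so the second run's stimulus is expected to enter the composition through F and
        # flow along F -> A -> B -> C. The assertions that follow compare the OutputState and
        # InputState objects themselves rather than their default name strings, confirming that
        # A's only afferent projection now comes from F's primary OutputState.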
comp.add_projection(sender=F, projection=MappingProjection(sender=F, receiver=A), receiver=A) @@ -2750,13 +2671,13 @@ def test_changing_origin_for_second_execution(self): scheduler_processing=sched ) - projections_to_A = [] - expected_projections_to_A = [("(OutputState RESULT)", "(InputState Default_InputState)")] + connections_to_A = [] + expected_connections_to_A = [(F.output_states[0], A.input_states[0])] for input_state in A.input_states: for p_a in input_state.path_afferents: - projections_to_A.append((str(p_a.sender), str(p_a.receiver))) + connections_to_A.append((p_a.sender, p_a.receiver)) - assert projections_to_A == expected_projections_to_A + assert connections_to_A == expected_connections_to_A assert 30 == output2[0][0] def test_two_input_states_new_inputs_second_trial(self): @@ -2766,7 +2687,7 @@ def test_two_input_states_new_inputs_second_trial(self): # default_variable=[[0], [0]], # ^ setting default_variable on the function actually does not matter -- does the mechanism update it? slope=1.0) - A = TransferMechanism(name="A", + A = TransferMechanism(name="composition-pytests-A", default_variable=[[0], [0]], input_states=[{NAME: "Input State 1", }, @@ -2776,30 +2697,26 @@ def test_two_input_states_new_inputs_second_trial(self): ) comp.add_mechanism(A) comp._analyze_graph() - inputs_dict = {A: {A.input_states[0]: [[5.]], - A.input_states[1]: [[5.]]} - } + inputs_dict = {A: [[5.], [5.]]} + sched = Scheduler(composition=comp) output = comp.run( inputs=inputs_dict, scheduler_processing=sched ) - inputs_dict2 = {A: {A.input_states[0]: [[2.]], - A.input_states[1]: [[4.]]} - } + inputs_dict2 = {A: [[2.], [4.]]} + output2 = comp.run( inputs=inputs_dict2, scheduler_processing=sched ) - assert 2. == A.input_states[0].value - assert 4. == A.input_states[1].value - assert "Input State 1" == A.input_states[0].name - assert "Input State 2" == A.input_states[1].name - assert 2. == A.variable[0] - assert 4. == A.variable[1] - assert 5 == output[0][0] - assert 2 == output2[0][0] + + assert np.allclose(A.input_states[0].value, [2.]) + assert np.allclose(A.input_states[1].value, [4.]) + assert np.allclose(A.variable, [[2.], [4.]]) + assert np.allclose(output, [[5.], [5.]]) + assert np.allclose(output2, [[2.], [4.]]) def test_two_input_states_new_origin_second_trial(self): @@ -2811,7 +2728,7 @@ def test_two_input_states_new_origin_second_trial(self): # ^ setting default_variable on the function actually does not matter -- does the mechanism update it? slope=1.0) A = TransferMechanism( - name="A", + name="composition-pytests-A", default_variable=[[0], [0]], input_states=[ {NAME: "Input State 1", }, @@ -2820,36 +2737,33 @@ def test_two_input_states_new_origin_second_trial(self): function=my_fun ) - B = TransferMechanism(name="B", function=Linear(slope=2.0)) - C = TransferMechanism(name="C", function=Linear(slope=5.0)) + B = TransferMechanism(name="composition-pytests-B", function=Linear(slope=2.0)) + C = TransferMechanism(name="composition-pytests-C", function=Linear(slope=5.0)) comp.add_mechanism(A) comp.add_mechanism(B) comp.add_mechanism(C) comp.add_projection(A, MappingProjection(sender=A, receiver=B), B) comp.add_projection(B, MappingProjection(sender=B, receiver=C), C) comp._analyze_graph() - inputs_dict = {A: {A.input_states[0]: [[5.]], - A.input_states[1]: [[5.]]} - } + + inputs_dict = {A: [[5.], [5.]]} + sched = Scheduler(composition=comp) output = comp.run( inputs=inputs_dict, scheduler_processing=sched ) - assert 5. == A.input_states[0].value - assert 5. 
== A.input_states[1].value - assert "Input State 1" == A.input_states[0].name - assert "Input State 2" == A.input_states[1].name - assert 5. == A.variable[0] - assert 5. == A.variable[1] - assert 50 == output[0][0] + assert np.allclose(A.input_states[0].value, [5.]) + assert np.allclose(A.input_states[1].value, [5.]) + assert np.allclose(A.variable, [[5.], [5.]]) + assert np.allclose(output, [[50.]]) # A --> B --> C # ^ # D __| D = TransferMechanism( - name="D", + name="composition-pytests-D", default_variable=[[0], [0]], input_states=[ {NAME: "Input State 1", }, @@ -2863,27 +2777,22 @@ def test_two_input_states_new_origin_second_trial(self): # again (sched, even though it is tied to comp, will not update according to changes in comp) comp._analyze_graph() sched = Scheduler(composition=comp) - inputs_dict2 = {A: {A.input_states[0]: [[2.]], - A.input_states[1]: [[4.]]}, - D: {D.input_states[0]: [[2.]], - D.input_states[1]: [[4.]]} } + + inputs_dict2 = {A: [[2.], [4.]], + D: [[2.], [4.]]} output2 = comp.run( inputs=inputs_dict2, scheduler_processing=sched ) - assert 2. == A.input_states[0].value - assert 4. == A.input_states[1].value - assert "Input State 1" == A.input_states[0].name - assert "Input State 2" == A.input_states[1].name - assert 2. == A.variable[0] - assert 4. == A.variable[1] - assert 2. == D.input_states[0].value - assert 4. == D.input_states[1].value - assert "Input State 1" == D.input_states[0].name - assert "Input State 2" == D.input_states[1].name - assert 2. == D.variable[0] - assert 4. == D.variable[1] - assert 40 == output2[0][0] + assert np.allclose(A.input_states[0].value, [2.]) + assert np.allclose(A.input_states[1].value, [4.]) + assert np.allclose(A.variable, [[2.], [4.]]) + + assert np.allclose(D.input_states[0].value, [2.]) + assert np.allclose(D.input_states[1].value, [4.]) + assert np.allclose(D.variable, [[2.], [4.]]) + + assert np.allclose(output2, [[40]]) class TestInputStateSpecifications: @@ -2892,7 +2801,7 @@ def test_two_input_states_created_with_dictionaries(self): comp = Composition() A = ProcessingMechanism( - name="A", + name="composition-pytests-A", default_variable=[[0], [0]], # input_states=[ # {NAME: "Input State 1", }, @@ -2928,7 +2837,7 @@ def test_two_input_states_created_first_with_deferred_init(self): reference_value=[0] ) A = TransferMechanism( - name="A", + name="composition-pytests-A", default_variable=[[0], [0]], input_states=[I1, I2], function=Linear(slope=1.0) @@ -2957,7 +2866,7 @@ def test_two_input_states_created_with_keyword(self): # create mechanism A A = TransferMechanism( - name="A", + name="composition-pytests-A", default_variable=[[0], [0]], input_states=[INPUT_STATE, INPUT_STATE], function=Linear(slope=1.0) @@ -2988,7 +2897,7 @@ def test_two_input_states_created_with_strings(self): # create mechanism A A = TransferMechanism( - name="A", + name="composition-pytests-A", default_variable=[[0], [0]], input_states=["Input State 1", "Input State 2"], function=Linear(slope=1.0) @@ -3018,7 +2927,7 @@ def test_two_input_states_created_with_values(self): # create mechanism A A = TransferMechanism( - name="A", + name="composition-pytests-A", default_variable=[[0], [0]], input_states=[[0.], [0.]], function=Linear(slope=1.0) @@ -3041,21 +2950,22 @@ def test_two_input_states_created_with_values(self): assert np.allclose(A.input_states[1].value, [4.0]) assert np.allclose(A.variable, [[2.0], [4.0]]) + class TestInputSpecifications: - def test_2_mechanisms_default_input_1(self): - comp = Composition() - A = 
IntegratorMechanism(default_variable=1.0, function=Linear(slope=5.0)) - B = TransferMechanism(function=Linear(slope=5.0)) - comp.add_mechanism(A) - comp.add_mechanism(B) - comp.add_projection(A, MappingProjection(sender=A, receiver=B), B) - comp._analyze_graph() - sched = Scheduler(composition=comp) - output = comp.run( - scheduler_processing=sched - ) - assert 25 == output[0][0] + # def test_2_mechanisms_default_input_1(self): + # comp = Composition() + # A = IntegratorMechanism(default_variable=1.0, function=Linear(slope=5.0)) + # B = TransferMechanism(function=Linear(slope=5.0)) + # comp.add_mechanism(A) + # comp.add_mechanism(B) + # comp.add_projection(A, MappingProjection(sender=A, receiver=B), B) + # comp._analyze_graph() + # sched = Scheduler(composition=comp) + # output = comp.run( + # scheduler_processing=sched + # ) + # assert 25 == output[0][0] def test_3_origins(self): comp = Composition() @@ -3068,21 +2978,21 @@ def test_3_origins(self): reference_value=[0] ) A = TransferMechanism( - name="A", + name="composition-pytests-A", default_variable=[[0], [0]], input_states=[I1, I2], function=Linear(slope=1.0) ) B = TransferMechanism( - name="B", + name="composition-pytests-B", default_variable=[0,0], function=Linear(slope=1.0)) C = TransferMechanism( - name="C", + name="composition-pytests-C", default_variable=[0, 0, 0], function=Linear(slope=1.0)) D = TransferMechanism( - name="D", + name="composition-pytests-D", default_variable=[0], function=Linear(slope=1.0)) comp.add_mechanism(A) @@ -3093,8 +3003,7 @@ def test_3_origins(self): comp.add_projection(B, MappingProjection(sender=B, receiver=D), D) comp.add_projection(C, MappingProjection(sender=C, receiver=D), D) comp._analyze_graph() - inputs = {A: {I1: [[0],[1],[2]], - I2: [[0],[1],[2]]}, + inputs = {A: [[[0],[0]], [[1],[1]], [[2],[2]]], B: [[0,0], [1,1], [2,2]], C: [[0,0,0], [1,1,1], [2,2,2]] From 6719f18c5cc1c575e6e1b0d24fa79da1ae2ebc47 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 26 Apr 2018 13:25:08 -0400 Subject: [PATCH 028/200] adding an 'output CIM' which has one output state for each terminal mechanism output state (each of the CIM output states copies its terminal output state's value) --- psyneulink/compositions/composition.py | 74 +++++++++++++++++++------- 1 file changed, 55 insertions(+), 19 deletions(-) diff --git a/psyneulink/compositions/composition.py b/psyneulink/compositions/composition.py index eb0f043bf1d..1e821f183a3 100644 --- a/psyneulink/compositions/composition.py +++ b/psyneulink/compositions/composition.py @@ -348,8 +348,10 @@ def __init__(self): self.graph = Graph() # Graph of the Composition self._graph_processing = None self.mechanisms = [] - self.CIM = CompositionInterfaceMechanism(name="Stimulus_CIM") - self.CIM_output_states = {} + self.input_CIM = CompositionInterfaceMechanism(name="Stimulus_CIM") + self.input_CIM_output_states = {} + self.output_CIM = CompositionInterfaceMechanism(name="Output_CIM") + self.output_CIM_output_states = {} self.execution_ids = [] self._scheduler_processing = None @@ -791,43 +793,71 @@ def _create_CIM_output_states(self): # FIX BUG: stimulus CIM output states are not properly destroyed when analyze graph is run multiple times # (extra mechanisms are marked as CIMs when graph is analyzed too early, so they create CIM output states) + # INPUT CIMS # loop over all origin mechanisms - current_input_states = set() + current_origin_input_states = set() for mech in self.get_mechanisms_by_role(MechanismRole.ORIGIN): for input_state in mech.input_states: # add it 
to our set of current input states - current_input_states.add(input_state) + current_origin_input_states.add(input_state) # if there is not a corresponding CIM output state, add one - if input_state not in set(self.CIM_output_states.keys()): - interface_output_state = OutputState(owner=self.CIM, + if input_state not in set(self.input_CIM_output_states.keys()): + interface_output_state = OutputState(owner=self.input_CIM, variable=input_state.variable, reference_value= input_state.variable, name="STIMULUS_CIM_" + mech.name + "_" + input_state.name) - # self.CIM.add_states(interface_output_state) - self.CIM.output_states.append(interface_output_state) - self.CIM_output_states[input_state] = interface_output_state + # self.input_CIM.add_states(interface_output_state) + self.input_CIM.output_states.append(interface_output_state) + self.input_CIM_output_states[input_state] = interface_output_state MappingProjection(sender=interface_output_state, receiver=input_state, matrix= IDENTITY_MATRIX, name="("+interface_output_state.name + ") to (" + input_state.owner.name + "-" + input_state.name+")") - sends_to_input_states = set(self.CIM_output_states.keys()) + sends_to_input_states = set(self.input_CIM_output_states.keys()) # For any output state still registered on the CIM that does not map to a corresponding ORIGIN mech I.S.: - for input_state in sends_to_input_states.difference(current_input_states): + for input_state in sends_to_input_states.difference(current_origin_input_states): for projection in input_state.path_afferents: - if projection.sender == self.CIM_output_states[input_state]: + if projection.sender == self.input_CIM_output_states[input_state]: # remove the corresponding projection from the ORIGIN mechanism's path afferents input_state.path_afferents.remove(projection) projection = None # remove the output state associated with this input state (this iteration) from the CIM output states - self.CIM.output_states.remove(self.CIM_output_states[input_state]) + self.input_CIM.output_states.remove(self.input_CIM_output_states[input_state]) # and from the dictionary of CIM output state/input state pairs - del self.CIM_output_states[input_state] + del self.input_CIM_output_states[input_state] + + # OUTPUT CIMS + # loop over all terminal mechanisms + current_terminal_output_states = set() + for mech in self.get_mechanisms_by_role(MechanismRole.TERMINAL): + for output_state in mech.output_states: + current_terminal_output_states.add(output_state) + # if there is not a corresponding CIM output state, add one + if output_state not in set(self.output_CIM_output_states.keys()): + interface_output_state = OutputState(owner=self.output_CIM, + variable=output_state.variable, + reference_value=output_state.variable, + name="OUTPUT_CIM_" + mech.name + "_" + output_state.name) + + self.output_CIM.output_states.append(interface_output_state) + self.output_CIM_output_states[output_state] = interface_output_state + # MappingProjection(sender=interface_output_state, + # receiver=output_state, + # matrix= IDENTITY_MATRIX, + # name="("+interface_output_state.name + ") to (" + # + output_state.owner.name + "-" + output_state.name+")") + + # - - - - - + previous_terminal_output_states = set(self.output_CIM_output_states.keys()) + for output_state in previous_terminal_output_states.difference(current_terminal_output_states): + self.output_CIM.output_states.remove(self.output_CIM_output_states[output_state]) + del self.output_CIM_output_states[output_state] def _assign_values_to_CIM_output_states(self, inputs): 
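        # Routes one trial's stimuli onto the stimulus (input) CIM: scalar inputs are first
        # promoted with np.atleast_2d, then entry i of each origin mechanism's value list is
        # written to the CIM OutputState registered for that mechanism's i-th InputState; any
        # origin mechanism omitted from `inputs` falls back to its default variable at the end
        # of this method.
        # Sketch of the corresponding run()-level input format (hypothetical names `comp` and
        # `two_input_mech`, a mechanism with two InputStates):
        #     comp.run(inputs={two_input_mech: [[2.], [4.]]})
        # delivers [2.] to two_input_mech.input_states[0] and [4.] to input_states[1].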
current_mechanisms = set() @@ -835,7 +865,7 @@ def _assign_values_to_CIM_output_states(self, inputs): if isinstance(inputs[key], (float, int)): inputs[key] = np.atleast_2d(inputs[key]) for i in range(len(inputs[key])): - self.CIM_output_states[key.input_states[i]].value = inputs[key][i] + self.input_CIM_output_states[key.input_states[i]].value = inputs[key][i] current_mechanisms.add(key) origins = self.get_mechanisms_by_role(MechanismRole.ORIGIN) @@ -844,7 +874,7 @@ def _assign_values_to_CIM_output_states(self, inputs): # is stored -- the point is that if an input is not supplied for an origin mechanism, the mechanism should use # its default variable value for mech in origins.difference(set(current_mechanisms)): - self.CIM_output_states[mech.input_state].value = mech.instance_defaults.variable + self.input_CIM_output_states[mech.input_state].value = mech.instance_defaults.variable def _assign_execution_ids(self, execution_id=None): @@ -868,7 +898,7 @@ def _assign_execution_ids(self, execution_id=None): # for k in self.input_mechanisms.keys(): # self.input_mechanisms[k]._execution_id = execution_id - self.CIM._execution_id = execution_id + self.input_CIM._execution_id = execution_id # self.target_CIM._execution_id = execution_id self._execution_id = execution_id @@ -1008,7 +1038,7 @@ def execute( mechanism.recurrent_projection.sender.value = [0.0] elif mechanism in no_clamp_inputs: for input_state in mechanism.input_states: - self.CIM_output_states[input_state].value = 0.0 + self.input_CIM_output_states[input_state].value = 0.0 # self.input_mechanisms[mechanism]._output_states[0].value = 0.0 if isinstance(mechanism, Mechanism): @@ -1022,7 +1052,7 @@ def execute( for input_state in mechanism.input_states: # clamp = None --> "turn off" input mechanism # self.input_mechanisms[mechanism]._output_states[0].value = 0 - self.CIM_output_states[input_state].value = 0 + self.input_CIM_output_states[input_state].value = 0 if call_after_time_step: call_after_time_step() @@ -1226,7 +1256,13 @@ def run( call_after_trial() scheduler_processing.clocks[execution_id]._increment_time(TimeScale.RUN) + terminal_mechanisms = self.get_mechanisms_by_role(MechanismRole.TERMINAL) + for terminal_mechanism in terminal_mechanisms: + for terminal_output_state in terminal_mechanisms.output_states: + CIM_output_state = self.output_CIM_output_states[terminal_output_state] + CIM_output_state.value = terminal_output_state.value + # return the output of the LAST mechanism executed in the composition return result From e134893cf53eb36595d67625476b9d2994000d50 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 26 Apr 2018 14:57:06 -0400 Subject: [PATCH 029/200] adding pytests for output CIMs and fixing minor bugs based on tests - removed duplicate CIM Output States --- psyneulink/compositions/composition.py | 6 +-- tests/composition/test_composition.py | 74 +++++++++++++++++++++++++- 2 files changed, 75 insertions(+), 5 deletions(-) diff --git a/psyneulink/compositions/composition.py b/psyneulink/compositions/composition.py index 1e821f183a3..350628cfdfd 100644 --- a/psyneulink/compositions/composition.py +++ b/psyneulink/compositions/composition.py @@ -809,7 +809,6 @@ def _create_CIM_output_states(self): reference_value= input_state.variable, name="STIMULUS_CIM_" + mech.name + "_" + input_state.name) # self.input_CIM.add_states(interface_output_state) - self.input_CIM.output_states.append(interface_output_state) self.input_CIM_output_states[input_state] = interface_output_state 
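                    # Passing owner=self.input_CIM when the OutputState is constructed evidently
                    # registers it on the CIM by itself (the explicit append that used to follow
                    # produced duplicate CIM OutputStates), so only the input_CIM_output_states
                    # bookkeeping entry is recorded here before the identity MappingProjection
                    # below wires the new OutputState to the origin mechanism's InputState.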
MappingProjection(sender=interface_output_state, receiver=input_state, @@ -845,7 +844,6 @@ def _create_CIM_output_states(self): reference_value=output_state.variable, name="OUTPUT_CIM_" + mech.name + "_" + output_state.name) - self.output_CIM.output_states.append(interface_output_state) self.output_CIM_output_states[output_state] = interface_output_state # MappingProjection(sender=interface_output_state, # receiver=output_state, @@ -1259,10 +1257,10 @@ def run( terminal_mechanisms = self.get_mechanisms_by_role(MechanismRole.TERMINAL) for terminal_mechanism in terminal_mechanisms: - for terminal_output_state in terminal_mechanisms.output_states: + for terminal_output_state in terminal_mechanism.output_states: CIM_output_state = self.output_CIM_output_states[terminal_output_state] CIM_output_state.value = terminal_output_state.value - + # return the output of the LAST mechanism executed in the composition return result diff --git a/tests/composition/test_composition.py b/tests/composition/test_composition.py index 8a2a595f957..0e93131174b 100644 --- a/tests/composition/test_composition.py +++ b/tests/composition/test_composition.py @@ -7,7 +7,7 @@ from psyneulink.components.functions.function import Linear, SimpleIntegrator from psyneulink.components.mechanisms.processing.integratormechanism import IntegratorMechanism -from psyneulink.components.mechanisms.processing.transfermechanism import TransferMechanism +from psyneulink.components.mechanisms.processing.transfermechanism import TransferMechanism, TRANSFER_OUTPUT from psyneulink.components.mechanisms.processing.processingmechanism import ProcessingMechanism from psyneulink.library.mechanisms.processing.transfer.recurrenttransfermechanism import RecurrentTransferMechanism from psyneulink.components.projections.pathway.mappingprojection import MappingProjection @@ -2794,6 +2794,78 @@ def test_two_input_states_new_origin_second_trial(self): assert np.allclose(output2, [[40]]) + def test_output_cim_one_terminal_mechanism_multiple_output_states(self): + + comp = Composition() + A = TransferMechanism(name="composition-pytests-A", + function=Linear(slope=1.0)) + B = TransferMechanism(name="composition-pytests-B", + function=Linear(slope=1.0)) + C = TransferMechanism(name="composition-pytests-C", + function=Linear(slope=2.0), + output_states=[TRANSFER_OUTPUT.RESULT, + TRANSFER_OUTPUT.VARIANCE]) + comp.add_mechanism(A) + comp.add_mechanism(B) + comp.add_mechanism(C) + + comp.add_projection(A, MappingProjection(sender=A, receiver=B), B) + comp.add_projection(B, MappingProjection(sender=B, receiver=C), C) + + comp._analyze_graph() + comp.run(inputs={A: [1.0]}) + + for CIM_output_state in comp.output_CIM_output_states: + # all CIM output state keys in the CIM --> Terminal mapping dict are on the actual output CIM + assert comp.output_CIM_output_states[CIM_output_state] in comp.output_CIM.output_states + + # all Terminal Output states are in the CIM --> Terminal mapping dict + assert C.output_states[0] in comp.output_CIM_output_states.keys() + assert C.output_states[1] in comp.output_CIM_output_states.keys() + + # May change to 2 in the future if we get rid of the original primary output state + assert len(comp.output_CIM.output_states) == 3 + + def test_output_cim_many_terminal_mechanisms(self): + + comp = Composition() + A = TransferMechanism(name="composition-pytests-A", + function=Linear(slope=1.0)) + B = TransferMechanism(name="composition-pytests-B", + function=Linear(slope=1.0)) + C = TransferMechanism(name="composition-pytests-C", + 
function=Linear(slope=2.0)) + D = TransferMechanism(name="composition-pytests-D", + function=Linear(slope=3.0)) + E = TransferMechanism(name="composition-pytests-E", + function=Linear(slope=4.0), + output_states=[TRANSFER_OUTPUT.RESULT, + TRANSFER_OUTPUT.VARIANCE]) + comp.add_mechanism(A) + comp.add_mechanism(B) + comp.add_mechanism(C) + comp.add_mechanism(D) + comp.add_mechanism(E) + comp.add_projection(A, MappingProjection(sender=A, receiver=B), B) + comp.add_projection(B, MappingProjection(sender=B, receiver=C), C) + comp.add_projection(B, MappingProjection(sender=B, receiver=D), D) + comp.add_projection(B, MappingProjection(sender=B, receiver=E), E) + comp._analyze_graph() + comp.run(inputs={A: [1.0]}) + + for CIM_output_state in comp.output_CIM_output_states: + # all CIM output state keys in the CIM --> Terminal mapping dict are on the actual output CIM + assert comp.output_CIM_output_states[CIM_output_state] in comp.output_CIM.output_states + + # all Terminal Output states are in the CIM --> Terminal mapping dict + assert C.output_state in comp.output_CIM_output_states.keys() + assert D.output_state in comp.output_CIM_output_states.keys() + assert E.output_states[0] in comp.output_CIM_output_states.keys() + assert E.output_states[1] in comp.output_CIM_output_states.keys() + + # May change to 4 in the future if we get rid of the original primary output state + assert len(comp.output_CIM.output_states) == 5 + class TestInputStateSpecifications: From 9b8d939c1d62dd7d7804cac7051833d74b6d9f02 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 26 Apr 2018 15:53:52 -0400 Subject: [PATCH 030/200] removing unreachable code - possibly leftover from merge --- psyneulink/compositions/composition.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/psyneulink/compositions/composition.py b/psyneulink/compositions/composition.py index 350628cfdfd..4d1095a6300 100644 --- a/psyneulink/compositions/composition.py +++ b/psyneulink/compositions/composition.py @@ -851,7 +851,6 @@ def _create_CIM_output_states(self): # name="("+interface_output_state.name + ") to (" # + output_state.owner.name + "-" + output_state.name+")") - # - - - - - previous_terminal_output_states = set(self.output_CIM_output_states.keys()) for output_state in previous_terminal_output_states.difference(current_terminal_output_states): self.output_CIM.output_states.remove(self.output_CIM_output_states[output_state]) @@ -913,10 +912,6 @@ def _identify_clamp_inputs(self, list_type, input_type, origins): else: return [] - # TODO: this is not stateful but necessary for current state of devel, 4/9/18; - # most likely this should be overriden by whatever is done on composition branch - return self._execution_id - def execute( self, inputs, From 86e784ef6340c30136d7371b7f3c357f881b4f66 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 26 Apr 2018 17:19:46 -0400 Subject: [PATCH 031/200] refactoring runtime params in terms of parameter state value vs attribute value (base value) --- psyneulink/components/component.py | 97 +++++++++++++++++++----------- tests/system/test_system.py | 19 ++++++ 2 files changed, 80 insertions(+), 36 deletions(-) diff --git a/psyneulink/components/component.py b/psyneulink/components/component.py index 09a18eaa92f..51edaade0c5 100644 --- a/psyneulink/components/component.py +++ b/psyneulink/components/component.py @@ -1697,50 +1697,75 @@ def _check_args(self, variable=None, params=None, target_set=None, context=None) if param_name in {FUNCTION, INPUT_STATES, OUTPUT_STATES}: continue # If param is 
specified in runtime_params, then assign it - if param_name in runtime_params: - self.paramsCurrent[param_name] = runtime_params[param_name] - # Otherwise, (re-)assign to paramInstanceDefaults + if self.runtimeParamStickyAssignmentPref: + if param_name in runtime_params: + # self.paramsCurrent[param_name] = runtime_params[param_name] + if hasattr(self, "parameter_states"): + if param_name in self.parameter_states: + self.parameter_states[param_name].value = self.parameter_states[param_name].execute(runtime_params[param_name]) + elif hasattr(self.owner, "parameter_states"): + if param_name in self.owner.parameter_states: + self.owner.parameter_states[param_name].value = self.owner.parameter_states[param_name].execute(runtime_params[param_name]) + else: + print("no parameter state ") + else: + if param_name in runtime_params: + setattr(self, param_name, runtime_params[param_name]) + if hasattr(self, "parameter_states"): + if param_name in self.parameter_states: + self.parameter_states[param_name].value = self.parameter_states[param_name].execute(runtime_params[param_name]) + elif hasattr(self.owner, "parameter_states"): + if param_name in self.owner.parameter_states: + self.owner.parameter_states[param_name].value = self.owner.parameter_states[ + param_name].execute(runtime_params[param_name]) + else: + print("no parameter state ") + + # Otherwise, (re-)assign to paramInstanceDefaults # this insures that any params that were assigned as runtime on last execution are reset here # (unless they have been assigned another runtime value) - elif not self.runtimeParamStickyAssignmentPref: - if param_name is FUNCTION_PARAMS: - for function_param in self.function_object.user_params: - self.function_object.paramsCurrent[function_param] = \ - self.function_object.paramInstanceDefaults[function_param] - continue - self.paramsCurrent[param_name] = self.paramInstanceDefaults[param_name] + # elif not self.runtimeParamStickyAssignmentPref: + # if param_name is FUNCTION_PARAMS: + # for function_param in self.function_object.user_params: + # self.function_object.paramsCurrent[function_param] = \ + # self.function_object.paramInstanceDefaults[function_param] + # continue + # self.paramsCurrent[param_name] = self.paramInstanceDefaults[param_name] self.runtime_params_in_use = True # CW 1/24/18: This elif block appears to be accidentally deleting self.input_states - # Otherwise, reset paramsCurrent to paramInstanceDefaults - elif self.runtime_params_in_use and not self.runtimeParamStickyAssignmentPref: - # Can't do the following since function could still be a class ref rather than abound method (see below) - # self.paramsCurrent = self.paramInstanceDefaults - for param_name in self.user_params: - # IMPLEMENTATION NOTE: FUNCTION_RUNTIME_PARAM_NOT_SUPPORTED - # At present, assignment of ``function`` as runtime param is not supported - # (this is because paramInstanceDefaults[FUNCTION] could be a class rather than an bound method; - # i.e., not yet instantiated; could be rectified by assignment in _instantiate_function) - if param_name is FUNCTION: - continue - if param_name is FUNCTION_PARAMS: - for function_param in self.function_object.user_params: - self.function_object.paramsCurrent[function_param] = \ - self.function_object.paramInstanceDefaults[function_param] - continue - self.paramsCurrent[param_name] = self.paramInstanceDefaults[param_name] + # KAM 4/26/18: commenting out elif block to prevent deletion of self.input states -- will reinstate once + # runtime params are working - self.runtime_params_in_use 
= False + # Otherwise, reset paramsCurrent to paramInstanceDefaults + # elif self.runtime_params_in_use and not self.runtimeParamStickyAssignmentPref: + # # Can't do the following since function could still be a class ref rather than abound method (see below) + # # self.paramsCurrent = self.paramInstanceDefaults + # for param_name in self.user_params: + # # IMPLEMENTATION NOTE: FUNCTION_RUNTIME_PARAM_NOT_SUPPORTED + # # At present, assignment of ``function`` as runtime param is not supported + # # (this is because paramInstanceDefaults[FUNCTION] could be a class rather than an bound method; + # # i.e., not yet instantiated; could be rectified by assignment in _instantiate_function) + # if param_name is FUNCTION: + # continue + # if param_name is FUNCTION_PARAMS: + # for function_param in self.function_object.user_params: + # self.function_object.paramsCurrent[function_param] = \ + # self.function_object.paramInstanceDefaults[function_param] + # continue + # self.paramsCurrent[param_name] = self.paramInstanceDefaults[param_name] + # + # self.runtime_params_in_use = False # If parameter_validation is set and they have changed, then validate requested values and assign to target_set - if self.prefs.paramValidationPref and params and not params is target_set: - curr_context = self.context.initialization_status - self.context.initialization_status = ContextFlags.VALIDATING - try: - self._validate_params(variable=variable, request_set=params, target_set=target_set, context=context) - except TypeError: - self._validate_params(request_set=params, target_set=target_set, context=context) - self.context.initialization_status = curr_context + # if self.prefs.paramValidationPref and params and not params is target_set: + # curr_context = self.context.initialization_status + # self.context.initialization_status = ContextFlags.VALIDATING + # try: + # self._validate_params(variable=variable, request_set=params, target_set=target_set, context=context) + # except TypeError: + # self._validate_params(request_set=params, target_set=target_set, context=context) + # self.context.initialization_status = curr_context return variable diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 43a22456727..a20235b7f5c 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -105,6 +105,7 @@ def test_danglingControlledMech(): # no assert, should only complete without error + class TestInputSpecsDocumentationExamples: def test_example_1(self): @@ -429,6 +430,7 @@ def store_inputs(): p1.execute(input_dictionary) + class TestInputSpecsHeterogeneousVariables: def test_heterogeneous_variables_drop_outer_list(self): @@ -458,6 +460,8 @@ def test_heterogeneous_variables(self): inputs = {a: [[[1.1], [2.1, 2.1]], [[1.2], [2.2, 2.2]]]} s.run(inputs) + + class TestGraphAndInput: def test_branch(self): @@ -643,6 +647,8 @@ def cyclic_extended_loop(self): assert d.systems[s] == TERMINAL assert e.systems[s] == ORIGIN assert f.systems[s] == INITIALIZE_CYCLE + + class TestInitialize: def test_initialize_mechanisms(self): @@ -672,3 +678,16 @@ def test_initialize_mechanisms(self): # Run 1 --> Execution 1: 1 + 2 = 3 | Execution 2: 3 + 2 = 5 | Execution 3: 5 + 3 = 8 # Run 2 --> Execution 1: 8 + 1 = 9 | Execution 2: 9 + 2 = 11 | Execution 3: 11 + 3 = 14 assert np.allclose(C.log.nparray_dictionary('value')['value'], [[[3]], [[5]], [[8]], [[9]], [[11]], [[14]]]) + +class TestRuntimeParams: + + def test_mechanism_execute(self): + T = TransferMechanism() + print(T.input_states) + 
T.execute(runtime_params={"slope": 10.0}, input=2.0) + print(T.function_object.slope) + print(T.parameter_states['slope'].value) + print(T.value) + print(T.input_states) + T.execute(input=2.0) + print(T.value) \ No newline at end of file From b5c912a8fbf2778738670d25cf17494fe9fe2df2 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 2 May 2018 13:41:18 -0400 Subject: [PATCH 032/200] refactoring runtime params to use parameter state values and get_current_function_param/get_current_mechanism_param methods --- psyneulink/components/component.py | 146 ++++++++++++++++------------- tests/system/test_system.py | 18 ++-- 2 files changed, 94 insertions(+), 70 deletions(-) diff --git a/psyneulink/components/component.py b/psyneulink/components/component.py index 72bfe9ed6f1..8aca0af68b9 100644 --- a/psyneulink/components/component.py +++ b/psyneulink/components/component.py @@ -1681,76 +1681,94 @@ def _check_args(self, variable=None, params=None, target_set=None, context=None) # If params have been passed, treat as runtime params and assign to paramsCurrent # (relabel params as runtime_params for clarity) runtime_params = params - if runtime_params and runtime_params is not None: - for param_name in self.user_params: - # Ignore input_states and output_states -- they should not be modified during run - # IMPLEMENTATION NOTE: - # FUNCTION_RUNTIME_PARAM_NOT_SUPPORTED: - # At present, assignment of ``function`` as runtime param is not supported - # (this is because paramInstanceDefaults[FUNCTION] could be a class rather than an bound method; - # i.e., not yet instantiated; could be rectified by assignment in _instantiate_function) - if param_name in {FUNCTION, INPUT_STATES, OUTPUT_STATES}: - continue - # If param is specified in runtime_params, then assign it - if self.runtimeParamStickyAssignmentPref: - if param_name in runtime_params: - # self.paramsCurrent[param_name] = runtime_params[param_name] - if hasattr(self, "parameter_states"): - if param_name in self.parameter_states: - self.parameter_states[param_name].value = self.parameter_states[param_name].execute(runtime_params[param_name]) - elif hasattr(self.owner, "parameter_states"): - if param_name in self.owner.parameter_states: - self.owner.parameter_states[param_name].value = self.owner.parameter_states[param_name].execute(runtime_params[param_name]) - else: - print("no parameter state ") + self.runtime_params = {} + if runtime_params: + for param_name in runtime_params: + if hasattr(self, "parameter_states"): + if param_name in self.parameter_states: + self.parameter_states[param_name].value = self.parameter_states[param_name].execute(runtime_params[param_name], context=ContextFlags.EXECUTING) + elif hasattr(self, "owner"): + if hasattr(self.owner, "parameter_states"): + if param_name in self.owner.parameter_states: + self.owner.parameter_states[param_name].value = self.owner.parameter_states[param_name].execute(runtime_params[param_name], context=ContextFlags.EXECUTING) + else: + self.runtime_params[param_name] = runtime_params[param_name] + print("no parameter state") else: - if param_name in runtime_params: - setattr(self, param_name, runtime_params[param_name]) - if hasattr(self, "parameter_states"): - if param_name in self.parameter_states: - self.parameter_states[param_name].value = self.parameter_states[param_name].execute(runtime_params[param_name]) - elif hasattr(self.owner, "parameter_states"): - if param_name in self.owner.parameter_states: - self.owner.parameter_states[param_name].value = self.owner.parameter_states[ - 
param_name].execute(runtime_params[param_name]) - else: - print("no parameter state ") - - # Otherwise, (re-)assign to paramInstanceDefaults - # this insures that any params that were assigned as runtime on last execution are reset here - # (unless they have been assigned another runtime value) - # elif not self.runtimeParamStickyAssignmentPref: - # if param_name is FUNCTION_PARAMS: - # for function_param in self.function_object.user_params: - # self.function_object.paramsCurrent[function_param] = \ - # self.function_object.paramInstanceDefaults[function_param] - # continue - # self.paramsCurrent[param_name] = self.paramInstanceDefaults[param_name] - self.runtime_params_in_use = True - - # CW 1/24/18: This elif block appears to be accidentally deleting self.input_states - # KAM 4/26/18: commenting out elif block to prevent deletion of self.input states -- will reinstate once - # runtime params are working - - # Otherwise, reset paramsCurrent to paramInstanceDefaults - # elif self.runtime_params_in_use and not self.runtimeParamStickyAssignmentPref: - # # Can't do the following since function could still be a class ref rather than abound method (see below) - # # self.paramsCurrent = self.paramInstanceDefaults + self.runtime_params[param_name] = runtime_params[param_name] + print("no parameter state") + + # runtime_params = params + # if runtime_params and runtime_params is not None: # for param_name in self.user_params: - # # IMPLEMENTATION NOTE: FUNCTION_RUNTIME_PARAM_NOT_SUPPORTED - # # At present, assignment of ``function`` as runtime param is not supported + # # Ignore input_states and output_states -- they should not be modified during run + # # IMPLEMENTATION NOTE: + # # FUNCTION_RUNTIME_PARAM_NOT_SUPPORTED: + # # At present, assignment of ``function`` as runtime param is not supported # # (this is because paramInstanceDefaults[FUNCTION] could be a class rather than an bound method; # # i.e., not yet instantiated; could be rectified by assignment in _instantiate_function) - # if param_name is FUNCTION: - # continue - # if param_name is FUNCTION_PARAMS: - # for function_param in self.function_object.user_params: - # self.function_object.paramsCurrent[function_param] = \ - # self.function_object.paramInstanceDefaults[function_param] + # if param_name in {FUNCTION, INPUT_STATES, OUTPUT_STATES}: # continue - # self.paramsCurrent[param_name] = self.paramInstanceDefaults[param_name] + # # If param is specified in runtime_params, then assign it + # if self.runtimeParamStickyAssignmentPref: + # if param_name in runtime_params: + # # self.paramsCurrent[param_name] = runtime_params[param_name] + # if hasattr(self, "parameter_states"): + # if param_name in self.parameter_states: + # self.parameter_states[param_name].value = self.parameter_states[param_name].execute(runtime_params[param_name]) + # elif hasattr(self.owner, "parameter_states"): + # if param_name in self.owner.parameter_states: + # self.owner.parameter_states[param_name].value = self.owner.parameter_states[param_name].execute(runtime_params[param_name]) + # else: + # print("no parameter state ") + # else: + # if param_name in runtime_params: + # setattr(self, param_name, runtime_params[param_name]) + # if hasattr(self, "parameter_states"): + # if param_name in self.parameter_states: + # self.parameter_states[param_name].value = self.parameter_states[param_name].execute(runtime_params[param_name]) + # elif hasattr(self.owner, "parameter_states"): + # if param_name in self.owner.parameter_states: + # 
self.owner.parameter_states[param_name].value = self.owner.parameter_states[ + # param_name].execute(runtime_params[param_name]) + # else: + # print("no parameter state ") + # + # # Otherwise, (re-)assign to paramInstanceDefaults + # # this insures that any params that were assigned as runtime on last execution are reset here + # # (unless they have been assigned another runtime value) + # # elif not self.runtimeParamStickyAssignmentPref: + # # if param_name is FUNCTION_PARAMS: + # # for function_param in self.function_object.user_params: + # # self.function_object.paramsCurrent[function_param] = \ + # # self.function_object.paramInstanceDefaults[function_param] + # # continue + # # self.paramsCurrent[param_name] = self.paramInstanceDefaults[param_name] + # self.runtime_params_in_use = True + # + # # CW 1/24/18: This elif block appears to be accidentally deleting self.input_states + # # KAM 4/26/18: commenting out elif block to prevent deletion of self.input states -- will reinstate once + # # runtime params are working # - # self.runtime_params_in_use = False + # # Otherwise, reset paramsCurrent to paramInstanceDefaults + # # elif self.runtime_params_in_use and not self.runtimeParamStickyAssignmentPref: + # # # Can't do the following since function could still be a class ref rather than abound method (see below) + # # # self.paramsCurrent = self.paramInstanceDefaults + # # for param_name in self.user_params: + # # # IMPLEMENTATION NOTE: FUNCTION_RUNTIME_PARAM_NOT_SUPPORTED + # # # At present, assignment of ``function`` as runtime param is not supported + # # # (this is because paramInstanceDefaults[FUNCTION] could be a class rather than an bound method; + # # # i.e., not yet instantiated; could be rectified by assignment in _instantiate_function) + # # if param_name is FUNCTION: + # # continue + # # if param_name is FUNCTION_PARAMS: + # # for function_param in self.function_object.user_params: + # # self.function_object.paramsCurrent[function_param] = \ + # # self.function_object.paramInstanceDefaults[function_param] + # # continue + # # self.paramsCurrent[param_name] = self.paramInstanceDefaults[param_name] + # # + # # self.runtime_params_in_use = False # If parameter_validation is set and they have changed, then validate requested values and assign to target_set # if self.prefs.paramValidationPref and params and not params is target_set: diff --git a/tests/system/test_system.py b/tests/system/test_system.py index a20235b7f5c..1396808a13f 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -683,11 +683,17 @@ class TestRuntimeParams: def test_mechanism_execute(self): T = TransferMechanism() - print(T.input_states) + + print(" - - - - - after construction - - - - - ") + print("function attr value = ", T.function_object.slope) + print("parameter state value = ", T.parameter_states['slope'].value) T.execute(runtime_params={"slope": 10.0}, input=2.0) - print(T.function_object.slope) - print(T.parameter_states['slope'].value) - print(T.value) - print(T.input_states) + print(" - - - - - after running with runtime params - - - - - ") + print("function attr value = ", T.function_object.slope) + print("parameter state value = ", T.parameter_states['slope'].value) + print("mechanism value = \n\n", T.value) T.execute(input=2.0) - print(T.value) \ No newline at end of file + print(" - - - - - after executing again without runtime params - - - - - ") + print("function attr value = ", T.function_object.slope) + print("parameter state value = ", 
T.parameter_states['slope'].value) + print("mechanism value = \n\n", T.value) From 90feb7e90b352596950b1bd6f4c5127a6e7f39d9 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 2 May 2018 17:18:03 -0400 Subject: [PATCH 033/200] refactoring runtime params around a new dictionary - component._runtime_params_reset - which stores the value of an attr right before it was updated by a runtime param. This allows runtime params to reset attrs directly, then all are reset to original values on next execution --- psyneulink/components/component.py | 123 ++++---------------- psyneulink/components/functions/function.py | 61 ++-------- 2 files changed, 36 insertions(+), 148 deletions(-) diff --git a/psyneulink/components/component.py b/psyneulink/components/component.py index 8aca0af68b9..790e77313d9 100644 --- a/psyneulink/components/component.py +++ b/psyneulink/components/component.py @@ -703,7 +703,7 @@ class Component(object): + paramsCurrent # + parameter_validation + user_params - + runtime_params_in_use + +._runtime_params_reset + recording Instance methods: @@ -1019,7 +1019,7 @@ def __init__(self, default_set=self.paramClassDefaults, # source set from which missing params are assigned context=context) - self.runtime_params_in_use = False + self._runtime_params_reset = {} # KDM: this is a poorly implemented hack that stops the .update call from # starting off a chain of assignment/validation calls that ends up @@ -1623,6 +1623,19 @@ def _create_attributes_for_params(self, make_as_properties=False, **kwargs): for arg_name, arg_value in kwargs.items(): setattr(self, arg_name, arg_value) + def _set_parameter_value(self, param, val): + setattr(self, param, val) + if hasattr(self, "parameter_states"): + if param in self.parameter_states: + new_state_value = self.parameter_states[param].execute(context=ContextFlags.EXECUTING) + self.parameter_states[param].value = new_state_value + elif hasattr(self, "owner"): + if hasattr(self.owner, "parameter_states"): + if param in self.owner.parameter_states: + new_state_value = self.owner.parameter_states[param].execute( + context=ContextFlags.EXECUTING) + self.owner.parameter_states[param].value = new_state_value + def _check_args(self, variable=None, params=None, target_set=None, context=None): """validate variable and params, instantiate variable (if necessary) and assign any runtime params. 
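The hunks that follow replace the old sticky-assignment branches with the store-and-restore scheme built on _runtime_params_reset and _set_parameter_value. A minimal standalone sketch of that scheme is given below; SimpleComponent and its methods are simplified stand-ins rather than the PsyNeuLink API, and ParameterState updating and ContextFlags are deliberately omitted.

# Illustrative sketch of the store-and-restore pattern this patch introduces.
# SimpleComponent is a stand-in, not a PsyNeuLink class.
class SimpleComponent:
    def __init__(self, slope=1.0, intercept=0.0):
        self.slope = slope
        self.intercept = intercept
        self._runtime_params_reset = {}   # attr name -> value just before the override

    def _check_args(self, runtime_params=None):
        # Restore whatever was overridden on the previous execution ...
        for name, previous_value in self._runtime_params_reset.items():
            setattr(self, name, previous_value)
        self._runtime_params_reset = {}
        # ... then apply this execution's overrides, remembering the current values first
        for name, value in (runtime_params or {}).items():
            if hasattr(self, name):
                self._runtime_params_reset[name] = getattr(self, name)
                setattr(self, name, value)

    def execute(self, x, runtime_params=None):
        self._check_args(runtime_params)
        return self.slope * x + self.intercept

c = SimpleComponent()
assert c.execute(2.0, runtime_params={'slope': 10.0}) == 20.0   # one-shot override
assert c.execute(2.0) == 2.0                                    # slope restored

Because the reset target is the value captured immediately before the override, rather than an instance default, an attribute assigned directly between executions keeps its new value; that is the behavior the test_runtime_params_reset_to_most_recent_val test added in the next patch expects.
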
@@ -1680,105 +1693,19 @@ def _check_args(self, variable=None, params=None, target_set=None, context=None) # If params have been passed, treat as runtime params and assign to paramsCurrent # (relabel params as runtime_params for clarity) + if not self.runtimeParamStickyAssignmentPref: + for key in self._runtime_params_reset: + self._set_parameter_value(key, self._runtime_params_reset[key]) + self._runtime_params_reset = {} + runtime_params = params - self.runtime_params = {} if runtime_params: for param_name in runtime_params: - if hasattr(self, "parameter_states"): - if param_name in self.parameter_states: - self.parameter_states[param_name].value = self.parameter_states[param_name].execute(runtime_params[param_name], context=ContextFlags.EXECUTING) - elif hasattr(self, "owner"): - if hasattr(self.owner, "parameter_states"): - if param_name in self.owner.parameter_states: - self.owner.parameter_states[param_name].value = self.owner.parameter_states[param_name].execute(runtime_params[param_name], context=ContextFlags.EXECUTING) - else: - self.runtime_params[param_name] = runtime_params[param_name] - print("no parameter state") - else: - self.runtime_params[param_name] = runtime_params[param_name] - print("no parameter state") - - # runtime_params = params - # if runtime_params and runtime_params is not None: - # for param_name in self.user_params: - # # Ignore input_states and output_states -- they should not be modified during run - # # IMPLEMENTATION NOTE: - # # FUNCTION_RUNTIME_PARAM_NOT_SUPPORTED: - # # At present, assignment of ``function`` as runtime param is not supported - # # (this is because paramInstanceDefaults[FUNCTION] could be a class rather than an bound method; - # # i.e., not yet instantiated; could be rectified by assignment in _instantiate_function) - # if param_name in {FUNCTION, INPUT_STATES, OUTPUT_STATES}: - # continue - # # If param is specified in runtime_params, then assign it - # if self.runtimeParamStickyAssignmentPref: - # if param_name in runtime_params: - # # self.paramsCurrent[param_name] = runtime_params[param_name] - # if hasattr(self, "parameter_states"): - # if param_name in self.parameter_states: - # self.parameter_states[param_name].value = self.parameter_states[param_name].execute(runtime_params[param_name]) - # elif hasattr(self.owner, "parameter_states"): - # if param_name in self.owner.parameter_states: - # self.owner.parameter_states[param_name].value = self.owner.parameter_states[param_name].execute(runtime_params[param_name]) - # else: - # print("no parameter state ") - # else: - # if param_name in runtime_params: - # setattr(self, param_name, runtime_params[param_name]) - # if hasattr(self, "parameter_states"): - # if param_name in self.parameter_states: - # self.parameter_states[param_name].value = self.parameter_states[param_name].execute(runtime_params[param_name]) - # elif hasattr(self.owner, "parameter_states"): - # if param_name in self.owner.parameter_states: - # self.owner.parameter_states[param_name].value = self.owner.parameter_states[ - # param_name].execute(runtime_params[param_name]) - # else: - # print("no parameter state ") - # - # # Otherwise, (re-)assign to paramInstanceDefaults - # # this insures that any params that were assigned as runtime on last execution are reset here - # # (unless they have been assigned another runtime value) - # # elif not self.runtimeParamStickyAssignmentPref: - # # if param_name is FUNCTION_PARAMS: - # # for function_param in self.function_object.user_params: - # # 
self.function_object.paramsCurrent[function_param] = \ - # # self.function_object.paramInstanceDefaults[function_param] - # # continue - # # self.paramsCurrent[param_name] = self.paramInstanceDefaults[param_name] - # self.runtime_params_in_use = True - # - # # CW 1/24/18: This elif block appears to be accidentally deleting self.input_states - # # KAM 4/26/18: commenting out elif block to prevent deletion of self.input states -- will reinstate once - # # runtime params are working - # - # # Otherwise, reset paramsCurrent to paramInstanceDefaults - # # elif self.runtime_params_in_use and not self.runtimeParamStickyAssignmentPref: - # # # Can't do the following since function could still be a class ref rather than abound method (see below) - # # # self.paramsCurrent = self.paramInstanceDefaults - # # for param_name in self.user_params: - # # # IMPLEMENTATION NOTE: FUNCTION_RUNTIME_PARAM_NOT_SUPPORTED - # # # At present, assignment of ``function`` as runtime param is not supported - # # # (this is because paramInstanceDefaults[FUNCTION] could be a class rather than an bound method; - # # # i.e., not yet instantiated; could be rectified by assignment in _instantiate_function) - # # if param_name is FUNCTION: - # # continue - # # if param_name is FUNCTION_PARAMS: - # # for function_param in self.function_object.user_params: - # # self.function_object.paramsCurrent[function_param] = \ - # # self.function_object.paramInstanceDefaults[function_param] - # # continue - # # self.paramsCurrent[param_name] = self.paramInstanceDefaults[param_name] - # # - # # self.runtime_params_in_use = False - - # If parameter_validation is set and they have changed, then validate requested values and assign to target_set - # if self.prefs.paramValidationPref and params and not params is target_set: - # curr_context = self.context.initialization_status - # self.context.initialization_status = ContextFlags.VALIDATING - # try: - # self._validate_params(variable=variable, request_set=params, target_set=target_set, context=context) - # except TypeError: - # self._validate_params(request_set=params, target_set=target_set, context=context) - # self.context.initialization_status = curr_context + if hasattr(self, param_name): + if param_name in {FUNCTION, INPUT_STATES, OUTPUT_STATES}: + continue + self._runtime_params_reset[param_name] = getattr(self, param_name) + self._set_parameter_value(param_name, runtime_params[param_name]) return variable diff --git a/psyneulink/components/functions/function.py b/psyneulink/components/functions/function.py index 5210a1e02f4..c9518cf89fc 100644 --- a/psyneulink/components/functions/function.py +++ b/psyneulink/components/functions/function.py @@ -7288,58 +7288,19 @@ def _accumulator_check_args(self, variable=None, params=None, target_set=None, c # If params have been passed, treat as runtime params and assign to paramsCurrent # (relabel params as runtime_params for clarity) + if not self.runtimeParamStickyAssignmentPref: + for key in self._runtime_params_reset: + self._set_parameter_value(key, self._runtime_params_reset[key]) + self._runtime_params_reset = {} + runtime_params = params - if runtime_params and runtime_params is not None: - for param_name in self.user_params: - # Ignore input_states and output_states -- they should not be modified during run - # IMPLEMENTATION NOTE: - # FUNCTION_RUNTIME_PARAM_NOT_SUPPORTED: - # At present, assignment of ``function`` as runtime param is not supported - # (this is because paramInstanceDefaults[FUNCTION] could be a class rather than an bound 
method; - # i.e., not yet instantiated; could be rectified by assignment in _instantiate_function) - if param_name in {FUNCTION, INPUT_STATES, OUTPUT_STATES}: - continue - # If param is specified in runtime_params, then assign it - if param_name in runtime_params: - self.paramsCurrent[param_name] = runtime_params[param_name] - # Otherwise, (re-)assign to paramInstanceDefaults - # this insures that any params that were assigned as runtime on last execution are reset here - # (unless they have been assigned another runtime value) - elif not self.runtimeParamStickyAssignmentPref: - if param_name is FUNCTION_PARAMS: - for function_param in self.function_object.user_params: - self.function_object.paramsCurrent[function_param] = \ - self.function_object.paramInstanceDefaults[function_param] + if runtime_params: + for param_name in runtime_params: + if hasattr(self, param_name): + if param_name in {FUNCTION, INPUT_STATES, OUTPUT_STATES}: continue - self.paramsCurrent[param_name] = self.paramInstanceDefaults[param_name] - self.runtime_params_in_use = True - - # Otherwise, reset paramsCurrent to paramInstanceDefaults - elif self.runtime_params_in_use and not self.runtimeParamStickyAssignmentPref: - # Can't do the following since function could still be a class ref rather than abound method (see below) - # self.paramsCurrent = self.paramInstanceDefaults - for param_name in self.user_params: - # IMPLEMENTATION NOTE: FUNCTION_RUNTIME_PARAM_NOT_SUPPORTED - # At present, assignment of ``function`` as runtime param is not supported - # (this is because paramInstanceDefaults[FUNCTION] could be a class rather than an bound method; - # i.e., not yet instantiated; could be rectified by assignment in _instantiate_function) - if param_name is FUNCTION: - continue - if param_name is FUNCTION_PARAMS: - for function_param in self.function_object.user_params: - self.function_object.paramsCurrent[function_param] = \ - self.function_object.paramInstanceDefaults[function_param] - continue - self.paramsCurrent[param_name] = self.paramInstanceDefaults[param_name] - - self.runtime_params_in_use = False - - # If parameter_validation is set and they have changed, then validate requested values and assign to target_set - if self.prefs.paramValidationPref and params and not params is target_set: - try: - self._validate_params(variable=variable, request_set=params, target_set=target_set, context=context) - except TypeError: - self._validate_params(request_set=params, target_set=target_set, context=context) + self._runtime_params_reset[param_name] = getattr(self, param_name) + self._set_parameter_value(param_name, runtime_params[param_name]) def function(self, variable=None, From 40d9cb87440d4ca7c5c401ca29ed25dae2058043 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 2 May 2018 17:25:11 -0400 Subject: [PATCH 034/200] adding pytests for runtime params --- tests/system/test_system.py | 95 ++++++++++++++++++++++++++++++++----- 1 file changed, 83 insertions(+), 12 deletions(-) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 1396808a13f..3230415fdf0 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -681,19 +681,90 @@ def test_initialize_mechanisms(self): class TestRuntimeParams: - def test_mechanism_execute(self): + def test_mechanism_execute_function_param(self): + + # Construction T = TransferMechanism() + assert T.function_object.slope == 1.0 + assert T.parameter_states['slope'].value == 1.0 - print(" - - - - - after construction - - - - - ") - print("function 
attr value = ", T.function_object.slope) - print("parameter state value = ", T.parameter_states['slope'].value) + # Runtime param used for slope T.execute(runtime_params={"slope": 10.0}, input=2.0) - print(" - - - - - after running with runtime params - - - - - ") - print("function attr value = ", T.function_object.slope) - print("parameter state value = ", T.parameter_states['slope'].value) - print("mechanism value = \n\n", T.value) + assert T.function_object.slope == 10.0 + assert T.parameter_states['slope'].value == 10.0 + assert T.value == 20.0 + + # Runtime param NOT used for slope + T.execute(input=2.0) + assert T.function_object.slope == 1.0 + assert T.parameter_states['slope'].value == 1.0 + assert T.value == 2.0 + + def test_mechanism_execute_mechanism_param(self): + + # Construction + T = TransferMechanism() + assert T.noise == 0.0 + assert T.parameter_states['noise'].value == 0.0 + + # Runtime param used for noise + T.execute(runtime_params={"noise": 10.0}, input=2.0) + assert T.noise == 10.0 + assert T.parameter_states['noise'].value == 10.0 + assert T.value == 12.0 + + # Runtime param NOT used for noise T.execute(input=2.0) - print(" - - - - - after executing again without runtime params - - - - - ") - print("function attr value = ", T.function_object.slope) - print("parameter state value = ", T.parameter_states['slope'].value) - print("mechanism value = \n\n", T.value) + assert T.noise == 0.0 + assert T.parameter_states['noise'].value == 0.0 + assert T.value == 2.0 + + def test_runtime_params_reset_isolated(self): + + T = TransferMechanism() + + # Intercept attr updated + T.function_object.intercept = 2.0 + assert T.function_object.intercept == 2.0 + + # Runtime param used for slope + T.execute(runtime_params={"slope": 10.0}, input=2.0) + assert T.function_object.slope == 10.0 + assert T.parameter_states['slope'].value == 10.0 + + # Intercept attr NOT affected by runtime params + assert T.function_object.intercept == 2.0 + assert T.value == 22.0 + + # Runtime param NOT used for slope + T.execute(input=2.0) + assert T.function_object.slope == 1.0 + assert T.parameter_states['slope'].value == 1.0 + + # Intercept attr NOT affected by runtime params reset + assert T.function_object.intercept == 2.0 + assert T.value == 4.0 + + def test_runtime_params_reset_to_most_recent_val(self): + # NOT instance defaults + + # Construction + T = TransferMechanism() + assert T.function_object.slope == 1.0 + assert T.parameter_states['slope'].value == 1.0 + + # Set slope attribute value directly + T.function_object.slope = 2.0 + assert T.function_object.slope == 2.0 + + # Runtime param used for slope + T.execute(runtime_params={"slope": 10.0}, input=2.0) + assert T.function_object.slope == 10.0 + assert T.parameter_states['slope'].value == 10.0 + assert T.value == 20.0 + + # Runtime param NOT used for slope - reset to most recent slope value (2.0) + T.execute(input=2.0) + assert T.function_object.slope == 2.0 + assert T.value == 4.0 + From 4bdecbf01cfef2f76c66414598b0163a77b0e2f0 Mon Sep 17 00:00:00 2001 From: jdcpni Date: Thu, 3 May 2018 12:07:04 -0400 Subject: [PATCH 035/200] Fix/input state/variable (#781) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * • LCA - _execute: eliminated (by consolidating remaining bits into helper methods * • MaskMappingProjection - corrections to docstrings * • MaskMappingProjection - corrections to docstrings * Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeuLink into devel # Conflicts: # 
tests/projections/test_projections_specifications.py * Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeuLink into devel # Conflicts: # tests/projections/test_projections_specifications.py * Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeuLink into devel # Conflicts: # tests/projections/test_projections_specifications.py * Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeuLink into devel # Conflicts: # tests/projections/test_projections_specifications.py * - * Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeuLink into devel # Conflicts: # tests/projections/test_projections_specifications.py * - * Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeuLink into devel # Conflicts: # tests/projections/test_projections_specifications.py * Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeuLink into devel # Conflicts: # tests/projections/test_projections_specifications.py * Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeuLink into devel # Conflicts: # tests/projections/test_projections_specifications.py * Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeuLink into devel # Conflicts: # tests/projections/test_projections_specifications.py * Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeuLink into devel # Conflicts: # tests/projections/test_projections_specifications.py * Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeuLink into devel # Conflicts: # tests/projections/test_projections_specifications.py * Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeuLink into devel # Conflicts: # tests/projections/test_projections_specifications.py * - * - * - * - * • State._parse_input_state, InputState._parse_state_specific_specs: - refactored to insure that number of items in InputState's variable corresponds to number of path_afferent Projections it receives. 
• Project - many bug fixes related to parsing of InputState variable --- psyneulink/components/component.py | 1 + psyneulink/components/functions/function.py | 11 +- psyneulink/components/mechanisms/mechanism.py | 124 ++++++----- .../projections/pathway/mappingprojection.py | 3 +- psyneulink/components/states/inputstate.py | 194 ++++++++++-------- psyneulink/components/states/outputstate.py | 21 +- .../components/states/parameterstate.py | 6 +- psyneulink/components/states/state.py | 51 ++--- tests/mechanisms/test_input_state_spec.py | 44 ++-- 9 files changed, 247 insertions(+), 208 deletions(-) diff --git a/psyneulink/components/component.py b/psyneulink/components/component.py index c87fb4f5226..7a1c9100abb 100644 --- a/psyneulink/components/component.py +++ b/psyneulink/components/component.py @@ -2696,6 +2696,7 @@ def initialize(self): def execute(self, variable=None, runtime_params=None, context=None): function_variable = self._parse_function_variable(variable) return self._execute(variable=variable, function_variable=function_variable, runtime_params=runtime_params, context=context) + # return self._execute(variable=variable, runtime_params=runtime_params, context=context) def _execute(self, variable=None, function_variable=None, runtime_params=None, context=None, **kwargs): diff --git a/psyneulink/components/functions/function.py b/psyneulink/components/functions/function.py index 5210a1e02f4..22dcaa98d9d 100644 --- a/psyneulink/components/functions/function.py +++ b/psyneulink/components/functions/function.py @@ -1444,7 +1444,8 @@ class CombinationFunction(Function_Base): componentType = COMBINATION_FUNCTION_TYPE class ClassDefaults(Function_Base.ClassDefaults): - variable = np.array([0, 0]) + # variable = np.array([0, 0]) + variable = np.array([0]) # IMPLEMENTATION NOTE: THESE SHOULD SHOULD BE REPLACED WITH ABC WHEN IMPLEMENTED def __init__(self, default_variable, @@ -1996,9 +1997,8 @@ def _validate_variable(self, variable, context=None): else: new_length = len(variable[i]) if old_length != new_length: - raise FunctionError("Length of all arrays in variable {0} " - "for {1} must be the same".format(variable, - self.__class__.__name__)) + raise FunctionError("Length of all arrays in variable for {0} must be the same; variable: {1}". + format(self.__class__.__name__, variable)) return variable def _validate_params(self, request_set, target_set=None, context=None): @@ -4348,7 +4348,8 @@ def keyword(obj, keyword): if isinstance(obj.receiver.instance_defaults.variable, numbers.Number): cols = 1 else: - cols = len(obj.receiver.instance_defaults.variable) + # cols = len(obj.receiver.instance_defaults.variable) + cols = obj.receiver.instance_defaults.variable.shape[-1] matrix = get_matrix(keyword, rows, cols) if matrix is None: diff --git a/psyneulink/components/mechanisms/mechanism.py b/psyneulink/components/mechanisms/mechanism.py index e428d7dfd4f..d0e31af3071 100644 --- a/psyneulink/components/mechanisms/mechanism.py +++ b/psyneulink/components/mechanisms/mechanism.py @@ -863,7 +863,7 @@ class `UserList =2d - InputState variable must be embedded in a list (see InputState._get_state_function_value()). 
- so that LinearCombination (its default function) returns a variable that is >=2d intact - (rather than as arrays to be combined); + InputState variable must be embedded in a list so that LinearCombination (its default function) + returns a variable that is >=2d intact (rather than as arrays to be combined); this is normally done in State.update() (and in State._instantiate-function), but that can't be called by _parse_state_spec since the InputState itself may not yet have been instantiated. """ import inspect + if ( - ( - (inspect.isclass(function) and issubclass(function, LinearCombination)) - or isinstance(function, LinearCombination) - ) - and ( - isinstance(variable, np.matrix) - or ( - isinstance(np.array(variable)) - and variable.ndim >=2 - ) - ) + ((inspect.isclass(function) and issubclass(function, LinearCombination)) + or isinstance(function, LinearCombination)) + and (isinstance(variable, np.matrix) or + (isinstance(np.array(variable),np.ndarray) and np.array(variable).ndim>=2)) ): variable = [variable] - return function.execute(variable) + + # if function is None, use State's default function + function = function or InputState.ClassDefaults.function + + return State_Base._get_state_function_value(owner=owner, function=function, variable=variable) def _instantiate_input_states(owner, input_states=None, reference_value=None, context=None): @@ -1225,7 +1248,8 @@ def _instantiate_input_states(owner, input_states=None, reference_value=None, co state_param_identifier=INPUT_STATE, reference_value=reference_value if reference_value is not None else owner.instance_defaults.variable, - reference_value_name=VARIABLE, + # reference_value=reference_value, + reference_value_name=VALUE, context=context) # Call from Mechanism.add_states, so add to rather than assign input_states (i.e., don't replace) diff --git a/psyneulink/components/states/outputstate.py b/psyneulink/components/states/outputstate.py index fd1f8b168a7..c6bea94a236 100644 --- a/psyneulink/components/states/outputstate.py +++ b/psyneulink/components/states/outputstate.py @@ -1166,22 +1166,26 @@ def _parse_state_specific_specs(self, owner, state_dict, state_specific_spec): @staticmethod def _get_state_function_value(owner, function, variable): - # -- CALL TO GET DEFAULT VALUE AND RETURN THAT (CAN'T USE VARIABLE SINCE DON'T KNOW MECH YET) - # THOUGH COULD PASS IN OWNER TO DETERMINE IT fct_variable = _parse_output_state_variable(owner, variable) # If variable has not been specified, assume it is the default of (OWNER_VALUE,0), and use that value if fct_variable is None: - if owner.value is not None: - fct_variable = owner.value[0] - # Get owner's value by calling its function - else: - owner.function(owner.variable)[0] + try: + if owner.value is not None: + fct_variable = owner.value[0] + # Get owner's value by calling its function + else: + fct_variable = owner.function(owner.variable)[0] + except AttributeError: + fct_variable = None fct = _parse_output_state_function(owner, OutputState.__name__, function, fct_variable is PARAMS_DICT) try: - return fct(variable=fct_variable) + # return fct(variable=fct_variable) + return State_Base._get_state_function_value(owner=owner, function=fct, variable=fct_variable) + # FIX: 5/2/18 JDC IS THIS NEEDED? ISN'T IT HANDLED BY SUPER (SINCE IT CALLS WITHOUT NAME OF VARIABLE ARG)? 
+ # IF fct IS NOT FOUND, PASS OutputState.ClassDefault.function except: try: return fct(fct_variable) @@ -1360,6 +1364,7 @@ def _instantiate_output_states(owner, output_states=None, context=None): output_state[VARIABLE]) else: output_state_value = _parse_output_state_variable(owner, output_state[VARIABLE]) + output_state[VALUE] = output_state_value output_states[i] = output_state reference_value.append(output_state_value) diff --git a/psyneulink/components/states/parameterstate.py b/psyneulink/components/states/parameterstate.py index 8efa21b7ed5..44d169d83ff 100644 --- a/psyneulink/components/states/parameterstate.py +++ b/psyneulink/components/states/parameterstate.py @@ -630,7 +630,6 @@ def _instantiate_projections(self, projections, context=None): self._instantiate_projections_to_state(projections=projections, context=context) - @tc.typecheck def _parse_state_specific_specs(self, owner, state_dict, state_specific_spec): """Get connections specified in a ParameterState specification tuple @@ -814,6 +813,11 @@ def _parse_state_specific_specs(self, owner, state_dict, state_specific_spec): return state_spec, params_dict + @staticmethod + def _get_state_function_value(owner, function, variable): + """Return parameter variable (since ParameterState's function never changes the form of its variable""" + return variable + def _execute(self, variable=None, function_variable=None, runtime_params=None, context=None): """Call self.function with current parameter value as the variable diff --git a/psyneulink/components/states/state.py b/psyneulink/components/states/state.py index 8b97ae48449..8853229ad1c 100644 --- a/psyneulink/components/states/state.py +++ b/psyneulink/components/states/state.py @@ -1496,7 +1496,8 @@ def _instantiate_projections_to_state(self, projections, context=None): # PathwayProjection: # - check that projection's value is compatible with the State's variable if isinstance(projection, PathwayProjection_Base): - if not iscompatible(projection.value, self.instance_defaults.variable): + if not iscompatible(projection.value, self.instance_defaults.variable[0]): + # if len(projection.value) != self.instance_defaults.variable.shape[-1]: raise StateError("Output of function for {} ({}) is not compatible with value of {} ({}).". format(projection.name, projection.value, self.name, self.value)) @@ -2043,8 +2044,10 @@ def _assign_default_state_name(self, context=None): @staticmethod def _get_state_function_value(owner, function, variable): """Execute the function of a State and return its value + # FIX: CONSIDER INTEGRATING THIS INTO _EXECUTE FOR STATE? - This is a stub, that a State subclass can override to treat execution of its function in a State-specific manner + This is a stub, that a State subclass can override to treat its function in a State-specific manner. + Used primarily during validation, when the function may not have been fully instantiated yet (e.g., InputState must sometimes embed its variable in a list-- see InputState._get_state_function_value). 
""" return function.execute(variable) @@ -2232,13 +2235,15 @@ def _instantiate_state(state_type:_is_state_class, # State's type # Parse reference value to get actual value (in case it is, itself, a specification dict) - reference_value_dict = _parse_state_spec(owner=owner, - state_type=state_type, - state_spec=reference_value, - value=None, - params=None) - # Its value is assigned to the VARIABLE entry (including if it was originally just a value) - reference_value = reference_value_dict[VARIABLE] + from psyneulink.globals.utilities import is_numeric + if not is_numeric(reference_value): + reference_value_dict = _parse_state_spec(owner=owner, + state_type=state_type, + state_spec=reference_value, + value=None, + params=None) + # Its value is assigned to the VARIABLE entry (including if it was originally just a value) + reference_value = reference_value_dict[VARIABLE] parsed_state_spec = _parse_state_spec(state_type=state_type, owner=owner, @@ -2478,7 +2483,7 @@ def _parse_state_spec(state_type=None, state_specific_args = state_spec[STATE_SPEC_ARG].copy() standard_args.update({key: state_specific_args[key] for key in state_specific_args - if key in standard_args}) + if key in standard_args and state_specific_args[key] is not None}) # Delete them from the State specification dictionary, leaving only state-specific items there for key in standard_args: state_specific_args.pop(key, None) @@ -2507,8 +2512,9 @@ def _parse_state_spec(state_type=None, params = state_specific_args # Convert reference_value to np.array to match state_variable (which, as output of function, will be an np.array) - if isinstance(reference_value, numbers.Number): - reference_value = convert_to_np_array(reference_value,1) + # if isinstance(reference_value, numbers.Number): + # FIX: 5/2/18 JDC - NOT NECESSARILY... OUTPUT_STATE FUNCTIONS CAN GENERATE ANYTHING + # reference_value = convert_to_np_array(reference_value,1) # Validate that state_type is a State class if isinstance(state_type, str): @@ -2693,7 +2699,8 @@ def _parse_state_spec(state_type=None, # Standard state specification dict # Warn if VARIABLE was not in dict - if VARIABLE not in state_dict and owner.prefs.verbosePref: + if ((VARIABLE not in state_dict or state_dict[VARIABLE] is None) + and hasattr(owner, 'prefs') and owner.prefs.verbosePref): print("{} missing from specification dict for {} of {}; " "will be inferred from context or the default ({}) will be used". 
format(VARIABLE, state_type, owner.name, state_dict)) @@ -2849,27 +2856,22 @@ def _parse_state_spec(state_type=None, spec_function = state_dict[PARAMS][FUNCTION] # if isinstance(spec_function, Function): if isinstance(spec_function, (Function, function_type, method_type)): - # # MODIFIED 2/21/18 OLD [KM]: - # spec_function_value = spec_function.execute(state_dict[VARIABLE]) - # MODIFIED 2/21/18 NEW [JDC]: spec_function_value = state_type._get_state_function_value(owner, spec_function, state_dict[VARIABLE]) - # MODIFIED 2/21/18 END elif inspect.isclass(spec_function) and issubclass(spec_function, Function): try: spec_function = spec_function(**state_dict[PARAMS][FUNCTION_PARAMS]) except (KeyError, TypeError): spec_function = spec_function() - # # MODIFIED 2/21/18 OLD [KM]: - # spec_function_value = spec_function.execute(state_dict[VARIABLE]) - # MODIFIED 2/21/18 NEW [JDC]: spec_function_value = state_type._get_state_function_value(owner, spec_function, state_dict[VARIABLE]) - # MODIFIED 2/21/18 END else: raise StateError('state_spec value for FUNCTION ({0}) must be a function, method, ' 'Function class or instance of one'. format(spec_function)) except (KeyError, TypeError): - spec_function_value = state_dict[VARIABLE] + # MODIFIED NEW 5/2/18 FIX: NEEDS TO RETURN None from OutputState._get_state_function_value if owner has no value + spec_function_value = state_type._get_state_function_value(owner, None, state_dict[VARIABLE]) + # MODIFIED 5/2/18 END + # Assign value based on variable if not specified if state_dict[VALUE] is None: @@ -2886,6 +2888,10 @@ def _parse_state_spec(state_type=None, ) ) + if state_dict[REFERENCE_VALUE] is not None and not iscompatible(state_dict[VALUE], state_dict[REFERENCE_VALUE]): + raise StateError("PROGRAM ERROR: State value ({}) does not match reference_value ({}) for {} of {})". + format(state_dict[VALUE], state_dict[REFERENCE_VALUE], state_type.__name__, owner.name)) + return state_dict @@ -3095,7 +3101,6 @@ def _is_legal_state_spec_tuple(owner, state_spec, state_type_name=None): isinstance(state_spec[1], (Mechanism, State)) or (isinstance(state_spec[0], Mechanism) and state_spec[1] in state_spec[0]._parameter_states)): - raise StateError("2nd item of tuple in state_spec for {} of {} ({}) must be a specification " "for a Mechanism, State, or Projection". 
format(state_type_name, owner.__class__.__name__, state_spec[1])) diff --git a/tests/mechanisms/test_input_state_spec.py b/tests/mechanisms/test_input_state_spec.py index 738273663fd..2a011c194da 100644 --- a/tests/mechanisms/test_input_state_spec.py +++ b/tests/mechanisms/test_input_state_spec.py @@ -7,11 +7,12 @@ from psyneulink.components.mechanisms.processing.transfermechanism import TransferMechanism from psyneulink.components.projections.pathway.mappingprojection import MappingProjection from psyneulink.components.projections.projection import ProjectionError -from psyneulink.components.states.inputstate import InputState +from psyneulink.components.states.inputstate import InputState, InputStateError from psyneulink.components.states.state import StateError from psyneulink.globals.keywords import FUNCTION, INPUT_STATES, MECHANISM, NAME, OUTPUT_STATES, PROJECTIONS, RESULTS, VARIABLE -mismatches_default_variable_error_text = 'not compatible with the specified default variable' +mismatches_specified_default_variable_error_text = 'not compatible with its specified default variable' +mismatches_default_variable_format_error_text = 'is not compatible with its expected format' mismatches_size_error_text = 'not compatible with the default variable determined from size parameter' belongs_to_another_mechanism_error_text = 'that belongs to another Mechanism' @@ -43,12 +44,12 @@ def test_match_with_default_variable(self): def test_mismatch_with_default_variable_error(self): - with pytest.raises(MechanismError) as error_text: + with pytest.raises(InputStateError) as error_text: TransferMechanism( default_variable=[[0], [0]], input_states=[[32, 24], 'HELLO'] ) - assert mismatches_default_variable_error_text in str(error_text.value) + assert mismatches_default_variable_format_error_text in str(error_text.value) # ------------------------------------------------------------------------------------------------ # TEST 3 @@ -209,7 +210,7 @@ def test_default_variable_override_mech_list(self): np.testing.assert_array_equal(T.instance_defaults.variable, np.array([[0, 0]])) assert len(T.input_states) == 1 assert len(T.input_state.path_afferents[0].sender.instance_defaults.variable) == 3 - assert len(T.input_state.instance_defaults.variable) == 2 + assert len(T.input_state.instance_defaults.variable[0]) == 2 T.execute() # ------------------------------------------------------------------------------------------------ @@ -222,7 +223,8 @@ def test_2_item_tuple_spec(self): np.testing.assert_array_equal(T.instance_defaults.variable, np.array([[0, 0]])) assert len(T.input_states) == 1 assert len(T.input_state.path_afferents[0].sender.instance_defaults.variable) == 3 - assert len(T.input_state.instance_defaults.variable) == 2 + # assert len(T.input_state.instance_defaults.variable[]) == 2 + assert T.input_state.instance_defaults.variable.shape[-1] == 2 T.execute() # ------------------------------------------------------------------------------------------------ @@ -234,8 +236,8 @@ def test_2_item_tuple_value_for_first_item(self): T = TransferMechanism(input_states=[([0,0], R2)]) np.testing.assert_array_equal(T.instance_defaults.variable, np.array([[0, 0]])) assert len(T.input_states) == 1 - assert len(T.input_state.path_afferents[0].sender.instance_defaults.variable) == 3 - assert len(T.input_state.instance_defaults.variable) == 2 + assert T.input_state.path_afferents[0].sender.instance_defaults.variable.shape[-1] == 3 + assert T.input_state.instance_defaults.variable.shape[-1] == 2 T.execute() # 
------------------------------------------------------------------------------------------------ @@ -247,8 +249,8 @@ def test_projection_tuple_with_matrix_spec(self): T = TransferMechanism(size=2, input_states=[(R2, None, None, np.zeros((3, 2)))]) np.testing.assert_array_equal(T.instance_defaults.variable, np.array([[0, 0]])) assert len(T.input_states) == 1 - assert len(T.input_state.path_afferents[0].sender.instance_defaults.variable) == 3 - assert len(T.input_state.instance_defaults.variable) == 2 + assert T.input_state.path_afferents[0].sender.instance_defaults.variable.shape[-1] == 3 + assert T.input_state.instance_defaults.variable.shape[-1] == 2 T.execute() # ------------------------------------------------------------------------------------------------ @@ -381,7 +383,7 @@ def test_dict_with_variable_mismatches_default(self): default_variable=[[0]], input_states=[{NAME: 'FIRST', VARIABLE: [0, 0]}] ) - assert mismatches_default_variable_error_text in str(error_text.value) + assert mismatches_specified_default_variable_error_text in str(error_text.value) # ------------------------------------------------------------------------------------------------ # TEST 23 @@ -395,7 +397,7 @@ def test_dict_with_variable_mismatches_default_multiple_input_states(self): {NAME: 'SECOND', VARIABLE: [0]} ] ) - assert mismatches_default_variable_error_text in str(error_text.value) + assert mismatches_specified_default_variable_error_text in str(error_text.value) # ------------------------------------------------------------------------------------------------ # TEST 24 @@ -479,7 +481,7 @@ def test_InputState_mismatches_default(self): with pytest.raises(MechanismError) as error_text: i = InputState(reference_value=[0, 0, 0]) TransferMechanism(default_variable=[0, 0], input_states=[i]) - assert mismatches_default_variable_error_text in str(error_text.value) + assert mismatches_specified_default_variable_error_text in str(error_text.value) # ------------------------------------------------------------------------------------------------ # TEST 31 @@ -500,7 +502,7 @@ def test_projection_with_matrix_and_sender_mismatches_default(self): m = TransferMechanism(size=2) p = MappingProjection(sender=m, matrix=[[0, 0, 0], [0, 0, 0]]) TransferMechanism(default_variable=[0, 0], input_states=[p]) - assert mismatches_default_variable_error_text in str(error_text.value) + assert mismatches_specified_default_variable_error_text in str(error_text.value) # ------------------------------------------------------------------------------------------------ # TEST 33 @@ -550,7 +552,7 @@ def test_projection_no_args_dict_spec_mismatch_with_default(self): with pytest.raises(MechanismError) as error_text: p = MappingProjection() TransferMechanism(default_variable=[0, 0], input_states=[{VARIABLE: [0, 0, 0], PROJECTIONS: [p]}]) - assert mismatches_default_variable_error_text in str(error_text.value) + assert mismatches_specified_default_variable_error_text in str(error_text.value) # ------------------------------------------------------------------------------------------------ # TEST 38 @@ -559,7 +561,7 @@ def test_outputstate_(self): with pytest.raises(MechanismError) as error_text: p = MappingProjection() TransferMechanism(default_variable=[0, 0], input_states=[{VARIABLE: [0, 0, 0], PROJECTIONS: [p]}]) - assert mismatches_default_variable_error_text in str(error_text.value) + assert mismatches_specified_default_variable_error_text in str(error_text.value) # 
------------------------------------------------------------------------------------------------ # TEST 26 @@ -641,15 +643,15 @@ def test_default_name_and_projections_listing_for_input_state_in_constructor(sel def test_2_item_tuple_with_state_name_list_and_mechanism(self): # T1 has OutputStates of with same lengths, - # so T2 should use that length for its InputState (since it is not otherwise specified + # so T2 should use that length for its InputState variable (since it is not otherwise specified) T1 = TransferMechanism(input_states=[[0,0],[0,0]]) T2 = TransferMechanism(input_states=[(['RESULT', 'RESULT-1'], T1)]) assert len(T2.input_states[0].value) == 2 assert T2.input_states[0].path_afferents[0].sender.name == 'RESULT' assert T2.input_states[0].path_afferents[1].sender.name == 'RESULT-1' - # T1 has OutputStates with different lengths, - # so T2 should use its variable default to as format for its InputStates (since it is not otherwise specified + # T1 has OutputStates with different lengths both of which are specified by T2 to project to a singe InputState, + # so T2 should use its variable default as format for the InputState (since it is not otherwise specified) T1 = TransferMechanism(input_states=[[0,0],[0,0,0]]) T2 = TransferMechanism(input_states=[(['RESULT', 'RESULT-1'], T1)]) assert len(T2.input_states[0].value) == 1 @@ -758,5 +760,5 @@ def test_mech_and_tuple_specifications_with_and_without_default_variable_or_size size=size, input_states=input_states ) - assert len(T.input_states[0].instance_defaults.variable) == variable_len_state - assert len(T.instance_defaults.variable[0]) == variable_len_mech + assert T.input_states[0].instance_defaults.variable.shape[-1] == variable_len_state + assert T.instance_defaults.variable.shape[-1] == variable_len_mech From f0bfd1643bad4c9903be94754a12afd1f896eb2d Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 3 May 2018 16:23:39 -0400 Subject: [PATCH 036/200] passing runtime_params into system.run() and subsequent methods --- psyneulink/components/mechanisms/mechanism.py | 39 +++++++++-------- psyneulink/components/system.py | 28 ++++++++---- psyneulink/globals/environment.py | 2 + tests/system/test_system.py | 43 +++++++++++++++++++ 4 files changed, 85 insertions(+), 27 deletions(-) diff --git a/psyneulink/components/mechanisms/mechanism.py b/psyneulink/components/mechanisms/mechanism.py index e428d7dfd4f..599e077da84 100644 --- a/psyneulink/components/mechanisms/mechanism.py +++ b/psyneulink/components/mechanisms/mechanism.py @@ -2148,25 +2148,26 @@ def execute(self, # Insure that param set is for a States: if self.prefs.paramValidationPref: if runtime_params: - # runtime_params can have entries for any of the the Mechanism's params, or - # one or more state keys, each of which should be for a params dictionary for the corresponding - # state type, and each of can contain only parameters relevant to that state - state_keys = [INPUT_STATE_PARAMS, PARAMETER_STATE_PARAMS, OUTPUT_STATE_PARAMS] - param_names = list({**self.user_params, **self.function_params}) - if not all(key in state_keys + param_names for key in runtime_params): - raise MechanismError("There is an invalid specification for a runtime parameter of {}". - format(self.name)) - # for state_key in runtime_params: - for state_key in [entry for entry in runtime_params if entry in state_keys]: - state_dict = runtime_params[state_key] - if not isinstance(state_dict, dict): - raise MechanismError("runtime_params entry for {} is not a dict". 
- format(self.name, state_key)) - for param_name in state_dict: - if not param_name in param_names: - raise MechanismError("{} entry in runtime_params for {} " - "contains an unrecognized parameter: {}". - format(state_key, self.name, param_name)) + pass + # # runtime_params can have entries for any of the the Mechanism's params, or + # # one or more state keys, each of which should be for a params dictionary for the corresponding + # # state type, and each of can contain only parameters relevant to that state + # state_keys = [INPUT_STATE_PARAMS, PARAMETER_STATE_PARAMS, OUTPUT_STATE_PARAMS] + # param_names = list({**self.user_params, **self.function_params}) + # if not all(key in state_keys + param_names for key in runtime_params): + # raise MechanismError("There is an invalid specification for a runtime parameter of {}". + # format(self.name)) + # # for state_key in runtime_params: + # for state_key in [entry for entry in runtime_params if entry in state_keys]: + # state_dict = runtime_params[state_key] + # if not isinstance(state_dict, dict): + # raise MechanismError("runtime_params entry for {} is not a dict". + # format(self.name, state_key)) + # for param_name in state_dict: + # if not param_name in param_names: + # raise MechanismError("{} entry in runtime_params for {} " + # "contains an unrecognized parameter: {}". + # format(state_key, self.name, param_name)) # FIX: ??MAKE CONDITIONAL ON self.prefs.paramValidationPref?? diff --git a/psyneulink/components/system.py b/psyneulink/components/system.py index 76328e7b8c9..b09f174a061 100644 --- a/psyneulink/components/system.py +++ b/psyneulink/components/system.py @@ -2451,6 +2451,7 @@ def execute(self, execution_id=None, termination_processing=None, termination_learning=None, + runtime_params=None, context=None): """Execute mechanisms in System at specified :ref:`phases ` in order \ specified by the :py:data:`execution_graph ` attribute. 
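With this patch, a runtime_params dictionary handed to System.run() is threaded through execute() and _execute_processing() down to each mechanism's execute(), and the overridden attributes are restored as soon as the mechanism has executed. A minimal usage sketch (module paths taken from the diffs in this series; mirrors the tests added below, so treat it as illustrative rather than canonical):

    from psyneulink.components.mechanisms.processing.transfermechanism import TransferMechanism
    from psyneulink.components.process import Process
    from psyneulink.components.system import System

    T = TransferMechanism()
    S = System(processes=[Process(pathway=[T])])

    # slope is overridden to 10.0 only while T executes during this run;
    # by the time run() returns, T.function_object.slope is back to its default of 1.0
    S.run(inputs={T: 2.0}, runtime_params={T: {"slope": 10.0}})
    assert T.function_object.slope == 1.0
    assert T.value == 20.0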
@@ -2590,7 +2591,7 @@ def execute(self, # sorted_list = list(object_item[0].name for object_item in self.execution_list) # Execute system without learning on projections (that will be taken care of in _execute_learning() - self._execute_processing(context=context) + self._execute_processing(runtime_params=runtime_params, context=context) # EXECUTE LEARNING FOR EACH PROCESS @@ -2632,7 +2633,7 @@ def execute(self, return self.terminal_mechanisms.outputStateValues - def _execute_processing(self, context=None): + def _execute_processing(self, runtime_params, context=None): # Execute each Mechanism in self.execution_list, in the order listed during its phase # Only update Mechanism on time_step(s) determined by its phaseSpec (specified in Mechanism's Process entry) # FIX: NEED TO IMPLEMENT FRACTIONAL UPDATES (IN Mechanism.update()) @@ -2646,11 +2647,6 @@ def _execute_processing(self, context=None): i = 0 for mechanism in next_execution_set: logger.debug('\tRunning Mechanism {0}'.format(mechanism)) - for p in self.processes: - try: - rt_params = p.runtime_params_dict[mechanism] - except: - rt_params = None processes = list(mechanism.processes.keys()) process_keys_sorted = sorted(processes, key=lambda i : processes[processes.index(i)].name) @@ -2660,8 +2656,19 @@ def _execute_processing(self, context=None): mechanism.context.string = "Mechanism: " + mechanism.name + " [in processes: " + str(process_names) + "]" mechanism.context.composition = self + execution_runtime_params = {} + if mechanism in runtime_params: + execution_runtime_params = runtime_params[mechanism] + mechanism.context.execution_phase = ContextFlags.PROCESSING - mechanism.execute(runtime_params=rt_params, context=context) + mechanism.execute(runtime_params=execution_runtime_params, context=context) + for key in mechanism._runtime_params_reset: + mechanism._set_parameter_value(key, mechanism._runtime_params_reset[key]) + mechanism._runtime_params_reset = {} + + for key in mechanism.function_object._runtime_params_reset: + mechanism.function_object._set_parameter_value(key, mechanism.function_object._runtime_params_reset[key]) + mechanism.function_object._runtime_params_reset = {} mechanism.context.execution_phase = ContextFlags.IDLE if self._report_system_output and self._report_process_output: @@ -2829,6 +2836,7 @@ def run(self, call_after_time_step=None, termination_processing=None, termination_learning=None, + runtime_params=None, context=None): """Run a sequence of executions @@ -2892,6 +2900,9 @@ def run(self, if self.scheduler_learning is None: self.scheduler_learning = Scheduler(graph=self.learning_execution_graph) + if runtime_params is None: + runtime_params = {} + self.initial_values = initial_values logger.debug(inputs) @@ -2910,6 +2921,7 @@ def run(self, call_after_time_step=call_after_time_step, termination_processing=termination_processing, termination_learning=termination_learning, + runtime_params=runtime_params, context=ContextFlags.COMPOSITION) def _report_system_initiation(self): diff --git a/psyneulink/globals/environment.py b/psyneulink/globals/environment.py index 92430279bdb..959a08daa78 100644 --- a/psyneulink/globals/environment.py +++ b/psyneulink/globals/environment.py @@ -526,6 +526,7 @@ def run(object, call_after_time_step:tc.optional(callable)=None, termination_processing=None, termination_learning=None, + runtime_params=None, context=ContextFlags.COMMAND_LINE): """run( \ inputs, \ @@ -760,6 +761,7 @@ def run(object, execution_id=execution_id, termination_processing=termination_processing, 
termination_learning=termination_learning, + runtime_params=runtime_params, context=context ) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 3230415fdf0..2b2c9669111 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -768,3 +768,46 @@ def test_runtime_params_reset_to_most_recent_val(self): assert T.function_object.slope == 2.0 assert T.value == 4.0 + def test_system_run_function_param(self): + + # Construction + T = TransferMechanism() + P = Process(pathway=[T]) + S = System(processes=[P]) + assert T.function_object.slope == 1.0 + assert T.parameter_states['slope'].value == 1.0 + + # Runtime param used for slope + # ONLY mechanism value should reflect runtime param -- attr should be changed back by the time we inspect it + S.run(inputs={T: 2.0}, runtime_params={T: {"slope": 10.0}}) + assert T.function_object.slope == 1.0 + assert T.parameter_states['slope'].value == 1.0 + assert T.value == 20.0 + + # Runtime param NOT used for slope + S.run(inputs={T: 2.0}) + assert T.function_object.slope == 1.0 + assert T.parameter_states['slope'].value == 1.0 + assert T.value == 2.0 + + def test_system_run_mechanism_param(self): + + # Construction + T = TransferMechanism() + P = Process(pathway=[T]) + S = System(processes=[P]) + assert T.noise == 0.0 + assert T.parameter_states['noise'].value == 0.0 + + # Runtime param used for noise + # ONLY mechanism value should reflect runtime param -- attr should be changed back by the time we inspect it + S.run(inputs={T: 2.0}, runtime_params={T: {"noise": 10.0}}) + assert T.noise == 0.0 + assert T.parameter_states['noise'].value == 0.0 + assert T.value == 12.0 + + # Runtime param NOT used for noise + S.run(inputs={T: 2.0}, ) + assert T.noise == 0.0 + assert T.parameter_states['noise'].value == 0.0 + assert T.value == 2.0 From 71b05147e097873e67d48e5cee762f6044d27c36 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 3 May 2018 17:15:17 -0400 Subject: [PATCH 037/200] refactoring runtime param spec as a tuple: (val, condition) where the condition must be satisfied in order for the runtime param to be passed to mechanism.execute --- psyneulink/components/system.py | 16 ++++++++++++++-- tests/system/test_system.py | 2 ++ 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/psyneulink/components/system.py b/psyneulink/components/system.py index b09f174a061..6669e340202 100644 --- a/psyneulink/components/system.py +++ b/psyneulink/components/system.py @@ -456,7 +456,7 @@ from psyneulink.globals.preferences.preferenceset import PreferenceLevel from psyneulink.globals.registry import register_category from psyneulink.globals.utilities import AutoNumber, ContentAddressableList, append_type_to_name, convert_to_np_array, iscompatible -from psyneulink.scheduling.scheduler import Scheduler +from psyneulink.scheduling.scheduler import Scheduler, Condition, Always __all__ = [ 'CONTROL_MECHANISM', 'CONTROL_PROJECTION_RECEIVERS', 'defaultInstanceCount', 'INPUT_ARRAY', 'kwSystemInputState', @@ -2433,6 +2433,14 @@ def _validate_control_signals(self, control_signals, context=None): raise SystemError("A parameter controlled by a ControlSignal of a controller " "being assigned to {} is not in that System".format(self.name)) + def _parse_runtime_params(self, runtime_params): + if runtime_params is None: + return {} + for mechanism in runtime_params: + for param in runtime_params[mechanism]: + if not isinstance(runtime_params[mechanism][param], tuple): + runtime_params[mechanism][param] = 
(runtime_params[mechanism][param], Always()) + return runtime_params def initialize(self): """Assign `initial_values ` to mechanisms designated as `INITIALIZE_CYCLE` \and contained in recurrent_init_mechanisms. @@ -2500,6 +2508,8 @@ def execute(self, if self.scheduler_learning is None: self.scheduler_learning = Scheduler(graph=self.learning_execution_graph) + runtime_params = self._parse_runtime_params(runtime_params) + if not context: context = ContextFlags.COMPOSITION self.context.execution_phase = ContextFlags.PROCESSING @@ -2658,7 +2668,9 @@ def _execute_processing(self, runtime_params, context=None): execution_runtime_params = {} if mechanism in runtime_params: - execution_runtime_params = runtime_params[mechanism] + for param in runtime_params[mechanism]: + if runtime_params[mechanism][param][1].is_satisfied(): + execution_runtime_params[param] = runtime_params[mechanism][param][0] mechanism.context.execution_phase = ContextFlags.PROCESSING mechanism.execute(runtime_params=execution_runtime_params, context=context) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 2b2c9669111..cdfb8586e7d 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -811,3 +811,5 @@ def test_system_run_mechanism_param(self): assert T.noise == 0.0 assert T.parameter_states['noise'].value == 0.0 assert T.value == 2.0 + + From f15a805404b9f550c8483bb88e012deebe2cf6ea Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 4 May 2018 14:39:00 -0400 Subject: [PATCH 038/200] passing scheduler into the runtime param condition evaluation so that conditions have access to timescale counts, and adding pytests for runtime params with conditions --- psyneulink/components/system.py | 2 +- tests/system/test_system.py | 74 +++++++++++++++++++++++++++++++-- 2 files changed, 72 insertions(+), 4 deletions(-) diff --git a/psyneulink/components/system.py b/psyneulink/components/system.py index 6669e340202..90ec14c858c 100644 --- a/psyneulink/components/system.py +++ b/psyneulink/components/system.py @@ -2669,7 +2669,7 @@ def _execute_processing(self, runtime_params, context=None): execution_runtime_params = {} if mechanism in runtime_params: for param in runtime_params[mechanism]: - if runtime_params[mechanism][param][1].is_satisfied(): + if runtime_params[mechanism][param][1].is_satisfied(scheduler=self.scheduler_processing): execution_runtime_params[param] = runtime_params[mechanism][param][0] mechanism.context.execution_phase = ContextFlags.PROCESSING diff --git a/tests/system/test_system.py b/tests/system/test_system.py index cdfb8586e7d..bd2018473f3 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -10,7 +10,7 @@ from psyneulink.globals.keywords import CYCLE, INITIALIZE_CYCLE, INTERNAL, ORIGIN, TERMINAL from psyneulink.library.mechanisms.processing.integrator.ddm import DDM from psyneulink.library.subsystems.evc.evccontrolmechanism import EVCControlMechanism - +from psyneulink.scheduling.condition import Any, AtTrial, AfterTrial def test_danglingControlledMech(): # @@ -768,7 +768,7 @@ def test_runtime_params_reset_to_most_recent_val(self): assert T.function_object.slope == 2.0 assert T.value == 4.0 - def test_system_run_function_param(self): + def test_system_run_function_param_no_condition(self): # Construction T = TransferMechanism() @@ -790,7 +790,7 @@ def test_system_run_function_param(self): assert T.parameter_states['slope'].value == 1.0 assert T.value == 2.0 - def test_system_run_mechanism_param(self): + def 
test_system_run_mechanism_param_no_condition(self): # Construction T = TransferMechanism() @@ -812,4 +812,72 @@ def test_system_run_mechanism_param(self): assert T.parameter_states['noise'].value == 0.0 assert T.value == 2.0 + def test_system_run_with_condition(self): + + # Construction + T = TransferMechanism() + P = Process(pathway=[T]) + S = System(processes=[P]) + + # Runtime param used for noise + # ONLY mechanism value should reflect runtime param -- attr should be changed back by the time we inspect it + S.run(inputs={T: 2.0}, + runtime_params={T: {"noise": (10.0, AtTrial(1))}}, + num_trials=4) + + # Runtime param NOT used for noise + S.run(inputs={T: 2.0}) + + assert np.allclose(S.results, [[np.array([2.])], # Trial 0 - condition not satisfied yet + [np.array([12.])], # Trial 1 - condition satisfied + [np.array([2.])], # Trial 2 - condition no longer satisfied (not sticky) + [np.array([2.])], # Trial 3 - condition no longer satisfied (not sticky) + [np.array([2.])]]) # New run (runtime param no longer applies) + + def test_system_run_with_sticky_condition(self): + + # Construction + T = TransferMechanism() + P = Process(pathway=[T]) + S = System(processes=[P]) + assert T.noise == 0.0 + assert T.parameter_states['noise'].value == 0.0 + + # Runtime param used for noise + # ONLY mechanism value should reflect runtime param -- attr should be changed back by the time we inspect it + S.run(inputs={T: 2.0}, + runtime_params={T: {"noise": (10.0, AfterTrial(1))}}, + num_trials=4) + + # Runtime param NOT used for noise + S.run(inputs={T: 2.0}) + + assert np.allclose(S.results, [[np.array([2.])], # Trial 0 - condition not satisfied yet + [np.array([2.])], # Trial 1 - condition not satisfied yet + [np.array([12.])], # Trial 2 - condition satisfied + [np.array([12.])], # Trial 3 - condition satisfied (sticky) + [np.array([2.])]]) # New run (runtime param no longer applies) + + def test_system_run_with_combined_condition(self): + + # Construction + T = TransferMechanism() + P = Process(pathway=[T]) + S = System(processes=[P]) + + # Runtime param used for noise + # ONLY mechanism value should reflect runtime param -- attr should be changed back by the time we inspect it + S.run(inputs={T: 2.0}, + runtime_params={T: {"noise": (10.0, Any(AtTrial(1), AfterTrial(2)))}}, + num_trials=5) + + # Runtime param NOT used for noise + S.run(inputs={T: 2.0}) + + assert np.allclose(S.results, [[np.array([2.])], # Trial 0 - NOT condition 0, NOT condition 1 + [np.array([12.])], # Trial 1 - condition 0, NOT condition 1 + [np.array([2.])], # Trial 2 - NOT condition 0, NOT condition 1 + [np.array([12.])], # Trial 3 - NOT condition 0, condition 1 + [np.array([12.])], # Trial 4 - NOT condition 0, condition 1 + [np.array([2.])]]) # New run (runtime param no longer applies) From 6baa395246acb7a79ec75db52f66bc402ccc61fc Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 4 May 2018 14:55:11 -0400 Subject: [PATCH 039/200] removing old references to sticky runtime params now that this is handled by conditions --- psyneulink/components/component.py | 13 +-- psyneulink/components/functions/function.py | 14 +-- psyneulink/components/mechanisms/mechanism.py | 7 +- .../processing/transfermechanism.py | 3 +- .../preferences/componentpreferenceset.py | 48 ++------ .../preferences/mechanismpreferenceset.py | 109 +----------------- .../mechanisms/processing/leabramechanism.py | 8 +- .../library/subsystems/evc/evcauxiliary.py | 5 +- 8 files changed, 23 insertions(+), 184 deletions(-) diff --git 
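The tuple spec introduced in these patches lets a runtime value apply only on trials where its Condition is satisfied, with the processing scheduler's trial counts driving the evaluation; a bare value is wrapped in Always() by _parse_runtime_params, so it applies on every trial of the run. A sketch mirroring the tests above (imports as used elsewhere in this series; illustrative only):

    from psyneulink.components.mechanisms.processing.transfermechanism import TransferMechanism
    from psyneulink.components.process import Process
    from psyneulink.components.system import System
    from psyneulink.scheduling.condition import AtTrial
    from psyneulink.scheduling.scheduler import Always  # same import system.py uses above

    T = TransferMechanism()
    S = System(processes=[Process(pathway=[T])])

    # noise is overridden to 10.0 only on trial 1; AtTrial is not sticky,
    # so trials 0, 2 and 3 run with the default noise of 0.0
    S.run(inputs={T: 2.0},
          runtime_params={T: {"noise": (10.0, AtTrial(1))}},
          num_trials=4)

    # a bare value and an explicit Always() condition behave identically:
    S.run(inputs={T: 2.0}, runtime_params={T: {"noise": 10.0}})
    S.run(inputs={T: 2.0}, runtime_params={T: {"noise": (10.0, Always())}})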
a/psyneulink/components/component.py b/psyneulink/components/component.py index 790e77313d9..7eff4b69c97 100644 --- a/psyneulink/components/component.py +++ b/psyneulink/components/component.py @@ -1693,9 +1693,8 @@ def _check_args(self, variable=None, params=None, target_set=None, context=None) # If params have been passed, treat as runtime params and assign to paramsCurrent # (relabel params as runtime_params for clarity) - if not self.runtimeParamStickyAssignmentPref: - for key in self._runtime_params_reset: - self._set_parameter_value(key, self._runtime_params_reset[key]) + for key in self._runtime_params_reset: + self._set_parameter_value(key, self._runtime_params_reset[key]) self._runtime_params_reset = {} runtime_params = params @@ -2912,14 +2911,6 @@ def runtimeParamModulationPref(self): def runtimeParamModulationPref(self, setting): self.prefs.runtimeParamModulationPref = setting - @property - def runtimeParamStickyAssignmentPref(self): - return self.prefs.runtimeParamStickyAssignmentPref - - @runtimeParamStickyAssignmentPref.setter - def runtimeParamStickyAssignmentPref(self, setting): - self.prefs.runtimeParamStickyAssignmentPref = setting - @property def context(self): try: diff --git a/psyneulink/components/functions/function.py b/psyneulink/components/functions/function.py index c9518cf89fc..a153b7cdf22 100644 --- a/psyneulink/components/functions/function.py +++ b/psyneulink/components/functions/function.py @@ -193,7 +193,7 @@ from psyneulink.components.shellclasses import Function from psyneulink.globals.context import ContextFlags from psyneulink.globals.keywords import ACCUMULATOR_INTEGRATOR_FUNCTION, ADAPTIVE_INTEGRATOR_FUNCTION, ALL, ARGUMENT_THERAPY_FUNCTION, AUTO_ASSIGN_MATRIX, AUTO_DEPENDENT, BACKPROPAGATION_FUNCTION, BETA, BIAS, COMBINATION_FUNCTION_TYPE, COMBINE_MEANS_FUNCTION, CONSTANT_INTEGRATOR_FUNCTION, CONTEXT, CORRELATION, CROSS_ENTROPY, CUSTOM_FUNCTION, DECAY, DIFFERENCE, DISTANCE_FUNCTION, DISTANCE_METRICS, DIST_FUNCTION_TYPE, DIST_MEAN, DIST_SHAPE, DRIFT_DIFFUSION_INTEGRATOR_FUNCTION, DistanceMetrics, ENERGY, ENTROPY, EUCLIDEAN, EXAMPLE_FUNCTION_TYPE, EXECUTING, EXPONENTIAL_DIST_FUNCTION, EXPONENTIAL_FUNCTION, EXPONENTS, FHN_INTEGRATOR_FUNCTION, FULL_CONNECTIVITY_MATRIX, FUNCTION, FUNCTION_OUTPUT_TYPE, FUNCTION_OUTPUT_TYPE_CONVERSION, FUNCTION_PARAMS, GAIN, GAMMA_DIST_FUNCTION, HEBBIAN_FUNCTION, HIGH, HOLLOW_MATRIX, IDENTITY_MATRIX, INCREMENT, INITIALIZER, INITIALIZING, INPUT_STATES, INTEGRATOR_FUNCTION, INTEGRATOR_FUNCTION_TYPE, INTERCEPT, LEARNING, LEARNING_FUNCTION_TYPE, LEARNING_RATE, LINEAR_COMBINATION_FUNCTION, LINEAR_FUNCTION, LINEAR_MATRIX_FUNCTION, LOGISTIC_FUNCTION, LOW, MATRIX, MATRIX_KEYWORD_NAMES, MATRIX_KEYWORD_VALUES, MAX_ABS_INDICATOR, MAX_ABS_VAL, MAX_INDICATOR, MAX_VAL, NOISE, NORMALIZING_FUNCTION_TYPE, NORMAL_DIST_FUNCTION, OBJECTIVE_FUNCTION_TYPE, OFFSET, ONE_HOT_FUNCTION, OPERATION, ORNSTEIN_UHLENBECK_INTEGRATOR_FUNCTION, OUTPUT_STATES, OUTPUT_TYPE, PARAMETER_STATE_PARAMS, PARAMS, PEARSON, PREDICTION_ERROR_DELTA_FUNCTION, PROB, PROB_INDICATOR, PRODUCT, RANDOM_CONNECTIVITY_MATRIX, RATE, RECEIVER, REDUCE_FUNCTION, RL_FUNCTION, SCALE, SIMPLE_INTEGRATOR_FUNCTION, SLOPE, SOFTMAX_FUNCTION, STABILITY_FUNCTION, STANDARD_DEVIATION, SUM, TDLEARNING_FUNCTION, TIME_STEP_SIZE, TRANSFER_FUNCTION_TYPE, UNIFORM_DIST_FUNCTION, USER_DEFINED_FUNCTION, USER_DEFINED_FUNCTION_TYPE, UTILITY_INTEGRATOR_FUNCTION, VARIABLE, WALD_DIST_FUNCTION, WEIGHTS, kwComponentCategory, kwPreferenceSetName -from psyneulink.globals.preferences.componentpreferenceset import 
is_pref_set, kpReportOutputPref, kpRuntimeParamStickyAssignmentPref +from psyneulink.globals.preferences.componentpreferenceset import is_pref_set, kpReportOutputPref from psyneulink.globals.preferences.preferenceset import PreferenceEntry, PreferenceLevel from psyneulink.globals.registry import register_category from psyneulink.globals.utilities import is_distance_metric, is_iterable, is_matrix, is_numeric, iscompatible, np_array_less_than_2d, parameter_spec @@ -811,7 +811,6 @@ class ArgumentTherapy(Function_Base): classPreferences = { kwPreferenceSetName: 'ExampleClassPreferences', kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE), - kpRuntimeParamStickyAssignmentPref: PreferenceEntry(False, PreferenceLevel.INSTANCE) } # Variable class default @@ -1928,7 +1927,6 @@ class LinearCombination(CombinationFunction): # ------------------------------- classPreferences = { kwPreferenceSetName: 'LinearCombinationCustomClassPreferences', kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE), - kpRuntimeParamStickyAssignmentPref: PreferenceEntry(False, PreferenceLevel.INSTANCE) } multiplicative_param = SCALE @@ -2402,7 +2400,6 @@ class CombineMeans(CombinationFunction): # ------------------------------------ classPreferences = { kwPreferenceSetName: 'CombineMeansCustomClassPreferences', kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE), - kpRuntimeParamStickyAssignmentPref: PreferenceEntry(False, PreferenceLevel.INSTANCE) } multiplicative_param = SCALE @@ -2648,8 +2645,6 @@ class PredictionErrorDeltaFunction(CombinationFunction): classPreferences = { kwPreferenceSetName: 'PredictionErrorDeltaCustomClassPreferences', kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE), - kpRuntimeParamStickyAssignmentPref: PreferenceEntry(False, - PreferenceLevel.INSTANCE) } class ClassDefaults(CombinationFunction.ClassDefaults): @@ -2941,7 +2936,6 @@ class Linear(TransferFunction): # --------------------------------------------- classPreferences = { kwPreferenceSetName: 'LinearClassPreferences', kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE), - kpRuntimeParamStickyAssignmentPref: PreferenceEntry(False, PreferenceLevel.INSTANCE) } paramClassDefaults = Function_Base.paramClassDefaults.copy() @@ -3470,7 +3464,6 @@ class OneHot(TransferFunction): # --------------------------------------------- classPreferences = { kwPreferenceSetName: 'OneHotClassPreferences', kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE), - kpRuntimeParamStickyAssignmentPref: PreferenceEntry(False, PreferenceLevel.INSTANCE) } paramClassDefaults = Function_Base.paramClassDefaults.copy() @@ -7288,9 +7281,8 @@ def _accumulator_check_args(self, variable=None, params=None, target_set=None, c # If params have been passed, treat as runtime params and assign to paramsCurrent # (relabel params as runtime_params for clarity) - if not self.runtimeParamStickyAssignmentPref: - for key in self._runtime_params_reset: - self._set_parameter_value(key, self._runtime_params_reset[key]) + for key in self._runtime_params_reset: + self._set_parameter_value(key, self._runtime_params_reset[key]) self._runtime_params_reset = {} runtime_params = params diff --git a/psyneulink/components/mechanisms/mechanism.py b/psyneulink/components/mechanisms/mechanism.py index 599e077da84..2e6cd882d29 100644 --- a/psyneulink/components/mechanisms/mechanism.py +++ b/psyneulink/components/mechanisms/mechanism.py @@ -783,8 +783,7 @@ class `UserList ` method, or in a `tuple with the 
Mechanism ` in the `pathway` of a `Process`. Any value assigned to a parameter in a **runtime_params** dictionary will override the current value of that parameter for the (and *only* the) current execution of the Mechanism; the value will return to its previous value -following that execution, unless the `runtimeParamStickyAssignmentPref` is set for the component to which the parameter -belongs. +following that execution. The runtime parameters for a Mechanism are specified using a dictionary that contains one or more entries, each of which is for a parameter of the Mechanism or its `function `, or for one of the `Mechanism's States @@ -2071,9 +2070,7 @@ def execute(self, a dictionary that can include any of the parameters used as arguments to instantiate the Mechanism, its function, or `Projection(s) to any of its States `. Any value assigned to a parameter will override the current value of that parameter for the (and only the current) execution of - the Mechanism, and will return to its previous value following execution (unless the - `runtimeParamStickyAssignmentPref` is set for the Component to which the parameter belongs). See - `runtime_params ` above for details concerning specification. + the Mechanism. Returns ------- diff --git a/psyneulink/components/mechanisms/processing/transfermechanism.py b/psyneulink/components/mechanisms/processing/transfermechanism.py index 24b01cbc28b..0e9309c72f4 100644 --- a/psyneulink/components/mechanisms/processing/transfermechanism.py +++ b/psyneulink/components/mechanisms/processing/transfermechanism.py @@ -669,8 +669,7 @@ class TransferMechanism(ProcessingMechanism_Base): # classPreferences = { # kwPreferenceSetName: 'TransferCustomClassPreferences', # # kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE), - # kpRuntimeParamStickyAssignmentPref: PreferenceEntry(False, PreferenceLevel.INSTANCE) - # } + # } # TransferMechanism parameter and control signal assignments): paramClassDefaults = ProcessingMechanism_Base.paramClassDefaults.copy() diff --git a/psyneulink/globals/preferences/componentpreferenceset.py b/psyneulink/globals/preferences/componentpreferenceset.py index 79481c8027d..35e1ba812b6 100644 --- a/psyneulink/globals/preferences/componentpreferenceset.py +++ b/psyneulink/globals/preferences/componentpreferenceset.py @@ -23,8 +23,7 @@ 'InstanceDefaultPreferencesDict', 'is_pref', 'is_pref_set', 'kwCategoryDefaultPreferences', 'kwInstanceDefaultPreferences', 'kwSubtypeDefaultPreferences', 'kwSystemDefaultPreferences', 'kwTypeDefaultPreferences', 'LOG_PREF', 'PARAM_VALIDATION_PREF', 'REPORT_OUTPUT_PREF', 'RUNTIME_PARAM_MODULATION_PREF', - 'RUNTIME_PARAM_STICKY_ASSIGNMENT_PREF', 'SubtypeDefaultPreferencesDict', 'SystemDefaultPreferencesDict', - 'TypeDefaultPreferencesDict', 'VERBOSE_PREF', + 'SubtypeDefaultPreferencesDict', 'SystemDefaultPreferencesDict', 'TypeDefaultPreferencesDict', 'VERBOSE_PREF', ] # Keypaths for preferences: @@ -33,7 +32,6 @@ PARAM_VALIDATION_PREF = kpParamValidationPref = '_param_validation_pref' VERBOSE_PREF = kpVerbosePref = '_verbose_pref' RUNTIME_PARAM_MODULATION_PREF = kpRuntimeParamModulationPref = '_runtime_param_modulation_pref' -RUNTIME_PARAM_STICKY_ASSIGNMENT_PREF = kpRuntimeParamStickyAssignmentPref = '_runtime_param_sticky_assignment_pref' # Keywords for generic level default preference sets kwSystemDefaultPreferences = 'SystemDefaultPreferences' @@ -49,8 +47,7 @@ kpParamValidationPref, kpReportOutputPref, kpLogPref, - kpRuntimeParamModulationPref, - kpRuntimeParamStickyAssignmentPref + 
kpRuntimeParamModulationPref } SystemDefaultPreferencesDict = { @@ -59,8 +56,7 @@ kpParamValidationPref: PreferenceEntry(True, PreferenceLevel.SYSTEM), kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.SYSTEM), kpLogPref: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY), - kpRuntimeParamModulationPref: PreferenceEntry(Modulation.MULTIPLY, PreferenceLevel.SYSTEM), - kpRuntimeParamStickyAssignmentPref: PreferenceEntry(False, PreferenceLevel.SYSTEM)} + kpRuntimeParamModulationPref: PreferenceEntry(Modulation.MULTIPLY, PreferenceLevel.SYSTEM)} CategoryDefaultPreferencesDict = { kwPreferenceSetName: kwCategoryDefaultPreferences, @@ -68,8 +64,7 @@ kpParamValidationPref: PreferenceEntry(True, PreferenceLevel.CATEGORY), kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.CATEGORY), kpLogPref: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY), - kpRuntimeParamModulationPref: PreferenceEntry(Modulation.MULTIPLY,PreferenceLevel.CATEGORY), - kpRuntimeParamStickyAssignmentPref: PreferenceEntry(False, PreferenceLevel.CATEGORY)} + kpRuntimeParamModulationPref: PreferenceEntry(Modulation.MULTIPLY,PreferenceLevel.CATEGORY)} TypeDefaultPreferencesDict = { kwPreferenceSetName: kwTypeDefaultPreferences, @@ -77,8 +72,7 @@ kpParamValidationPref: PreferenceEntry(True, PreferenceLevel.TYPE), kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.TYPE), kpLogPref: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY), # This gives control to Mechanisms - kpRuntimeParamModulationPref: PreferenceEntry(Modulation.ADD,PreferenceLevel.TYPE), - kpRuntimeParamStickyAssignmentPref: PreferenceEntry(False, PreferenceLevel.TYPE)} + kpRuntimeParamModulationPref: PreferenceEntry(Modulation.ADD,PreferenceLevel.TYPE)} SubtypeDefaultPreferencesDict = { kwPreferenceSetName: kwSubtypeDefaultPreferences, @@ -86,8 +80,7 @@ kpParamValidationPref: PreferenceEntry(True, PreferenceLevel.SUBTYPE), kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.SUBTYPE), kpLogPref: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY), # This gives control to Mechanisms - kpRuntimeParamModulationPref: PreferenceEntry(Modulation.ADD,PreferenceLevel.SUBTYPE), - kpRuntimeParamStickyAssignmentPref: PreferenceEntry(False, PreferenceLevel.SUBTYPE)} + kpRuntimeParamModulationPref: PreferenceEntry(Modulation.ADD,PreferenceLevel.SUBTYPE)} InstanceDefaultPreferencesDict = { kwPreferenceSetName: kwInstanceDefaultPreferences, @@ -95,8 +88,7 @@ kpParamValidationPref: PreferenceEntry(False, PreferenceLevel.INSTANCE), kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE), kpLogPref: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY), # This gives control to Mechanisms - kpRuntimeParamModulationPref: PreferenceEntry(Modulation.OVERRIDE, PreferenceLevel.INSTANCE), - kpRuntimeParamStickyAssignmentPref: PreferenceEntry(False, PreferenceLevel.INSTANCE)} + kpRuntimeParamModulationPref: PreferenceEntry(Modulation.OVERRIDE, PreferenceLevel.INSTANCE)} # Dict of default dicts ComponentDefaultPrefDicts = { @@ -176,7 +168,6 @@ class ComponentPreferenceSet(PreferenceSet): + kpReportOutputPref: report object's ouptut during execution + kpLogPref: record attribute data for the object during execution + kpRuntimeParamModulationPref: modulate parameters using runtime specification (in pathway) - + kpRuntimeParamStickAssignmentPref: assignments remain in effect until replaced value that is either a PreferenceSet, valid setting for the preference, or a PreferenceLevel; defaults - level 
(PreferenceLevel): ?? - name (str): name of PreferenceSet @@ -216,11 +207,6 @@ class ComponentPreferenceSet(PreferenceSet): runtimeParamModulation PreferenceEntry of owner's Preference object - runtimeParamModulationPref(setting=): assigns the value of the setting arg to the runtimeParamModulationPref of the owner's Preference object - - runtimeParamStickyAssignmentPref(): - returns setting for runtimeParamStickyAssignment preference at level specified in - runtimeParamStickyAssignment PreferenceEntry of owner's Preference object - - runtimeParamStickyAssignmentPref(setting=): - assigns value of the setting arg to the runtimeParamStickyAssignmentPref of the owner's Preference object """ # Use this as both: @@ -232,8 +218,7 @@ class ComponentPreferenceSet(PreferenceSet): kpParamValidationPref: PreferenceEntry(True, PreferenceLevel.SYSTEM), kpReportOutputPref: PreferenceEntry(True, PreferenceLevel.SYSTEM), kpLogPref: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY), - kpRuntimeParamModulationPref: PreferenceEntry(Modulation.MULTIPLY, PreferenceLevel.SYSTEM), - kpRuntimeParamStickyAssignmentPref: PreferenceEntry(False, PreferenceLevel.SYSTEM) + kpRuntimeParamModulationPref: PreferenceEntry(Modulation.MULTIPLY, PreferenceLevel.SYSTEM) } @@ -436,20 +421,3 @@ def runtimeParamModulationPref(self, setting): :return: """ self.set_preference(candidate_info=setting, pref_ivar_name=kpRuntimeParamModulationPref) - - @property - def runtimeParamStickyAssignmentPref(self): - """Returns owner's runtimeParamStickyAssignmentPref - :return: - """ - # return self._runtime_param_sticky_assignment_pref - return self.get_pref_setting_for_level(kpRuntimeParamStickyAssignmentPref, - self._runtime_param_sticky_assignment_pref.level)[0] - - @runtimeParamStickyAssignmentPref.setter - def runtimeParamStickyAssignmentPref(self, setting): - """Assign runtimeParamStickyAssignmentPref - :param entry: - :return: - """ - self.set_preference(candidate_info=setting, pref_ivar_name=kpRuntimeParamStickyAssignmentPref) diff --git a/psyneulink/globals/preferences/mechanismpreferenceset.py b/psyneulink/globals/preferences/mechanismpreferenceset.py index 1cdb978fa8a..620785c226c 100644 --- a/psyneulink/globals/preferences/mechanismpreferenceset.py +++ b/psyneulink/globals/preferences/mechanismpreferenceset.py @@ -13,14 +13,13 @@ import inspect from psyneulink.globals.keywords import NAME, kwPrefLevel, kwPrefsOwner -from psyneulink.globals.preferences.componentpreferenceset import ComponentPreferenceSet, kpLogPref, kpParamValidationPref, kpReportOutputPref, kpRuntimeParamModulationPref, kpRuntimeParamStickyAssignmentPref, kpVerbosePref +from psyneulink.globals.preferences.componentpreferenceset import ComponentPreferenceSet, kpLogPref, kpParamValidationPref, kpReportOutputPref, kpRuntimeParamModulationPref, kpVerbosePref from psyneulink.globals.preferences.preferenceset import PreferenceEntry, PreferenceLevel from psyneulink.globals.utilities import Modulation __all__ = [ 'MechanismPreferenceSet', 'runtimeParamModulationPrefCategoryDefault', 'runtimeParamModulationPrefInstanceDefault', - 'runtimeParamModulationPrefTypeDefault', 'runtimeParamStickyAssignmentPrefCategoryDefault', - 'runtimeParamStickyAssignmentPrefInstanceDefault', 'runtimeParamStickyAssignmentPrefTypeDefault', + 'runtimeParamModulationPrefTypeDefault' ] # MODIFIED 11/29/16 OLD: @@ -34,10 +33,6 @@ # runtimeParamModulationPrefCategoryDefault = PreferenceEntry(Modulation.MULTIPLY, PreferenceLevel.CATEGORY) runtimeParamModulationPrefCategoryDefault = 
PreferenceEntry(False, PreferenceLevel.CATEGORY) -runtimeParamStickyAssignmentPrefInstanceDefault = PreferenceEntry(False, PreferenceLevel.INSTANCE) -runtimeParamStickyAssignmentPrefTypeDefault = PreferenceEntry(False, PreferenceLevel.TYPE) -runtimeParamStickyAssignmentPrefCategoryDefault = PreferenceEntry(False, PreferenceLevel.CATEGORY) - reportOutputPrefInstanceDefault = PreferenceEntry(False, PreferenceLevel.INSTANCE) logPrefInstanceDefault = PreferenceEntry(False, PreferenceLevel.INSTANCE) verbosePrefInstanceDefault = PreferenceEntry(False, PreferenceLevel.INSTANCE) @@ -67,22 +62,6 @@ class MechanismPreferenceSet(ComponentPreferenceSet): assigns PreferenceEntry to runtimeParamModulationPref attribute of the owner's Preference object - runtimeParamModulationPrefEntry(entry=): returns PreferenceEntry for the runtimeParamModulationPref attribute of the owner's Preference object - - RuntimeParamStickyAssignmentPref(): - returns setting for runtimeParamStickyAssignment preference at level specified in - runtimeParamStickyAssignment - PreferenceEntry of owner's Preference object - - RuntimeParamStickyAssignmentPref(setting=): - assigns the value of the setting item in the RuntimeParamStickyAssignmentPref PreferenceEntry of the - owner's Preference object - - RuntimeParamStickyAssignmentPrefLevel() - returns level in the RuntimeParamStickyAssignmentPref PreferenceEntry of the owner's Preference object - - RuntimeParamStickyAssignmentPrefLevel(level=): - assigns the value of the level item in the RuntimeParamStickyAssignmentPref PreferenceEntry of the - owner's Preference object - - RuntimeParamStickyAssignmentPrefEntry(): - assigns PreferenceEntry to RuntimeParamStickyAssignmentPref attribute of the owner's Preference object - - RuntimeParamStickyAssignmentPrefEntry(entry=): - returns PreferenceEntry for the RuntimeParamStickyAssignmentPref attribute of the owner's Preference object """ def __init__(self, owner=None, @@ -107,10 +86,6 @@ def __init__(self, runtime_param_modulation_pref = kargs[kpRuntimeParamModulationPref] except (KeyError, NameError): pass - try: - runtime_param_sticky_assignment_pref = kargs[kpRuntimeParamStickyAssignmentPref] - except (KeyError, NameError): - pass try: log_pref = kargs[kpLogPref] except (KeyError, NameError): @@ -141,7 +116,6 @@ def __init__(self, name=name) # self._report_output_pref = reportOutput_pref self._runtime_param_modulation_pref = runtime_param_modulation_pref - self._runtime_param_sticky_assignment_pref = runtime_param_sticky_assignment_pref # runtimeParamModulation entry ------------------------------------------------------------------------------------ @@ -218,82 +192,3 @@ def runtimeParamModulationPrefEntry(self, entry): self._runtime_param_modulation_pref = entry - - # runtimeParamStickyAssignment entry ------------------------------------------------------------------------------- - - @property - def runtimeParamStickyAssignmentPref(self): - """Returns setting of owner's runtimeParamStickyAssignment pref at level specified in its PreferenceEntry.level - :param level: - :return: - """ - # If the level of the object is below the Preference level, - # recursively calls base (super) classes to get preference at specified level - return self.get_pref_setting_for_level(kpRuntimeParamStickyAssignmentPref, - self._runtime_param_sticky_assignment_pref.level)[0] - - - @runtimeParamStickyAssignmentPref.setter - def runtimeParamStickyAssignmentPref(self, setting): - """Assigns setting to owner's runtimeParamStickyAssignment pref - :param setting: - 
:return: - """ - if isinstance(setting, PreferenceEntry): - self._runtime_param_sticky_assignment_pref = setting - - # elif not iscompatible(setting, runtimeParamStickyAssignmentPrefInstanceDefault.setting): - elif not inspect.isfunction(runtimeParamStickyAssignmentPrefInstanceDefault.setting): - print("setting of runtimeParamStickyAssignment preference ({0}) must be a {1} or a function;" - " it will remain unchanged ({2})". - format(setting, - Modulation.__class__.__name__, - self._runtime_param_sticky_assignment_pref.setting)) - return - - else: - self._runtime_param_sticky_assignment_pref = \ - self._runtime_param_sticky_assignment_pref._replace(setting=setting) - - @property - def runtimeParamStickyAssignmentPrefLevel(self): - """Returns level for owner's runtimeParamStickyAssignment pref - :return: - """ - return self._runtime_param_sticky_assignment_pref.level - - @runtimeParamStickyAssignmentPrefLevel.setter - def runtimeParamStickyAssignmentPrefLevel(self, level): - """Sets level for owner's runtimeParamStickyAssignment pref - :param level: - :return: - """ - if not isinstance(level, PreferenceLevel): - print("Level of runtimeParamStickyAssignment preference ({0}) must be a PreferenceLevel setting; " - "it will remain unchanged ({1})". - format(level, self._runtime_param_sticky_assignment_pref.setting)) - return - self._runtime_param_sticky_assignment_pref = self._runtime_param_sticky_assignment_pref._replace(level=level) - - @property - def runtimeParamStickyAssignmentPrefEntry(self): - """Returns owner's runtimeParamStickyAssignment PreferenceEntry tuple (setting, level) - :return: - """ - return self._runtime_param_sticky_assignment_pref - - @runtimeParamStickyAssignmentPrefEntry.setter - def runtimeParamStickyAssignmentPrefEntry(self, entry): - """Assigns runtimeParamStickyAssignment PreferenceEntry to owner - :param entry: - :return: - """ - if not isinstance(entry, PreferenceEntry): - print("runtimeParamStickyAssignmentPrefEntry ({0}) must be a PreferenceEntry; " - "it will remain unchanged ({1})". 
- format(entry, self._runtime_param_sticky_assignment_pref)) - return - self._runtime_param_sticky_assignment_pref = entry - - - diff --git a/psyneulink/library/mechanisms/processing/leabramechanism.py b/psyneulink/library/mechanisms/processing/leabramechanism.py index 96e66db051c..e2b3f4f0102 100644 --- a/psyneulink/library/mechanisms/processing/leabramechanism.py +++ b/psyneulink/library/mechanisms/processing/leabramechanism.py @@ -106,7 +106,7 @@ from psyneulink.globals.context import ContextFlags from psyneulink.globals.keywords import FUNCTION, INITIALIZING, INPUT_STATES, LEABRA_FUNCTION, LEABRA_FUNCTION_TYPE,\ LEABRA_MECHANISM, NETWORK, OUTPUT_STATES, kwPreferenceSetName -from psyneulink.globals.preferences.componentpreferenceset import is_pref_set, kpReportOutputPref, kpRuntimeParamStickyAssignmentPref +from psyneulink.globals.preferences.componentpreferenceset import is_pref_set, kpReportOutputPref from psyneulink.globals.preferences.preferenceset import PreferenceEntry, PreferenceLevel from psyneulink.scheduling.time import TimeScale @@ -194,8 +194,7 @@ class LeabraFunction(Function_Base): classPreferences = { kwPreferenceSetName: 'LeabraFunctionClassPreferences', - kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE), - kpRuntimeParamStickyAssignmentPref: PreferenceEntry(False, PreferenceLevel.INSTANCE) + kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE) } paramClassDefaults = Function_Base.paramClassDefaults.copy() @@ -437,8 +436,7 @@ class LeabraMechanism(ProcessingMechanism_Base): # These will override those specified in TypeDefaultPreferences classPreferences = { kwPreferenceSetName: 'TransferCustomClassPreferences', - kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE), - kpRuntimeParamStickyAssignmentPref: PreferenceEntry(False, PreferenceLevel.INSTANCE) + kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE) } # LeabraMechanism parameter and control signal assignments): diff --git a/psyneulink/library/subsystems/evc/evcauxiliary.py b/psyneulink/library/subsystems/evc/evcauxiliary.py index b623ddc4bef..b3b8945af3c 100644 --- a/psyneulink/library/subsystems/evc/evcauxiliary.py +++ b/psyneulink/library/subsystems/evc/evcauxiliary.py @@ -20,7 +20,7 @@ from psyneulink.globals.context import ContextFlags from psyneulink.globals.defaults import MPI_IMPLEMENTATION, defaultControlAllocation from psyneulink.globals.keywords import COMBINE_OUTCOME_AND_COST_FUNCTION, COST_FUNCTION, EVC_SIMULATION, EXECUTING, FUNCTION_OUTPUT_TYPE_CONVERSION, INITIALIZING, PARAMETER_STATE_PARAMS, SAVE_ALL_VALUES_AND_POLICIES, VALUE_FUNCTION, kwPreferenceSetName, kwProgressBarChar -from psyneulink.globals.preferences.componentpreferenceset import is_pref_set, kpReportOutputPref, kpRuntimeParamStickyAssignmentPref +from psyneulink.globals.preferences.componentpreferenceset import is_pref_set, kpReportOutputPref from psyneulink.globals.preferences.preferenceset import PreferenceEntry, PreferenceLevel __all__ = [ @@ -70,8 +70,7 @@ class ClassDefaults(Function_Base.ClassDefaults): classPreferences = { kwPreferenceSetName: 'ValueFunctionCustomClassPreferences', kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE), - kpRuntimeParamStickyAssignmentPref: PreferenceEntry(False, PreferenceLevel.INSTANCE) - } + } @tc.typecheck def __init__(self, From d24762066837f9107e5bab763ad64ef51d48a4b1 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 4 May 2018 16:28:30 -0400 Subject: [PATCH 040/200] removing 
'_standardize_config_entries' which used to parse mechanism tuples; runtime params were the last remaining piece of mechanism tuples in use, but these are now handled by run --- psyneulink/components/process.py | 69 -------------------------------- 1 file changed, 69 deletions(-) diff --git a/psyneulink/components/process.py b/psyneulink/components/process.py index 8e7bcc8a735..257b7b5e5a9 100644 --- a/psyneulink/components/process.py +++ b/psyneulink/components/process.py @@ -966,8 +966,6 @@ def _instantiate_pathway(self, context): self._learning_mechs = [] self._target_mechs = [] - self._standardize_config_entries(pathway=pathway, context=context) - # VALIDATE PATHWAY THEN PARSE AND INSTANTIATE MECHANISM ENTRIES ------------------------------------ self._parse_and_instantiate_mechanism_entries(pathway=pathway, context=context) @@ -1030,73 +1028,6 @@ def _instantiate_value(self, context=None): # Immutable, so just assign value self.instance_defaults.value = value - def _standardize_config_entries(self, pathway, context=None): - - from psyneulink.components.mechanisms.mechanism import _is_mechanism_spec -# FIX: SHOULD MOVE VALIDATION COMPONENTS BELOW TO Process._validate_params - self.runtime_params_dict = {} - - # Kristen modified 5/24 - # in ALL mechanism tuples, the middle entry is set to zero (formerly used for specifying runtime params) - # rightmost entry is set to zero (formerly phase spec) - # if _is_mechanism_spec, runtime_params_dict[mechanism] is set to actual runtime params - - for i in range(len(pathway)): - config_item = pathway[i] - # if this element of the pathway is a tuple - if isinstance(config_item, tuple): - # and the tuple has 1 item - if len(config_item) is 1: - # if the tuple contains either a Mechanism or a Projection - if _is_mechanism_spec(config_item[0]) or _is_projection_spec(config_item[0]): - - # Replace it with just the mech or proj - pathway[i] = config_item - # if it's a mechanism, set the runtime params to None - if _is_mechanism_spec(config_item[0]): - self.runtime_params_dict[config_item[0]] = None - # otherwise the tuple is not valid - else: - raise ProcessError("First item of tuple ({}) in entry {} of pathway for {}" - " is neither a Mechanism nor a Projection specification". - format(config_item[0], i, self.name)) - # If the tuple has two items - if len(config_item) is 2: - - # Replace it with just the mech or proj - pathway[i] = config_item[0] - - # If it's a mechanism - if _is_mechanism_spec(config_item[0]): - # and its second element is a dict - if isinstance(config_item[1], dict): - # set the mechanism's runtime params to be the second element - self.runtime_params_dict[config_item[0]] = config_item[1] - # if the second element is not a dict, then it's not valid - else: - raise ProcessError("Second item of tuple ({}) in item {} of pathway for {}" - " is not a params dict.". - format(config_item[1], i, self.name)) - # if the first element is not a mechanism, then it's not valid - else: - raise ProcessError("Projection cannot have a runtime params dict".format(config_item[0], - i, self.name)) - # config_item should not have more than 2 elements - if len(config_item) > 2: - raise ProcessError("The tuple for item {} of pathway for {} has more than two items {}". 
- format(i, self.name, config_item)) - else: - # If the item is a Mechanism or a Projection - if _is_mechanism_spec(pathway[i]) or _is_projection_spec(pathway[i]): - # if it's a mechanism, set runtime params to None - if _is_mechanism_spec(pathway[i]): - self.runtime_params_dict[pathway[i]] = None - - else: - raise ProcessError("Item of {} of pathway for {}" - " is neither a Mechanism nor a Projection specification". - format(i, self.name)) - def _parse_and_instantiate_mechanism_entries(self, pathway, context=None): # FIX: SHOULD MOVE VALIDATION COMPONENTS BELOW TO Process._validate_params From 66e3bd95710dabdd0dab941e7ac03350e30e2a18 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 4 May 2018 16:54:29 -0400 Subject: [PATCH 041/200] removing validation of runtime params at the mechanism level --- psyneulink/components/mechanisms/mechanism.py | 27 ------------------- 1 file changed, 27 deletions(-) diff --git a/psyneulink/components/mechanisms/mechanism.py b/psyneulink/components/mechanisms/mechanism.py index 2e6cd882d29..598be659518 100644 --- a/psyneulink/components/mechanisms/mechanism.py +++ b/psyneulink/components/mechanisms/mechanism.py @@ -2140,33 +2140,6 @@ def execute(self, ) return np.atleast_2d(return_value) - - # VALIDATE RUNTIME PARAMETER SETS - # Insure that param set is for a States: - if self.prefs.paramValidationPref: - if runtime_params: - pass - # # runtime_params can have entries for any of the the Mechanism's params, or - # # one or more state keys, each of which should be for a params dictionary for the corresponding - # # state type, and each of can contain only parameters relevant to that state - # state_keys = [INPUT_STATE_PARAMS, PARAMETER_STATE_PARAMS, OUTPUT_STATE_PARAMS] - # param_names = list({**self.user_params, **self.function_params}) - # if not all(key in state_keys + param_names for key in runtime_params): - # raise MechanismError("There is an invalid specification for a runtime parameter of {}". - # format(self.name)) - # # for state_key in runtime_params: - # for state_key in [entry for entry in runtime_params if entry in state_keys]: - # state_dict = runtime_params[state_key] - # if not isinstance(state_dict, dict): - # raise MechanismError("runtime_params entry for {} is not a dict". - # format(self.name, state_key)) - # for param_name in state_dict: - # if not param_name in param_names: - # raise MechanismError("{} entry in runtime_params for {} " - # "contains an unrecognized parameter: {}". - # format(state_key, self.name, param_name)) - - # FIX: ??MAKE CONDITIONAL ON self.prefs.paramValidationPref?? 
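Taken together with the next two patches, runtime-param handling now lives in Component._check_args rather than in Mechanism.execute: any overrides left over from a previous call are restored, a dict spec is applied attribute-by-attribute (remembering the old values for the post-execution reset), and any other non-None spec is rejected. A condensed sketch of that logic, pieced together from the surrounding diffs (illustrative only):

    # inside Component._check_args(self, variable, params, ...):
    for key in self._runtime_params_reset:
        self._set_parameter_value(key, self._runtime_params_reset[key])      # undo leftover overrides
    self._runtime_params_reset = {}

    runtime_params = params
    if isinstance(runtime_params, dict):
        for param_name in runtime_params:
            if hasattr(self, param_name) and param_name not in {FUNCTION, INPUT_STATES, OUTPUT_STATES}:
                self._runtime_params_reset[param_name] = getattr(self, param_name)   # remember current value
                self._set_parameter_value(param_name, runtime_params[param_name])    # apply the override
    elif runtime_params:  # a non-dict, non-None spec is invalid
        raise ComponentError("Invalid specification of runtime parameters for {}".format(self.name))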
# VALIDATE INPUT STATE(S) AND RUNTIME PARAMS self._check_args( From 75761dc865d1ddc51d8c381dbe205a10d095999b Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 4 May 2018 17:01:46 -0400 Subject: [PATCH 042/200] adding a check to component's check args to verify that runtime params were passed as a dict --- psyneulink/components/component.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/psyneulink/components/component.py b/psyneulink/components/component.py index 7eff4b69c97..81e1bf58759 100644 --- a/psyneulink/components/component.py +++ b/psyneulink/components/component.py @@ -1647,7 +1647,8 @@ def _check_args(self, variable=None, params=None, target_set=None, context=None) Does the following: - instantiate variable (if missing or callable) - validate variable if PARAM_VALIDATION is set - - assign runtime params to paramsCurrent + - resets leftover runtime params back to original values (only if execute method was called directly) + - sets runtime params - validate params if PARAM_VALIDATION is set :param variable: (anything but a dict) - variable to validate @@ -1691,20 +1692,25 @@ def _check_args(self, variable=None, params=None, target_set=None, context=None) # # self._validate_params(params, target_set, context=FUNCTION_CHECK_ARGS) # self._validate_params(request_set=params, target_set=target_set, context=context) - # If params have been passed, treat as runtime params and assign to paramsCurrent - # (relabel params as runtime_params for clarity) + # reset any runtime params that were leftover from a direct call to .execute (atypical) for key in self._runtime_params_reset: self._set_parameter_value(key, self._runtime_params_reset[key]) self._runtime_params_reset = {} + # If params have been passed, treat as runtime params runtime_params = params - if runtime_params: + if isinstance(runtime_params, dict): for param_name in runtime_params: + # (1) store current attribute value in _runtime_params_reset so that it can be reset later + # (2) assign runtime param values to attributes (which calls validation via properties) + # (3) update parameter states if needed if hasattr(self, param_name): if param_name in {FUNCTION, INPUT_STATES, OUTPUT_STATES}: continue self._runtime_params_reset[param_name] = getattr(self, param_name) self._set_parameter_value(param_name, runtime_params[param_name]) + elif runtime_params: # not None + raise ComponentError("Invalid specification of runtime parameters for {}".format(self.name)) return variable From 9f16109057e84af4ec791b454a3bc598113a7eb2 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 4 May 2018 17:08:27 -0400 Subject: [PATCH 043/200] adding runtime params documentation to global 'run' method --- psyneulink/components/process.py | 2 +- psyneulink/globals/environment.py | 16 +++++++++++++++- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/psyneulink/components/process.py b/psyneulink/components/process.py index 257b7b5e5a9..1400ae4acc3 100644 --- a/psyneulink/components/process.py +++ b/psyneulink/components/process.py @@ -2067,7 +2067,7 @@ def execute( self.input = self._assign_input_values(input=input, context=context) - self._check_args(self.input,runtime_params) + self._check_args(self.input, runtime_params) # Use Process self.input as input to first Mechanism in Pathway variable = self._update_variable(self.input) diff --git a/psyneulink/globals/environment.py b/psyneulink/globals/environment.py index 959a08daa78..022b7fc38cf 100644 --- a/psyneulink/globals/environment.py +++ 
b/psyneulink/globals/environment.py @@ -538,7 +538,11 @@ def run(object, call_before_trial=None, \ call_after_trial=None, \ call_before_time_step=None, \ - call_after_time_step=None, \) + call_after_time_step=None, \ + termination_processing=None, \ + termination_learning=None, \ + runtime_params=None, \ + ) Run a sequence of executions for a `Process` or `System`. @@ -609,6 +613,16 @@ def run(object, a dictionary containing `Condition`\\ s that signal the end of the associated `TimeScale` within the :ref:`learning phase of execution ` + runtime_params : Dict[Mechanism: Dict[Param: Tuple(Value, Condition)]] + a nested dictionary of value + condition tuples for parameters of mechanisms of the Composition. At the outer + layer of the dictionary, keys are Mechanisms and values are runtime parameter specification dictionaries. Inside + of those dictionaries, keys are keywords corresponding to Parameters of the Mechanism and values are tuples, the + index 0 item of which is the runtime parameter value, and the index 1 item of which is a `Condition`. Runtime + parameter values are subject to the same type, value, and shape requirements as the parameter in question. If a + runtime parameter is meant to be used throughout the run, then the `Condition` may be omitted and the "Always" + `Condition` will be assigned by default. See `RuntimeParameters` for examples of valid dictionaries. + + Returns ------- From 95a1408b6101521424301f4fb6857ed6d65d847a Mon Sep 17 00:00:00 2001 From: jdcpni Date: Fri, 4 May 2018 17:39:54 -0400 Subject: [PATCH 044/200] Fix/state/variable adjust and update (#782) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * • LCA - _execute: eliminated (by consolidating remaining bits into helper methods * • MaskMappingProjection - corrections to docstrings * • MaskMappingProjection - corrections to docstrings * Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeuLink into devel # Conflicts: # tests/projections/test_projections_specifications.py * • InputState, OutputState - implemented socket_width attribute * • InputState, OutputState - implemented socket_width attribute * • State - _instantiate_projections_to_state: fixed to extend variable * • State - _instantiate_projections_to_state: fixed to extend variable * - * • State - _instantiate_projections_to_state: fixed to extend variable • InputState - implemented socket_template attribute (= np.zeros(.instance_defaults.variable) * - * - * - --- ...ut_and_output_states_to_gating_signals.xml | 19 -------- ...st_for_tests_control_test_EVC_test_EVC.xml | 21 --------- ..._mechanism_docs_test_lc_mechanism_docs.xml | 19 -------- ...test_formats_for_control_specification.xml | 19 -------- ..._test_formats_for_gating_specification.xml | 19 -------- ...jection_with_mech_and_state_name_specs.xml | 19 -------- .../py_test_in_test_lib_mechanism_docs_py.xml | 19 -------- psyneulink/components/functions/function.py | 3 +- .../processing/transfermechanism.py | 41 +++++++----------- psyneulink/components/process.py | 4 +- .../projections/pathway/mappingprojection.py | 3 +- psyneulink/components/states/inputstate.py | 18 ++++++-- psyneulink/components/states/outputstate.py | 6 +-- psyneulink/components/states/state.py | 43 +++++++++++++------ psyneulink/components/system.py | 9 ++-- psyneulink/globals/environment.py | 4 +- .../subsystems/evc/evccontrolmechanism.py | 3 +- tests/mechanisms/test_input_state_spec.py | 11 +++-- .../test_recurrent_transfer_mechanism.py | 1 + 19 files changed, 82 
insertions(+), 199 deletions(-) delete mode 100644 .idea/runConfigurations/py_test_for_test_projections_specifications_TestProjectionSpecificationFormats_test_2_item_tuple_from_input_and_output_states_to_gating_signals.xml delete mode 100644 .idea/runConfigurations/py_test_for_tests_control_test_EVC_test_EVC.xml delete mode 100644 .idea/runConfigurations/py_test_for_tests_documentation_library_test_lib_mechanism_docs_test_lc_mechanism_docs.xml delete mode 100644 .idea/runConfigurations/py_test_for_tests_projections_test_projections_specifications_TestProjectionSpecificationFormats_test_formats_for_control_specification.xml delete mode 100644 .idea/runConfigurations/py_test_for_tests_projections_test_projections_specifications_TestProjectionSpecificationFormats_test_formats_for_gating_specification.xml delete mode 100644 .idea/runConfigurations/py_test_for_tests_projections_test_projections_specifications_TestProjectionSpecificationFormats_test_mapping_projection_with_mech_and_state_name_specs.xml delete mode 100644 .idea/runConfigurations/py_test_in_test_lib_mechanism_docs_py.xml diff --git a/.idea/runConfigurations/py_test_for_test_projections_specifications_TestProjectionSpecificationFormats_test_2_item_tuple_from_input_and_output_states_to_gating_signals.xml b/.idea/runConfigurations/py_test_for_test_projections_specifications_TestProjectionSpecificationFormats_test_2_item_tuple_from_input_and_output_states_to_gating_signals.xml deleted file mode 100644 index 33d006aba1f..00000000000 --- a/.idea/runConfigurations/py_test_for_test_projections_specifications_TestProjectionSpecificationFormats_test_2_item_tuple_from_input_and_output_states_to_gating_signals.xml +++ /dev/null @@ -1,19 +0,0 @@ - - - - \ No newline at end of file diff --git a/.idea/runConfigurations/py_test_for_tests_control_test_EVC_test_EVC.xml b/.idea/runConfigurations/py_test_for_tests_control_test_EVC_test_EVC.xml deleted file mode 100644 index 895e167d597..00000000000 --- a/.idea/runConfigurations/py_test_for_tests_control_test_EVC_test_EVC.xml +++ /dev/null @@ -1,21 +0,0 @@ - - - - - \ No newline at end of file diff --git a/.idea/runConfigurations/py_test_for_tests_documentation_library_test_lib_mechanism_docs_test_lc_mechanism_docs.xml b/.idea/runConfigurations/py_test_for_tests_documentation_library_test_lib_mechanism_docs_test_lc_mechanism_docs.xml deleted file mode 100644 index fde8c201993..00000000000 --- a/.idea/runConfigurations/py_test_for_tests_documentation_library_test_lib_mechanism_docs_test_lc_mechanism_docs.xml +++ /dev/null @@ -1,19 +0,0 @@ - - - - \ No newline at end of file diff --git a/.idea/runConfigurations/py_test_for_tests_projections_test_projections_specifications_TestProjectionSpecificationFormats_test_formats_for_control_specification.xml b/.idea/runConfigurations/py_test_for_tests_projections_test_projections_specifications_TestProjectionSpecificationFormats_test_formats_for_control_specification.xml deleted file mode 100644 index 3f782edc215..00000000000 --- a/.idea/runConfigurations/py_test_for_tests_projections_test_projections_specifications_TestProjectionSpecificationFormats_test_formats_for_control_specification.xml +++ /dev/null @@ -1,19 +0,0 @@ - - - - \ No newline at end of file diff --git a/.idea/runConfigurations/py_test_for_tests_projections_test_projections_specifications_TestProjectionSpecificationFormats_test_formats_for_gating_specification.xml 
b/.idea/runConfigurations/py_test_for_tests_projections_test_projections_specifications_TestProjectionSpecificationFormats_test_formats_for_gating_specification.xml deleted file mode 100644 index a91869735bf..00000000000 --- a/.idea/runConfigurations/py_test_for_tests_projections_test_projections_specifications_TestProjectionSpecificationFormats_test_formats_for_gating_specification.xml +++ /dev/null @@ -1,19 +0,0 @@ - - - - \ No newline at end of file diff --git a/.idea/runConfigurations/py_test_for_tests_projections_test_projections_specifications_TestProjectionSpecificationFormats_test_mapping_projection_with_mech_and_state_name_specs.xml b/.idea/runConfigurations/py_test_for_tests_projections_test_projections_specifications_TestProjectionSpecificationFormats_test_mapping_projection_with_mech_and_state_name_specs.xml deleted file mode 100644 index 0ba018a6cf3..00000000000 --- a/.idea/runConfigurations/py_test_for_tests_projections_test_projections_specifications_TestProjectionSpecificationFormats_test_mapping_projection_with_mech_and_state_name_specs.xml +++ /dev/null @@ -1,19 +0,0 @@ - - - - \ No newline at end of file diff --git a/.idea/runConfigurations/py_test_in_test_lib_mechanism_docs_py.xml b/.idea/runConfigurations/py_test_in_test_lib_mechanism_docs_py.xml deleted file mode 100644 index f054b8bc089..00000000000 --- a/.idea/runConfigurations/py_test_in_test_lib_mechanism_docs_py.xml +++ /dev/null @@ -1,19 +0,0 @@ - - - - \ No newline at end of file diff --git a/psyneulink/components/functions/function.py b/psyneulink/components/functions/function.py index 22dcaa98d9d..2198bb1faa2 100644 --- a/psyneulink/components/functions/function.py +++ b/psyneulink/components/functions/function.py @@ -4348,8 +4348,7 @@ def keyword(obj, keyword): if isinstance(obj.receiver.instance_defaults.variable, numbers.Number): cols = 1 else: - # cols = len(obj.receiver.instance_defaults.variable) - cols = obj.receiver.instance_defaults.variable.shape[-1] + cols = obj.receiver.socket_width matrix = get_matrix(keyword, rows, cols) if matrix is None: diff --git a/psyneulink/components/mechanisms/processing/transfermechanism.py b/psyneulink/components/mechanisms/processing/transfermechanism.py index 24b01cbc28b..0f75aad0ae1 100644 --- a/psyneulink/components/mechanisms/processing/transfermechanism.py +++ b/psyneulink/components/mechanisms/processing/transfermechanism.py @@ -924,27 +924,20 @@ def _get_integrated_function_input(self, function_variable, initial_value, noise if not self.integrator_function: - self.integrator_function = AdaptiveIntegrator( - function_variable, - initializer=initial_value, - noise=noise, - rate=smoothing_factor, - owner=self - ) + self.integrator_function = AdaptiveIntegrator(function_variable, + initializer=initial_value, + noise=noise, + rate=smoothing_factor, + owner=self) self.original_integrator_function = self.integrator_function - current_input = self.integrator_function.execute( - function_variable, - # Should we handle runtime params? - runtime_params={ - # FIX: 4/30/18 - SHOULDN'T THESE BE THE PARAMS PASSED IN OR RETRIEVED ABOVE?? - INITIALIZER: self.initial_value, - NOISE: self.noise, - RATE: self.smoothing_factor - }, - context=context - ) + current_input = self.integrator_function.execute(function_variable, + # Should we handle runtime params? 
+ runtime_params={INITIALIZER: self.initial_value, + NOISE: self.noise, + RATE: self.smoothing_factor}, + context=context) return current_input @@ -960,13 +953,11 @@ def _clip_result(self, clip, current_input, runtime_params, context): outputs[maxCapIndices] = np.max(clip) return outputs - def _execute( - self, - variable=None, - function_variable=None, - runtime_params=None, - context=None - ): + def _execute(self, + variable=None, + function_variable=None, + runtime_params=None, + context=None): """Execute TransferMechanism function and return transform of input Execute TransferMechanism function on input, and assign to output_values: diff --git a/psyneulink/components/process.py b/psyneulink/components/process.py index 8e7bcc8a735..24bef6e66d5 100644 --- a/psyneulink/components/process.py +++ b/psyneulink/components/process.py @@ -1684,9 +1684,7 @@ def _assign_process_input_projections(self, mechanism, context=None): if num_process_inputs == num_mechanism_input_states: for i in range(num_mechanism_input_states): # Insure that each Process input value is compatible with corresponding variable of mechanism.input_state - # MODIFIED 4/3/17 NEW: - input_state_variable = mechanism.input_states[i].instance_defaults.variable - # MODIFIED 4/3/17 END + input_state_variable = mechanism.input_states[i].socket_template if not iscompatible(process_input[i], input_state_variable): raise ProcessError("Input value {0} ({1}) for {2} is not compatible with " "variable for corresponding inputState of {3}". diff --git a/psyneulink/components/projections/pathway/mappingprojection.py b/psyneulink/components/projections/pathway/mappingprojection.py index ef5eac1b8ca..0132c43314f 100644 --- a/psyneulink/components/projections/pathway/mappingprojection.py +++ b/psyneulink/components/projections/pathway/mappingprojection.py @@ -536,8 +536,7 @@ def _instantiate_receiver(self, context=None): except TypeError: mapping_input_len = 1 try: - # receiver_len = len(self.receiver.instance_defaults.variable) - receiver_len = self.receiver.instance_defaults.variable.shape[-1] + receiver_len = self.receiver.socket_width except TypeError: receiver_len = 1 diff --git a/psyneulink/components/states/inputstate.py b/psyneulink/components/states/inputstate.py index 249b3cccc2b..f79b6f1f6ff 100644 --- a/psyneulink/components/states/inputstate.py +++ b/psyneulink/components/states/inputstate.py @@ -684,6 +684,7 @@ class InputState(State_Base): projectionSocket = SENDER modulators = [GATING_SIGNAL] + classPreferenceLevel = PreferenceLevel.TYPE # Any preferences specified below will override those specified in TypeDefaultPreferences # Note: only need to specify setting; level will be assigned to TYPE automatically @@ -909,12 +910,17 @@ def _execute(self, variable=None, function_variable=None, runtime_params=None, c # TODO: stateful - this seems dangerous with statefulness, # maybe safe when self.value is only passed or stateful variable = np.asarray(self._path_proj_values) - self._update_variable(variable[0]) + # MODIFIED 5/4/18 OLD: + # self._update_variable(variable[0]) + # MODIFIED 5/4/18 NEW: + self._update_variable(variable) + # MODIFIED 5/4/18 END combined_values = super()._execute(variable=variable, function_variable=variable, runtime_params=runtime_params, context=context ) + return combined_values # There were no Projections else: @@ -1049,8 +1055,6 @@ def _parse_state_specific_specs(self, owner, state_dict, state_specific_spec): matrix = None elif isinstance(projection, Projection): if projection.context.initialization_status 
== ContextFlags.DEFERRED_INIT: - # # FIX: 5/2/18 - ??CORRECT: - # variable = None continue matrix = projection.matrix else: @@ -1186,6 +1190,14 @@ def pathway_projections(self): def pathway_projections(self, assignment): self.path_afferents = assignment + @property + def socket_width(self): + return self.instance_defaults.variable.shape[-1] + + @property + def socket_template(self): + return np.zeros(self.socket_width) + @staticmethod def _get_state_function_value(owner, function, variable): """Put InputState's variable in a list if its function is LinearCombination and variable is >=2d diff --git a/psyneulink/components/states/outputstate.py b/psyneulink/components/states/outputstate.py index c6bea94a236..b4f7dcf6ff6 100644 --- a/psyneulink/components/states/outputstate.py +++ b/psyneulink/components/states/outputstate.py @@ -1184,8 +1184,6 @@ def _get_state_function_value(owner, function, variable): try: # return fct(variable=fct_variable) return State_Base._get_state_function_value(owner=owner, function=fct, variable=fct_variable) - # FIX: 5/2/18 JDC IS THIS NEEDED? ISN'T IT HANDLED BY SUPER (SINCE IT CALLS WITHOUT NAME OF VARIABLE ARG)? - # IF fct IS NOT FOUND, PASS OutputState.ClassDefault.function except: try: return fct(fct_variable) @@ -1197,7 +1195,6 @@ def _get_state_function_value(owner, function, variable): def variable(self): return _parse_output_state_variable(self.owner, self._variable) - @variable.setter def variable(self, variable): self._variable = variable @@ -1213,6 +1210,9 @@ def _update_variable(self, value): self._variable = value return self.variable + @property + def socket_width(self): + return self.value.shape[-1] @property def owner_value_index(self): diff --git a/psyneulink/components/states/state.py b/psyneulink/components/states/state.py index 8853229ad1c..48fd4280246 100644 --- a/psyneulink/components/states/state.py +++ b/psyneulink/components/states/state.py @@ -738,7 +738,8 @@ def test_multiple_modulatory_projections_with_mech_and_state_name_specs(self): import typecheck as tc from psyneulink.components.component import Component, ComponentError, component_keywords, function_type, method_type -from psyneulink.components.functions.function import Function, Linear, LinearCombination, ModulationParam, _get_modulated_param, get_param_value_for_keyword +from psyneulink.components.functions.function import CombinationFunction, Function, Linear, LinearCombination, \ + ModulationParam, _get_modulated_param, get_param_value_for_keyword from psyneulink.components.shellclasses import Mechanism, Process_Base, Projection, State from psyneulink.globals.context import ContextFlags from psyneulink.globals.keywords import AUTO_ASSIGN_MATRIX, COMMAND_LINE, CONTEXT, CONTROL_PROJECTION_PARAMS, \ @@ -755,7 +756,7 @@ def test_multiple_modulatory_projections_with_mech_and_state_name_specs(self): from psyneulink.globals.utilities import ContentAddressableList, MODULATION_OVERRIDE, Modulation, convert_to_np_array, get_args, get_class_attributes, is_value_spec, iscompatible, merge_param_dicts, type_match __all__ = [ - 'State_Base', 'state_keywords', 'state_type_keywords', 'StateError', 'StateRegistry', + 'State_Base', 'state_keywords', 'state_type_keywords', 'StateError', 'StateRegistry' ] state_keywords = component_keywords.copy() @@ -779,13 +780,13 @@ def test_multiple_modulatory_projections_with_mech_and_state_name_specs(self): STATE_SPEC = 'state_spec' REMOVE_STATES = 'REMOVE_STATES' + def _is_state_class(spec): if inspect.isclass(spec) and issubclass(spec, State): return True 
return False - # Note: This is created only for assignment of default projection types for each State subclass (see .__init__.py) # Individual stateRegistries (used for naming) are created for each Mechanism StateRegistry = {} @@ -1522,11 +1523,22 @@ def _instantiate_projections_to_state(self, projections, context=None): # Avoid duplicates, since instantiation of projection may have already called this method # and assigned Projection to self.path_afferents or mod_afferents lists if isinstance(projection, PathwayProjection_Base) and not projection in self.path_afferents: - self.path_afferents.append(projection) + projs = self.path_afferents + variable = self.instance_defaults.variable + projs.append(projection) + # if len(projs)>1: + if len(projs)>1 and isinstance(self.function_object, CombinationFunction): + if variable.ndim == 1: + variable = np.atleast_2d(variable) + self.instance_defaults.variable = np.append(variable, np.atleast_2d(projection.value), axis=0) + # self.instance_defaults.variable = np.append(variable, projection.value, axis=0) + self._update_variable(self.instance_defaults.variable) + elif isinstance(projection, ModulatoryProjection_Base) and not projection in self.mod_afferents: self.mod_afferents.append(projection) + def _instantiate_projection_from_state(self, projection_spec, receiver=None, context=None): """Instantiate outgoing projection from a State and assign it to self.efferents @@ -1777,7 +1789,7 @@ def _get_receiver_state(spec): # PathwayProjection: # - check that projection's value is compatible with the receiver's variable if isinstance(projection, PathwayProjection_Base): - if not iscompatible(projection.value, receiver.instance_defaults.variable): + if not iscompatible(projection.value, receiver.socket_template): raise StateError("Output of {} ({}) is not compatible with the variable ({}) of " "the State to which it is supposed to project ({}).". 
format(projection.name, projection.value, @@ -1877,7 +1889,9 @@ def update(self, params=None, context=None): gating_projection_params = merge_param_dicts(self.stateParams, GATING_PROJECTION_PARAMS, PROJECTION_PARAMS) #For each projection: get its params, pass them to it, get the projection's value, and append to relevant list + # MODIFIED 5/4/18 OLD: self._path_proj_values = [] + # MODIFIED 5/4/18 END for value in self._mod_proj_values: self._mod_proj_values[value] = [] @@ -1907,6 +1921,9 @@ def update(self, params=None, context=None): modulatory_override = False # Get values of all Projections + # MODIFIED 5/4/18 NEW: + variable = [] + # MODIFIED 5/4/18 END for projection in self.all_afferents: # Only update if sender has also executed in this round @@ -1962,7 +1979,11 @@ def update(self, params=None, context=None): if isinstance(projection, PathwayProjection_Base): # Add projection_value to list of PathwayProjection values (for aggregation below) + # MODIFIED 5/4/18 OLD: self._path_proj_values.append(projection_value) + # MODIFIED 5/4/18 NEW: + variable.append(projection_value) + # MODIFIED 5/4/18 END # If it is a ModulatoryProjection, add its value to the list in the dict entry for the relevant mod_param elif isinstance(projection, ModulatoryProjection_Base): @@ -2016,7 +2037,12 @@ def update(self, params=None, context=None): function_params = self.stateParams[FUNCTION_PARAMS] except (KeyError, TypeError): function_params = None + + # # MODIFIED 5/4/18 OLD: self.value = self.execute(runtime_params=function_params, context=context) + # # MODIFIED 5/4/18 NEW: + # self.value = self._execute(function_variable=variable, runtime_params=function_params, context=context) + # MODIFIED 5/4/18 END @property def owner(self): @@ -2511,11 +2537,6 @@ def _parse_state_spec(state_type=None, variable = state_dict[VARIABLE] params = state_specific_args - # Convert reference_value to np.array to match state_variable (which, as output of function, will be an np.array) - # if isinstance(reference_value, numbers.Number): - # FIX: 5/2/18 JDC - NOT NECESSARILY... OUTPUT_STATE FUNCTIONS CAN GENERATE ANYTHING - # reference_value = convert_to_np_array(reference_value,1) - # Validate that state_type is a State class if isinstance(state_type, str): try: @@ -2868,9 +2889,7 @@ def _parse_state_spec(state_type=None, 'Function class or instance of one'. format(spec_function)) except (KeyError, TypeError): - # MODIFIED NEW 5/2/18 FIX: NEEDS TO RETURN None from OutputState._get_state_function_value if owner has no value spec_function_value = state_type._get_state_function_value(owner, None, state_dict[VARIABLE]) - # MODIFIED 5/2/18 END # Assign value based on variable if not specified diff --git a/psyneulink/components/system.py b/psyneulink/components/system.py index 76328e7b8c9..985ac0a7b39 100644 --- a/psyneulink/components/system.py +++ b/psyneulink/components/system.py @@ -1549,16 +1549,15 @@ def _instantiate_stimulus_inputs(self, context=None): # that the length of the corresponding item of self.instance_defaults.variable matches the length of the # ORIGIN inputState's instance_defaults.variable attribute for j in range(len(origin_mech.input_states)): - if len(self.instance_defaults.variable[i][j]) != \ - len(origin_mech.input_states[j].instance_defaults.variable): + if len(self.instance_defaults.variable[i][j]) != origin_mech.input_states[j].socket_width: raise SystemError("Length of input {} ({}) does not match the length of the input ({}) for the " "corresponding ORIGIN Mechanism ()". 
format(i, len(self.instance_defaults.variable[i][j]), - len(origin_mech.input_states[j].instance_defaults.variable), + origin_mech.input_states[j].socket_width, origin_mech.name)) stimulus_input_state = SystemInputState(owner=self, - variable=origin_mech.input_states[j].instance_defaults.variable, + variable=origin_mech.input_states[j].socket_template, prefs=self.prefs, name="System Input State to Mechansism {}, Input State {}". format(origin_mech.name,j), @@ -3243,7 +3242,7 @@ def _get_label(self, item, show_dimensions): # For Mechanisms, show length of each InputState and OutputState if isinstance(item, Mechanism): if show_dimensions in {ALL, MECHANISMS}: - input_str = "in ({})".format(",".join(str(len(input_state.variable)) + input_str = "in ({})".format(",".join(str(input_state.socket_width) for input_state in item.input_states)) output_str = "out ({})".format(",".join(str(len(np.atleast_1d(output_state.value))) for output_state in item.output_states)) diff --git a/psyneulink/globals/environment.py b/psyneulink/globals/environment.py index 92430279bdb..5675b41eb9d 100644 --- a/psyneulink/globals/environment.py +++ b/psyneulink/globals/environment.py @@ -942,7 +942,7 @@ def _adjust_target_dict(component, target_dict): num_targets = -1 for mech, target_list in target_dict.items(): if isinstance(target_list, (float, list, np.ndarray)): - input_state_variable = mech.output_state.efferents[0].receiver.owner.input_states[TARGET].instance_defaults.variable + input_state_variable = mech.output_state.efferents[0].receiver.owner.input_states[TARGET].socket_template num_targets = -1 # first check if only one target was provided: @@ -1076,7 +1076,7 @@ def get_input_for_label(mech, key, subdicts, input_array=None): def _validate_target_function(target_function, target_mechanism, sample_mechanism): generated_targets = np.atleast_1d(target_function()) - expected_shape = target_mechanism.input_states[TARGET].instance_defaults.variable + expected_shape = target_mechanism.input_states[TARGET].socket_template if np.shape(generated_targets) != np.shape(expected_shape): raise RunError("Target values generated by target function ({}) are not compatible with TARGET input state " "of {} ({}). See {} entry in target specification dictionary. 
" diff --git a/psyneulink/library/subsystems/evc/evccontrolmechanism.py b/psyneulink/library/subsystems/evc/evccontrolmechanism.py index 80bc34f2ed4..8fe9cbba5e6 100644 --- a/psyneulink/library/subsystems/evc/evccontrolmechanism.py +++ b/psyneulink/library/subsystems/evc/evccontrolmechanism.py @@ -787,7 +787,8 @@ def _instantiate_prediction_mechanisms(self, system:System_Base, context=None): variable = [] for state_name in origin_mech.input_states.names: state_names.append(state_name) - variable.append(origin_mech.input_states[state_name].instance_defaults.variable) + # variable.append(origin_mech.input_states[state_name].instance_defaults.variable) + variable.append(origin_mech.input_states[state_name].value) # Instantiate PredictionMechanism prediction_mechanism = self.paramsCurrent[PREDICTION_MECHANISM_TYPE]( diff --git a/tests/mechanisms/test_input_state_spec.py b/tests/mechanisms/test_input_state_spec.py index 2a011c194da..6590a6c521b 100644 --- a/tests/mechanisms/test_input_state_spec.py +++ b/tests/mechanisms/test_input_state_spec.py @@ -223,8 +223,7 @@ def test_2_item_tuple_spec(self): np.testing.assert_array_equal(T.instance_defaults.variable, np.array([[0, 0]])) assert len(T.input_states) == 1 assert len(T.input_state.path_afferents[0].sender.instance_defaults.variable) == 3 - # assert len(T.input_state.instance_defaults.variable[]) == 2 - assert T.input_state.instance_defaults.variable.shape[-1] == 2 + assert T.input_state.socket_width == 2 T.execute() # ------------------------------------------------------------------------------------------------ @@ -236,8 +235,8 @@ def test_2_item_tuple_value_for_first_item(self): T = TransferMechanism(input_states=[([0,0], R2)]) np.testing.assert_array_equal(T.instance_defaults.variable, np.array([[0, 0]])) assert len(T.input_states) == 1 - assert T.input_state.path_afferents[0].sender.instance_defaults.variable.shape[-1] == 3 - assert T.input_state.instance_defaults.variable.shape[-1] == 2 + assert T.input_state.path_afferents[0].sender.socket_width == 3 + assert T.input_state.socket_width == 2 T.execute() # ------------------------------------------------------------------------------------------------ @@ -250,7 +249,7 @@ def test_projection_tuple_with_matrix_spec(self): np.testing.assert_array_equal(T.instance_defaults.variable, np.array([[0, 0]])) assert len(T.input_states) == 1 assert T.input_state.path_afferents[0].sender.instance_defaults.variable.shape[-1] == 3 - assert T.input_state.instance_defaults.variable.shape[-1] == 2 + assert T.input_state.socket_width == 2 T.execute() # ------------------------------------------------------------------------------------------------ @@ -760,5 +759,5 @@ def test_mech_and_tuple_specifications_with_and_without_default_variable_or_size size=size, input_states=input_states ) - assert T.input_states[0].instance_defaults.variable.shape[-1] == variable_len_state + assert T.input_states[0].socket_width == variable_len_state assert T.instance_defaults.variable.shape[-1] == variable_len_mech diff --git a/tests/mechanisms/test_recurrent_transfer_mechanism.py b/tests/mechanisms/test_recurrent_transfer_mechanism.py index 34109c4666c..6ae1cfd3573 100644 --- a/tests/mechanisms/test_recurrent_transfer_mechanism.py +++ b/tests/mechanisms/test_recurrent_transfer_mechanism.py @@ -30,6 +30,7 @@ def record_trial(): results.append(recurrent_mech.value) s.run(inputs=[[1.0, 1.0, 1.0], [2.0, 2.0, 2.0]], call_after_trial=record_trial) + assert True def test_recurrent_mech_auto_associative_projection(self): From 
c4f8c5207e44510e6e3a6999eaf9af1be1a48299 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 4 May 2018 17:43:45 -0400 Subject: [PATCH 045/200] adding some extra checks to _parse_runtime_params to verify that a sensible tuple is passed in --- psyneulink/components/system.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/psyneulink/components/system.py b/psyneulink/components/system.py index 90ec14c858c..bdf9de34296 100644 --- a/psyneulink/components/system.py +++ b/psyneulink/components/system.py @@ -2438,9 +2438,20 @@ def _parse_runtime_params(self, runtime_params): return {} for mechanism in runtime_params: for param in runtime_params[mechanism]: - if not isinstance(runtime_params[mechanism][param], tuple): + if isinstance(runtime_params[mechanism][param], tuple): + if len(runtime_params[mechanism][param]) == 1: + runtime_params[mechanism][param] = (runtime_params[mechanism][param], Always()) + elif len(runtime_params[mechanism][param]) != 2: + raise SystemError("Invalid runtime parameter specification ({}) for {}'s {} parameter in {}. " + "Must be a tuple of the form (parameter value, condition), or simply the " + "parameter value. ".format(runtime_params[mechanism][param], + mechanism.name, + param, + self.name)) + else: runtime_params[mechanism][param] = (runtime_params[mechanism][param], Always()) return runtime_params + def initialize(self): """Assign `initial_values ` to mechanisms designated as `INITIALIZE_CYCLE` \and contained in recurrent_init_mechanisms. From 3399e85aaf437e197fb70ac4b36feb211fabd958 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 4 May 2018 17:55:40 -0400 Subject: [PATCH 046/200] continuing to add documentation for runtime parameters --- psyneulink/components/system.py | 1 - psyneulink/globals/environment.py | 17 ++++++++++++++++- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/psyneulink/components/system.py b/psyneulink/components/system.py index bdf9de34296..03db6e1df9f 100644 --- a/psyneulink/components/system.py +++ b/psyneulink/components/system.py @@ -2451,7 +2451,6 @@ def _parse_runtime_params(self, runtime_params): else: runtime_params[mechanism][param] = (runtime_params[mechanism][param], Always()) return runtime_params - def initialize(self): """Assign `initial_values ` to mechanisms designated as `INITIALIZE_CYCLE` \and contained in recurrent_init_mechanisms. diff --git a/psyneulink/globals/environment.py b/psyneulink/globals/environment.py index 022b7fc38cf..e789cd92277 100644 --- a/psyneulink/globals/environment.py +++ b/psyneulink/globals/environment.py @@ -319,6 +319,21 @@ s.run(inputs=input_list) .. +.. _Run_Runtime_Parameters: + +Runtime_Parameters +~~~~~~~~~~~~~~~~~~ + +Runtime parameters are specified as a nested dictionary of (value, condition) tuples for parameters of mechanisms of the Composition. At the outer +layer of the dictionary, keys are Mechanisms and values are runtime parameter specification dictionaries. Inside +of those dictionaries, keys are keywords corresponding to Parameters of the Mechanism and values are tuples, the +index 0 item of which is the runtime parameter value, and the index 1 item of which is a `Condition`. Runtime +parameter values are subject to the same type, value, and shape requirements as the parameter in question. If a +runtime parameter is meant to be used throughout the run, then the `Condition` may be omitted and the "Always" +`Condition` will be assigned by default. See `RuntimeParameters` for examples of valid dictionaries. 
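A minimal illustrative sketch of such a dictionary follows. The mechanism, parameter values, and condition used here are placeholders chosen for illustration only; the parts taken from the specification above are the nested {Mechanism: {parameter: (value, Condition)}} structure and the bare-value shorthand, which is wrapped in an `Always` Condition by default::

    from psyneulink.components.mechanisms.processing.transfermechanism import TransferMechanism
    from psyneulink.scheduling.condition import AfterNTrials

    my_mech = TransferMechanism(name='my_mech', size=2)   # hypothetical mechanism
    runtime_params = {
        my_mech: {
            'noise': (5.0, AfterNTrials(1)),     # (value, Condition) tuple: value used once the Condition is satisfied
            'smoothing_factor': 0.1,             # bare value shorthand: treated as (0.1, Always())
        }
    }
    # assuming s is a System that contains my_mech and input_list is a valid stimulus specification:
    s.run(inputs=input_list, runtime_params=runtime_params)
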
+ + + COMMENT: .. _Run_Initial_Values: @@ -614,7 +629,7 @@ def run(object, phase of execution ` runtime_params : Dict[Mechanism: Dict[Param: Tuple(Value, Condition)]] - a nested dictionary of value + condition tuples for parameters of mechanisms of the Composition. At the outer + a nested dictionary of (value, condition) tuples for parameters of mechanisms of the Composition. At the outer layer of the dictionary, keys are Mechanisms and values are runtime parameter specification dictionaries. Inside of those dictionaries, keys are keywords corresponding to Parameters of the Mechanism and values are tuples, the index 0 item of which is the runtime parameter value, and the index 1 item of which is a `Condition`. Runtime From 540722785cbb1033ecea59897f10ca9776d8fe73 Mon Sep 17 00:00:00 2001 From: jdcpni Date: Fri, 4 May 2018 23:01:28 -0400 Subject: [PATCH 047/200] Docs/learning/recurrent and autoassociative (#783) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * • LCA - _execute: eliminated (by consolidating remaining bits into helper methods * • MaskMappingProjection - corrections to docstrings * • MaskMappingProjection - corrections to docstrings * Initial draft --- .../learning/autoassociativelearningmechanism.py | 13 +++++++------ .../transfer/recurrenttransfermechanism.py | 5 ++--- .../pathway/autoassociativeprojection.py | 4 ++-- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/psyneulink/library/mechanisms/adaptive/learning/autoassociativelearningmechanism.py b/psyneulink/library/mechanisms/adaptive/learning/autoassociativelearningmechanism.py index acdeeb22ed7..8aa26ec680b 100644 --- a/psyneulink/library/mechanisms/adaptive/learning/autoassociativelearningmechanism.py +++ b/psyneulink/library/mechanisms/adaptive/learning/autoassociativelearningmechanism.py @@ -24,7 +24,7 @@ An AutoAssociativeLearningMechanism can be created directly by calling its constructor, but most commonly it is created automatically when a RecurrentTransferMechanism is `configure for learning `, -(identified in its activity_source ` attribute). +(identified in its `activity_source ` attribute). .. _AutoAssociativeLearningMechanism_Structure: @@ -133,28 +133,29 @@ class AutoAssociativeLearningMechanism(LearningMechanism): Arguments --------- - variable : List or 2d np.array + variable : List or 2d np.array : default None it must have a single item that corresponds to the value required by the AutoAssociativeLearningMechanism's `function `; it must each be compatible (in number and type) with the `value ` of the Mechanism's `InputState ` (see `variable ` for additional details). - learning_signals : List[parameter of Projection, ParameterState, Projection, tuple[str, Projection] or dict] + learning_signals : List[parameter of Projection, ParameterState, Projection, tuple[str, Projection] or dict] \ + : default None specifies the `matrix ` to be learned (see `learning_signals ` for details of specification). - modulation : ModulationParam : ModulationParam.ADDITIVE + modulation : ModulationParam : default ModulationParam.ADDITIVE specifies the default form of modulation used by the AutoAssociativeLearningMechanism's LearningSignals, unless they are `individually specified `. - function : LearningFunction or function + function : LearningFunction or function : default Hebbian specifies the function used to calculate the AutoAssociativeLearningMechanism's `learning_signal ` attribute. 
It must take as its **variable** argument a list or 1d array of numeric values (the "activity vector") and return a list, 2d np.array or np.matrix representing a square matrix with dimensions that equal the length of its variable (the "weight change matrix"). - learning_rate : float + learning_rate : float : default None specifies the learning rate for the AutoAssociativeLearningMechanism. (see `learning_rate ` for details). diff --git a/psyneulink/library/mechanisms/processing/transfer/recurrenttransfermechanism.py b/psyneulink/library/mechanisms/processing/transfer/recurrenttransfermechanism.py index 83fadb0c268..b2605d70dec 100644 --- a/psyneulink/library/mechanisms/processing/transfer/recurrenttransfermechanism.py +++ b/psyneulink/library/mechanisms/processing/transfer/recurrenttransfermechanism.py @@ -390,8 +390,8 @@ class RecurrentTransferMechanism(TransferMechanism): the smoothing factor for exponential time averaging of input when `integrator_mode ` is set to True:: - result = (smoothing_factor * variable) + - (1-smoothing_factor * input to mechanism's function on the previous time step) + result = (smoothing_factor * variable) + + (1-smoothing_factor * input to mechanism's function on the previous time step) clip : list [float, float] : default None (Optional) specifies the allowable range for the result of `function ` the item in @@ -399,7 +399,6 @@ class RecurrentTransferMechanism(TransferMechanism): allowable value; any element of the result that exceeds the specified minimum or maximum value is set to the value of `clip ` that it exceeds. - enable_learning : boolean : default False specifies whether the Mechanism should be configured for learning; if it is not (the default), then learning cannot be enabled until it is configured for learning by calling the Mechanism's `configure_learning diff --git a/psyneulink/library/projections/pathway/autoassociativeprojection.py b/psyneulink/library/projections/pathway/autoassociativeprojection.py index 8bc99fc0140..71b8b4c0e65 100644 --- a/psyneulink/library/projections/pathway/autoassociativeprojection.py +++ b/psyneulink/library/projections/pathway/autoassociativeprojection.py @@ -62,8 +62,8 @@ Execution --------- -An AutoAssociativeProjection uses its `matrix ` parameter to transform the value of its -`sender `, and provide the result as input for its +An AutoAssociativeProjection uses its `matrix ` parameter to transform the value of +its `sender `, and provide the result as input for its `receiver `, the primary input state of the RecurrentTransferMechanism. .. 
note:: From c6f0332d499870fd57e48962e1dae99e5c905bde Mon Sep 17 00:00:00 2001 From: jdcpni Date: Sat, 5 May 2018 21:47:09 -0400 Subject: [PATCH 048/200] Fix/evcauxilliary/function to execute (#784) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * • LCA - _execute: eliminated (by consolidating remaining bits into helper methods * • MaskMappingProjection - corrections to docstrings * • MaskMappingProjection - corrections to docstrings * Initial draft --- psyneulink/library/subsystems/evc/evcauxiliary.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/psyneulink/library/subsystems/evc/evcauxiliary.py b/psyneulink/library/subsystems/evc/evcauxiliary.py index b623ddc4bef..266924ba67c 100644 --- a/psyneulink/library/subsystems/evc/evcauxiliary.py +++ b/psyneulink/library/subsystems/evc/evcauxiliary.py @@ -186,15 +186,15 @@ def function( # Aggregate costs if isinstance(cost_function, UserDefinedFunction): - cost = cost_function.function(controller=controller, costs=costs) + cost = cost_function._execute(controller=controller, costs=costs) else: - cost = cost_function.function(variable=costs, context=context) + cost = cost_function._execute(variable=costs, context=context) # Combine outcome and cost to determine value if isinstance(combine_function, UserDefinedFunction): - value = combine_function.function(controller=controller, outcome=outcome, cost=cost) + value = combine_function._execute(controller=controller, outcome=outcome, cost=cost) else: - value = combine_function.function(variable=[outcome, -cost]) + value = combine_function._execute(variable=[outcome, -cost]) return (value, outcome, cost) From 5c2c9f34431573c6c510adfa44d94bca4aa68448 Mon Sep 17 00:00:00 2001 From: jdcpni Date: Sun, 6 May 2018 17:47:42 -0400 Subject: [PATCH 049/200] Fix/linearcombination/weights and exponents (#785) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * • LCA - _execute: eliminated (by consolidating remaining bits into helper methods * • MaskMappingProjection - corrections to docstrings * • MaskMappingProjection - corrections to docstrings * Merge branches 'devel' and 'feat/learning_contrastive_hebbian_JDC' of https://github.com/PrincetonUniversity/PsyNeuLink into feat/learning_contrastive_hebbian_JDC # Conflicts: # psyneulink/library/subsystems/evc/evcauxiliary.py * • Function LinearCombination: __init__ and _validate_params: eliminated reshaping of weights and exponents • ObjectiveMechanism _instantiate_function_weights_and_exponents: corrected assignment of weights to be 2d array * • Function LinearCombination: __init__ and _validate_params: eliminated reshaping of weights and exponents • ObjectiveMechanism _instantiate_function_weights_and_exponents: corrected assignment of weights to be 2d array --- .idea/runConfigurations/Make_HTML.xml | 6 +++--- .idea/runConfigurations/Scratch_Pad.xml | 6 +++--- docs/source/Function.rst | 2 ++ psyneulink/components/functions/function.py | 13 +++---------- .../mechanisms/processing/objectivemechanism.py | 4 ++-- 5 files changed, 13 insertions(+), 18 deletions(-) diff --git a/.idea/runConfigurations/Make_HTML.xml b/.idea/runConfigurations/Make_HTML.xml index 52e2eb9e7aa..7e95a8f2e27 100644 --- a/.idea/runConfigurations/Make_HTML.xml +++ b/.idea/runConfigurations/Make_HTML.xml @@ -1,9 +1,9 @@ - - + +
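As an aside on the ValueFunction change in the "Fix/evcauxilliary/function to execute" patch above: the value returned there is combine_function._execute(variable=[outcome, -cost]). The following is a rough NumPy-only sketch of that combination, assuming the combine function behaves like a weighted LinearCombination sum; the weights and the example numbers are illustrative assumptions, not taken from the patch::

    import numpy as np

    def combine_outcome_and_cost(outcome, cost, weights=(1.0, 1.0)):
        # Mirrors combine_function._execute(variable=[outcome, -cost]):
        # the aggregated cost enters with a negative sign, so higher costs reduce the value.
        outcome = np.asarray(outcome, dtype=float)
        cost = np.asarray(cost, dtype=float)
        return weights[0] * outcome + weights[1] * (-cost)

    value = combine_outcome_and_cost(outcome=2.5, cost=0.75)   # -> 1.75
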