Skip to content

Commit

Permalink
Merge pull request #1699 from PrincetonUniversity/devel
Browse files Browse the repository at this point in the history
Devel
  • Loading branch information
dillontsmith authored Jul 6, 2020
2 parents 186ba7e + 9de26a7 commit eb2420c
Show file tree
Hide file tree
Showing 39 changed files with 209 additions and 165 deletions.
5 changes: 3 additions & 2 deletions psyneulink/core/components/component.py
Original file line number Diff line number Diff line change
Expand Up @@ -976,7 +976,7 @@ def _parse_variable(self, variable):
return variable

try:
return np.asarray(variable)
return convert_to_np_array(variable)
except ValueError:
return convert_all_elements_to_np_array(variable)

Expand Down Expand Up @@ -1507,7 +1507,8 @@ def checkAndCastInt(x):
variable = []
for s in size:
variable.append(np.zeros(s))
variable = np.array(variable)
variable = convert_to_np_array(variable)
# TODO: fix bare except
except:
raise ComponentError("variable (possibly default_variable) was not specified, but PsyNeuLink "
"was unable to infer variable from the size argument, {}. size should be"
Expand Down
4 changes: 2 additions & 2 deletions psyneulink/core/components/functions/combinationfunctions.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@
DEFAULT_VARIABLE, EXPONENTS, LINEAR_COMBINATION_FUNCTION, MULTIPLICATIVE_PARAM, OFFSET, OPERATION, \
PREDICTION_ERROR_DELTA_FUNCTION, PRODUCT, REARRANGE_FUNCTION, REDUCE_FUNCTION, SCALE, SUM, WEIGHTS, \
PREFERENCE_SET_NAME
from psyneulink.core.globals.utilities import is_numeric, np_array_less_than_2d, parameter_spec
from psyneulink.core.globals.utilities import convert_to_np_array, is_numeric, np_array_less_than_2d, parameter_spec
from psyneulink.core.globals.context import Context, ContextFlags
from psyneulink.core.globals.parameters import Parameter
from psyneulink.core.globals.preferences.basepreferenceset import \
Expand Down Expand Up @@ -552,7 +552,7 @@ def _function(self,
for index in item:
stack.append(variable[index])
result.append(np.hstack(tuple(stack)))
result = np.array(result) * scale + offset
result = convert_to_np_array(result) * scale + offset
except IndexError:
assert False, f"PROGRAM ERROR: Bad index specified in {repr(ARRANGEMENT)} arg -- " \
f"should have been caught in _validate_params or _instantiate_attributes_before_function"
Expand Down
14 changes: 10 additions & 4 deletions psyneulink/core/components/functions/distributionfunctions.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@
EXPONENTIAL_DIST_FUNCTION, GAMMA_DIST_FUNCTION, HIGH, LOW, MULTIPLICATIVE_PARAM, NOISE, NORMAL_DIST_FUNCTION, \
SCALE, STANDARD_DEVIATION, THRESHOLD, UNIFORM_DIST_FUNCTION, WALD_DIST_FUNCTION
from psyneulink.core.globals.context import ContextFlags
from psyneulink.core.globals.utilities import parameter_spec, get_global_seed
from psyneulink.core.globals.utilities import convert_to_np_array, parameter_spec, get_global_seed
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set

from psyneulink.core.globals.parameters import Parameter
Expand Down Expand Up @@ -1265,9 +1265,15 @@ def _function(self,
exp_neg2_ztilde_atilde = np.exp(-2 * ztilde * atilde)

if self.shenhav_et_al_compat_mode:
exp_neg2_x0tilde_atilde = np.nanmax([1e-12, exp_neg2_x0tilde_atilde])
exp_2_ztilde_atilde = np.nanmin([1e12, exp_2_ztilde_atilde])
exp_neg2_ztilde_atilde = np.nanmax([1e-12, exp_neg2_ztilde_atilde])
exp_neg2_x0tilde_atilde = np.nanmax(
convert_to_np_array([1e-12, exp_neg2_x0tilde_atilde])
)
exp_2_ztilde_atilde = np.nanmin(
convert_to_np_array([1e12, exp_2_ztilde_atilde])
)
exp_neg2_ztilde_atilde = np.nanmax(
convert_to_np_array([1e-12, exp_neg2_ztilde_atilde])
)

rt = ztilde * np.tanh(ztilde * atilde) + \
((2 * ztilde * (1 - exp_neg2_x0tilde_atilde)) / (
Expand Down
6 changes: 3 additions & 3 deletions psyneulink/core/components/functions/function.py
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,7 @@
from psyneulink.core.globals.preferences.preferenceset import PreferenceEntry, PreferenceLevel
from psyneulink.core.globals.registry import register_category
from psyneulink.core.globals.utilities import (
get_global_seed, object_has_single_value, parameter_spec, safe_len
convert_to_np_array, get_global_seed, object_has_single_value, parameter_spec, safe_len
)

__all__ = [
Expand Down Expand Up @@ -649,7 +649,7 @@ def convert_output_type(self, value, output_type=None):
else:
output_type = self.output_type

value = np.asarray(value)
value = convert_to_np_array(value)

# Type conversion (specified by output_type):

Expand Down Expand Up @@ -1006,7 +1006,7 @@ def get_matrix(specification, rows=1, cols=1, context=None):

# Matrix provided (and validated in _validate_params); convert to array
if isinstance(specification, (list, np.matrix)):
specification = np.array(specification)
return convert_to_np_array(specification)

if isinstance(specification, np.ndarray):
if specification.ndim == 2:
Expand Down
6 changes: 5 additions & 1 deletion psyneulink/core/components/functions/interfacefunctions.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
from psyneulink.core.globals.parameters import Parameter
from psyneulink.core.globals.preferences.basepreferenceset import \
PreferenceEntry, PreferenceLevel, is_pref_set, REPORT_OUTPUT_PREF
from psyneulink.core.globals.utilities import convert_to_np_array


__all__ = ['InterfaceFunction', 'InterfacePortMap']
Expand Down Expand Up @@ -173,7 +174,10 @@ def _function(
if self.corresponding_input_port.owner.parameters.value._get(context) is not None:

# If CIM's variable does not match its value, then a new pair of ports was added since the last execution
if not np.shape(self.corresponding_input_port.owner.get_input_values(context)) == np.shape(self.corresponding_input_port.owner.parameters.value._get(context)):
input_values = convert_to_np_array(
self.corresponding_input_port.owner.get_input_values(context)
)
if not np.shape(input_values) == np.shape(self.corresponding_input_port.owner.parameters.value._get(context)):
return self.corresponding_input_port.owner.defaults.variable[index]

# If the variable is 1D (e.g. [0. , 0.], NOT [[0. , 0.]]), and the index is 0, then return whole variable
Expand Down
2 changes: 1 addition & 1 deletion psyneulink/core/components/functions/learningfunctions.py
Original file line number Diff line number Diff line change
Expand Up @@ -585,7 +585,7 @@ def _function(


variable = self._check_args(
[np.atleast_2d(variable[0]), np.atleast_2d(variable[1])],
[convert_to_np_array(variable[0], dimension=2), convert_to_np_array(variable[1], dimension=2)],
params,
context,
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1639,7 +1639,7 @@ def _run_cuda_grid(self, ocm, variable, context):
# Compiled evaluate expects the same variable as mech function
new_variable = [np.asfarray(ip.parameters.value.get(context))
for ip in ocm.input_ports]
new_variable = np.atleast_2d(new_variable)
new_variable = np.array(new_variable, dtype=np.object)
# Map allocations to values
comp_exec = pnlvm.execution.CompExecution(ocm.agent_rep, [context.execution_id])
ct_alloc, ct_values = comp_exec.cuda_evaluate(new_variable,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@
INCREMENT, INITIALIZER, INPUT_PORTS, INTEGRATOR_FUNCTION, INTEGRATOR_FUNCTION_TYPE, \
INTERACTIVE_ACTIVATION_INTEGRATOR_FUNCTION, LEAKY_COMPETING_INTEGRATOR_FUNCTION, \
MULTIPLICATIVE_PARAM, NOISE, OFFSET, OPERATION, ORNSTEIN_UHLENBECK_INTEGRATOR_FUNCTION, OUTPUT_PORTS, PRODUCT, \
RATE, REST, SIMPLE_INTEGRATOR_FUNCTION, SUM, TIME_STEP_SIZE, THRESHOLD
RATE, REST, SIMPLE_INTEGRATOR_FUNCTION, SUM, TIME_STEP_SIZE, THRESHOLD, VARIABLE
from psyneulink.core.globals.parameters import Parameter
from psyneulink.core.globals.utilities import parameter_spec, all_within_range, iscompatible, get_global_seed
from psyneulink.core.globals.context import Context, ContextFlags, handle_external_context
Expand Down Expand Up @@ -1203,7 +1203,12 @@ def _function(self,
previous_value = self.get_previous_value(context)
# MODIFIED 6/14/19 END

value = self._EWMA_filter(previous_value, rate, variable) + noise
try:
value = self._EWMA_filter(previous_value, rate, variable) + noise
except TypeError:
# TODO: this should be standardized along with the other instances
# of this error
raise FunctionError("Unrecognized type for {} of {} ({})".format(VARIABLE, self.name, variable))

adjusted_value = value + offset

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@
from psyneulink.core.globals.keywords import \
ADDITIVE_PARAM, BUFFER_FUNCTION, MEMORY_FUNCTION, COSINE, ContentAddressableMemory_FUNCTION, \
MIN_INDICATOR, MULTIPLICATIVE_PARAM, NEWEST, NOISE, OLDEST, OVERWRITE, RATE, RANDOM
from psyneulink.core.globals.utilities import all_within_range, parameter_spec, get_global_seed
from psyneulink.core.globals.utilities import all_within_range, convert_to_np_array, parameter_spec, get_global_seed
from psyneulink.core.globals.context import Context, ContextFlags, handle_external_context
from psyneulink.core.globals.parameters import Parameter
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
Expand Down Expand Up @@ -330,7 +330,7 @@ def _function(self,

# Apply rate and/or noise, if they are specified, to all stored items
if len(previous_value):
previous_value = previous_value * rate + noise
previous_value = convert_to_np_array(previous_value) * rate + noise

previous_value = deque(previous_value, maxlen=self.parameters.history._get(context))

Expand Down Expand Up @@ -995,7 +995,7 @@ def _initialize_previous_value(self, initializer, context=None):
warnings.warn(f"Attempt to initialize memory of {self.__class__.__name__} with an entry ({entry}) "
f"that has the same key as a previous one, while 'duplicate_keys'==False; "
f"that entry has been skipped")
return np.asarray(self._memory)
return convert_to_np_array(self._memory)

def _instantiate_attributes_before_function(self, function=None, context=None):
self.parameters.previous_value._set(
Expand Down Expand Up @@ -1125,7 +1125,7 @@ def _function(self,
# Return 3d array with keys and vals as lists
# IMPLEMENTATION NOTE: if try to create np.ndarray directly, and keys and vals have same length
# end up with array of arrays, rather than array of lists
ret_val = np.array([list(memory[0]),[]])
ret_val = convert_to_np_array([list(memory[0]),[]])
ret_val[1] = list(memory[1])
return ret_val

Expand Down Expand Up @@ -1326,7 +1326,7 @@ def delete_from_memory(self, memories:tc.any(list, np.ndarray), key_only:bool= T

def _parse_memories(self, memories, method, context=None):
"""Parse passing of single vs. multiple memories, validate memories, and return ndarray"""
memories = np.array(memories)
memories = convert_to_np_array(memories)
if not 1 <= memories.ndim <= 3:
raise FunctionError(f"'memories' arg for {method} method of {self.__class__.__name__} "
f"must be a 2-item list or 2d array, or a list or 3d array containing those")
Expand Down
16 changes: 8 additions & 8 deletions psyneulink/core/components/mechanisms/mechanism.py
Original file line number Diff line number Diff line change
Expand Up @@ -2397,7 +2397,7 @@ def execute(self,

return return_value
else:
converted_to_2d = np.atleast_2d(return_value)
converted_to_2d = convert_to_np_array(return_value, dimension=2)
# If return_value is a list of heterogeneous elements, return as is
# (satisfies requirement that return_value be an array of possibly multidimensional values)
if converted_to_2d.dtype == object:
Expand All @@ -2411,7 +2411,7 @@ def execute(self,
return_value = super()._execute(variable=self.defaults.variable,
context=context,
runtime_params=runtime_params)
return np.atleast_2d(return_value)
return convert_to_np_array(return_value, dimension=2)

# SET UP RUNTIME PARAMS if any

Expand Down Expand Up @@ -2491,7 +2491,7 @@ def execute(self,
for item in value))):
pass
else:
converted_to_2d = np.atleast_2d(value)
converted_to_2d = convert_to_np_array(value, dimension=2)
# If return_value is a list of heterogeneous elements, return as is
# (satisfies requirement that return_value be an array of possibly multidimensional values)
if converted_to_2d.dtype == object:
Expand Down Expand Up @@ -2537,7 +2537,7 @@ def execute(self,
return value

def _get_variable_from_input(self, input, context=None):
input = np.atleast_2d(input)
input = convert_to_np_array(input, dimension=2)
num_inputs = np.size(input, 0)
num_input_ports = len(self.input_ports)
if num_inputs != num_input_ports:
Expand All @@ -2559,7 +2559,7 @@ def _get_variable_from_input(self, input, context=None):
f"required length ({len(input_port.defaults.variable)}) for input "
f"to {InputPort.__name__} {repr(input_port.name)} of {self.name}.")

return np.array(self.get_input_values(context))
return convert_to_np_array(self.get_input_values(context))

def _update_input_ports(self, runtime_input_port_params=None, context=None):
"""Update value for each InputPort in self.input_ports:
Expand All @@ -2574,7 +2574,7 @@ def _update_input_ports(self, runtime_input_port_params=None, context=None):
port= self.input_ports[i]
port._update(params=runtime_input_port_params,
context=context)
return np.array(self.get_input_values(context))
return convert_to_np_array(self.get_input_values(context))

def _update_parameter_ports(self, runtime_parameter_port_params=None, context=None):

Expand Down Expand Up @@ -3569,7 +3569,7 @@ def add_ports(self, ports, context=None):
else:
old_variable = self.defaults.variable
old_variable.extend(added_variable)
self.defaults.variable = np.array(old_variable)
self.defaults.variable = convert_to_np_array(old_variable)
instantiated_input_ports = _instantiate_input_ports(self,
input_ports,
added_variable,
Expand Down Expand Up @@ -3993,7 +3993,7 @@ def _is_mechanism_spec(spec):
from collections import UserList
class MechanismList(UserList):
"""Provides access to Mechanisms and their attributes in a list Mechanisms of an owner.
Properties return dicts with item : attribute pairs.
Recursively process any item that itself is a MechanismList (e.g., a `Nested Composition <Composition_Nested>`).
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -587,7 +587,7 @@
from psyneulink.core.globals.parameters import Parameter
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
from psyneulink.core.globals.utilities import ContentAddressableList, convert_to_list, copy_iterable_with_shared, is_iterable
from psyneulink.core.globals.utilities import ContentAddressableList, convert_to_list, convert_to_np_array, copy_iterable_with_shared, is_iterable

__all__ = [
'CONTROL_ALLOCATION', 'GATING_ALLOCATION', 'ControlMechanism', 'ControlMechanismError', 'ControlMechanismRegistry',
Expand Down Expand Up @@ -674,9 +674,13 @@ def validate_monitored_port_spec(owner, spec_list):
def _control_mechanism_costs_getter(owning_component=None, context=None):
# NOTE: In cases where there is a reconfiguration_cost, that cost is not returned by this method
try:
costs = [c.compute_costs(c.parameters.value._get(context), context=context)
for c in owning_component.control_signals
if hasattr(c, 'compute_costs')] # GatingSignals don't have cost fcts
costs = [
convert_to_np_array(
c.compute_costs(c.parameters.value._get(context), context=context)
)
for c in owning_component.control_signals
if hasattr(c, 'compute_costs')
] # GatingSignals don't have cost fcts
return costs

except TypeError:
Expand All @@ -692,8 +696,10 @@ def _net_outcome_getter(owning_component=None, context=None):
# NOTE: In cases where there is a reconfiguration_cost, that cost is not included in the net_outcome
try:
c = owning_component
return c.compute_net_outcome(c.parameters.outcome._get(context),
c.combine_costs(c.parameters.costs._get(context)))
return c.compute_net_outcome(
c.parameters.outcome._get(context),
c.combine_costs()
)
except TypeError:
return [0]

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -413,6 +413,7 @@
DEFAULT_VARIABLE, EID_FROZEN, FUNCTION, INTERNAL_ONLY, NAME, \
OPTIMIZATION_CONTROL_MECHANISM, OBJECTIVE_MECHANISM, OUTCOME, PRODUCT, PARAMS, \
CONTROL, AUTO_ASSIGN_MATRIX
from psyneulink.core.globals.utilities import convert_to_np_array
from psyneulink.core.globals.parameters import Parameter, ParameterAlias
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
from psyneulink.core.globals.context import handle_external_context
Expand Down Expand Up @@ -873,7 +874,7 @@ def _update_input_ports(self, runtime_params=None, context=None):
port = self.input_ports[i]
port._update(params=runtime_params, context=context)
port_values.append(port.parameters.value._get(context))
return np.array(port_values)
return convert_to_np_array(port_values)
# # MODIFIED 5/8/20 NEW:
# input_port_values = super()._update_input_ports(runtime_params, context)
# port_values.append(input_port_values)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -550,7 +550,7 @@
from psyneulink.core.globals.parameters import Parameter
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
from psyneulink.core.globals.utilities import ContentAddressableList, is_numeric, parameter_spec, convert_to_list
from psyneulink.core.globals.utilities import ContentAddressableList, convert_to_np_array, is_numeric, parameter_spec, convert_to_list

__all__ = [
'ACTIVATION_INPUT', 'ACTIVATION_INPUT_INDEX', 'ACTIVATION_OUTPUT', 'ACTIVATION_OUTPUT_INDEX',
Expand Down Expand Up @@ -1352,9 +1352,13 @@ def _execute(

# Compute learning_signal for each error_signal (and corresponding error-Matrix):
for error_signal_input, error_matrix in zip(error_signal_inputs, error_matrices):
function_variable = np.array([variable[ACTIVATION_INPUT_INDEX],
variable[ACTIVATION_OUTPUT_INDEX],
error_signal_input])
function_variable = convert_to_np_array(
[
variable[ACTIVATION_INPUT_INDEX],
variable[ACTIVATION_OUTPUT_INDEX],
error_signal_input
]
)
learning_signal, error_signal = super()._execute(variable=function_variable,
# MODIFIED CROSS_PATHWAYS 7/22/19 END
context=context,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1080,6 +1080,10 @@ class Parameters(ProcessingMechanism_Base.Parameters):
structural=True,
)

def _validate_variable(self, variable):
if 'U' in str(variable.dtype):
return 'may not contain non-numeric entries'

def _validate_integrator_mode(self, integrator_mode):
if not isinstance(integrator_mode, bool):
return 'may only be True or False.'
Expand Down
Loading

0 comments on commit eb2420c

Please sign in to comment.