Skip to content

Commit

Permalink
Merge pull request #1706 from PrincetonUniversity/devel
Browse the repository at this point in the history
Devel
  • Loading branch information
dillontsmith authored Jul 14, 2020
2 parents 627974b + 190034b commit 0374dca
Show / hide file tree
Showing 63 changed files with 1,204 additions and 1,245 deletions.
4 changes: 2 additions & 2 deletions Scripts/Debug/Yotam LCA Model.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,7 +147,7 @@ def get_trained_network(bipartite_graph, num_features=3, num_hidden=200, epochs=
}

# Build network
mnet = pnl.AutodiffComposition(param_init_from_pnl=True,
mnet = pnl.AutodiffComposition(
patience=patience,
min_delta=min_delt,
learning_rate=learning_rate,
Expand Down Expand Up @@ -273,7 +273,7 @@ def get_trained_network_multLCA(bipartite_graph, num_features=3, num_hidden=200,
}

# Build network
mnet = pnl.AutodiffComposition(param_init_from_pnl=True,
mnet = pnl.AutodiffComposition(
patience=patience,
min_delta=min_delt,
learning_rate=learning_rate,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -180,7 +180,6 @@ def gen_input_vals(nouns, relations):
#This block of code constructs the network

RumelNet = pnl.AutodiffComposition(
param_init_from_pnl=True,
patience=10,
min_delta=0.00001,
learning_rate=1,
Expand Down
4 changes: 0 additions & 4 deletions psyneulink/core/components/component.py
Original file line number Diff line number Diff line change
Expand Up @@ -3370,10 +3370,6 @@ def _variable_shape_flexibility(self):
def _variable_shape_flexibility(self, value):
self.__variable_shape_flexibility = value

@classmethod
def get_constructor_defaults(cls):
return {arg_name: arg.default for (arg_name, arg) in inspect.signature(cls.__init__).parameters.items()}

@property
def class_parameters(self):
return self.__class__.parameters
Expand Down
34 changes: 16 additions & 18 deletions psyneulink/core/components/functions/combinationfunctions.py
Original file line number Diff line number Diff line change
Expand Up @@ -197,11 +197,11 @@ class Parameters(CombinationFunction.Parameters):
@tc.typecheck
def __init__(self,
default_variable=None,
scale: parameter_spec = 1.0,
offset: parameter_spec = 0.0,
scale: tc.optional(parameter_spec) = None,
offset: tc.optional(parameter_spec) = None,
params=None,
owner=None,
prefs: is_pref_set = None):
prefs: tc.optional(is_pref_set) = None):

super().__init__(
default_variable=default_variable,
Expand Down Expand Up @@ -416,12 +416,12 @@ class Parameters(CombinationFunction.Parameters):
@tc.typecheck
def __init__(self,
default_variable=None,
scale: parameter_spec = 1.0,
offset: parameter_spec = 0.0,
scale: tc.optional(parameter_spec) = None,
offset: tc.optional(parameter_spec) = None,
arrangement:tc.optional(tc.any(int, tuple, list))=None,
params=None,
owner=None,
prefs: is_pref_set = None):
prefs: tc.optional(is_pref_set) = None):

super().__init__(
default_variable=default_variable,
Expand Down Expand Up @@ -716,12 +716,12 @@ def __init__(self,
weights=None,
exponents=None,
default_variable=None,
operation: tc.enum(SUM, PRODUCT) = SUM,
scale: parameter_spec = 1.0,
offset: parameter_spec = 0.0,
operation: tc.optional(tc.enum(SUM, PRODUCT)) = None,
scale: tc.optional(parameter_spec) = None,
offset: tc.optional(parameter_spec) = None,
params=None,
owner=None,
prefs: is_pref_set = None):
prefs: tc.optional(is_pref_set) = None):

super().__init__(
default_variable=default_variable,
Expand Down Expand Up @@ -1151,12 +1151,12 @@ def __init__(self,
# exponents: tc.optional(parameter_spec)=None,
weights=None,
exponents=None,
operation: tc.enum(SUM, PRODUCT) = SUM,
operation: tc.optional(tc.enum(SUM, PRODUCT)) = None,
scale=None,
offset=None,
params=None,
owner=None,
prefs: is_pref_set = None):
prefs: tc.optional(is_pref_set) = None):

super().__init__(
default_variable=default_variable,
Expand Down Expand Up @@ -1675,12 +1675,12 @@ def __init__(self,
# exponents:tc.optional(parameter_spec)=None,
weights=None,
exponents=None,
operation: tc.enum(SUM, PRODUCT) = SUM,
operation: tc.optional(tc.enum(SUM, PRODUCT)) = None,
scale=None,
offset=None,
params=None,
owner=None,
prefs: is_pref_set = None):
prefs: tc.optional(is_pref_set) = None):

super().__init__(
default_variable=default_variable,
Expand Down Expand Up @@ -1930,10 +1930,10 @@ class Parameters(CombinationFunction.Parameters):
@tc.typecheck
def __init__(self,
default_variable=None,
gamma: tc.optional(float) = 1.0,
gamma: tc.optional(tc.optional(float)) = None,
params=None,
owner=None,
prefs: is_pref_set = None):
prefs: tc.optional(is_pref_set) = None):

super().__init__(
default_variable=default_variable,
Expand All @@ -1943,8 +1943,6 @@ def __init__(self,
prefs=prefs,
)

self.gamma = gamma

def _validate_variable(self, variable, context=None):
"""
Insure that all items of variable are numeric
Expand Down
48 changes: 24 additions & 24 deletions psyneulink/core/components/functions/distributionfunctions.py
Original file line number Diff line number Diff line change
Expand Up @@ -154,12 +154,12 @@ class Parameters(DistributionFunction.Parameters):
@tc.typecheck
def __init__(self,
default_variable=None,
mean=0.0,
standard_deviation=1.0,
mean=None,
standard_deviation=None,
params=None,
owner=None,
seed=None,
prefs: is_pref_set = None):
prefs: tc.optional(is_pref_set) = None):

if seed is None:
seed = get_global_seed()
Expand All @@ -179,7 +179,7 @@ def __init__(self,
def _validate_params(self, request_set, target_set=None, context=None):
super()._validate_params(request_set=request_set, target_set=target_set, context=context)

if STANDARD_DEVIATION in target_set:
if STANDARD_DEVIATION in target_set and target_set[STANDARD_DEVIATION] is not None:
if target_set[STANDARD_DEVIATION] < 0.0:
raise FunctionError("The standard_deviation parameter ({}) of {} must be greater than zero.".
format(target_set[STANDARD_DEVIATION], self.name))
Expand Down Expand Up @@ -340,12 +340,12 @@ class Parameters(DistributionFunction.Parameters):
@tc.typecheck
def __init__(self,
default_variable=None,
mean=0.0,
standard_deviation=1.0,
mean=None,
standard_deviation=None,
params=None,
owner=None,
seed=None,
prefs: is_pref_set = None):
prefs: tc.optional(is_pref_set) = None):

if seed is None:
seed = get_global_seed()
Expand Down Expand Up @@ -470,11 +470,11 @@ class Parameters(DistributionFunction.Parameters):
@tc.typecheck
def __init__(self,
default_variable=None,
beta=1.0,
beta=None,
seed=None,
params=None,
owner=None,
prefs: is_pref_set = None):
prefs: tc.optional(is_pref_set) = None):

if seed is None:
seed = get_global_seed()
Expand Down Expand Up @@ -599,12 +599,12 @@ class Parameters(DistributionFunction.Parameters):
@tc.typecheck
def __init__(self,
default_variable=None,
low=0.0,
high=1.0,
low=None,
high=None,
seed=None,
params=None,
owner=None,
prefs: is_pref_set = None):
prefs: tc.optional(is_pref_set) = None):

if seed is None:
seed = get_global_seed()
Expand Down Expand Up @@ -738,12 +738,12 @@ class Parameters(DistributionFunction.Parameters):
@tc.typecheck
def __init__(self,
default_variable=None,
scale=1.0,
dist_shape=1.0,
scale=None,
dist_shape=None,
seed=None,
params=None,
owner=None,
prefs: is_pref_set = None):
prefs: tc.optional(is_pref_set) = None):
if seed is None:
seed = get_global_seed()

Expand Down Expand Up @@ -874,12 +874,12 @@ class Parameters(DistributionFunction.Parameters):
@tc.typecheck
def __init__(self,
default_variable=None,
scale=1.0,
mean=1.0,
scale=None,
mean=None,
seed=None,
params=None,
owner=None,
prefs: is_pref_set = None):
prefs: tc.optional(is_pref_set) = None):
if seed is None:
seed = get_global_seed()

Expand Down Expand Up @@ -1113,14 +1113,14 @@ class Parameters(DistributionFunction.Parameters):
@tc.typecheck
def __init__(self,
default_variable=None,
drift_rate: parameter_spec = 1.0,
starting_point: parameter_spec = 0.0,
threshold: parameter_spec = 1.0,
noise: parameter_spec = 0.5,
t0: parameter_spec = .200,
drift_rate: tc.optional(parameter_spec) = None,
starting_point: tc.optional(parameter_spec) = None,
threshold: tc.optional(parameter_spec) = None,
noise: tc.optional(parameter_spec) = None,
t0: tc.optional(parameter_spec) = None,
params=None,
owner=None,
prefs: is_pref_set = None,
prefs: tc.optional(is_pref_set) = None,
shenhav_et_al_compat_mode=False):

self._shenhav_et_al_compat_mode = shenhav_et_al_compat_mode
Expand Down
2 changes: 1 addition & 1 deletion psyneulink/core/components/functions/function.py
Original file line number Diff line number Diff line change
Expand Up @@ -822,7 +822,7 @@ def __init__(self,
pertincacity=Manner.CONTRARIAN,
params=None,
owner=None,
prefs: is_pref_set = None):
prefs: tc.optional(is_pref_set) = None):

super().__init__(
default_variable=default_variable,
Expand Down
2 changes: 1 addition & 1 deletion psyneulink/core/components/functions/interfacefunctions.py
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,7 @@ def __init__(self,
corresponding_input_port=None,
params=None,
owner=None,
prefs: is_pref_set = None):
prefs: tc.optional(is_pref_set) = None):

super().__init__(
default_variable=default_variable,
Expand Down
32 changes: 16 additions & 16 deletions psyneulink/core/components/functions/learningfunctions.py
Original file line number Diff line number Diff line change
Expand Up @@ -435,14 +435,14 @@ class Parameters(LearningFunction.Parameters):

def __init__(self,
default_variable=None,
mu_0=0,
sigma_0=1,
gamma_shape_0=1,
gamma_size_0=1,
mu_0=None,
sigma_0=None,
gamma_shape_0=None,
gamma_size_0=None,
params=None,
owner=None,
seed=None,
prefs: is_pref_set = None):
prefs: tc.optional(is_pref_set) = None):

self.user_specified_default_variable = default_variable

Expand Down Expand Up @@ -761,12 +761,12 @@ def _validate_distance_function(self, distance_function):

def __init__(self,
default_variable=None,
# learning_rate: tc.optional(parameter_spec) = None,
# learning_rate: tc.optional(tc.optional(parameter_spec)) = None,
learning_rate=None,
distance_function:tc.any(tc.enum(GAUSSIAN, LINEAR, EXPONENTIAL), is_function_type)=GAUSSIAN,
distance_function:tc.any(tc.enum(GAUSSIAN, LINEAR, EXPONENTIAL), is_function_type)=None,
params=None,
owner=None,
prefs: is_pref_set = None):
prefs: tc.optional(is_pref_set) = None):

super().__init__(
default_variable=default_variable,
Expand Down Expand Up @@ -1028,7 +1028,7 @@ def __init__(self,
learning_rate=None,
params=None,
owner=None,
prefs: is_pref_set = None):
prefs: tc.optional(is_pref_set) = None):

super().__init__(
default_variable=default_variable,
Expand Down Expand Up @@ -1253,11 +1253,11 @@ class Parameters(LearningFunction.Parameters):

def __init__(self,
default_variable=None,
# learning_rate: tc.optional(parameter_spec) = None,
# learning_rate: tc.optional(tc.optional(parameter_spec)) = None,
learning_rate=None,
params=None,
owner=None,
prefs: is_pref_set = None):
prefs: tc.optional(is_pref_set) = None):

super().__init__(
default_variable=default_variable,
Expand Down Expand Up @@ -1548,11 +1548,11 @@ class Parameters(LearningFunction.Parameters):

def __init__(self,
default_variable=None,
# learning_rate: tc.optional(parameter_spec) = None,
# learning_rate: tc.optional(tc.optional(parameter_spec)) = None,
learning_rate=None,
params=None,
owner=None,
prefs: is_pref_set = None):
prefs: tc.optional(is_pref_set) = None):

super().__init__(
default_variable=default_variable,
Expand Down Expand Up @@ -1900,13 +1900,13 @@ class Parameters(LearningFunction.Parameters):
@tc.typecheck
def __init__(self,
default_variable=None,
activation_derivative_fct: tc.optional(tc.any(types.FunctionType, types.MethodType)) = None,
# learning_rate: tc.optional(parameter_spec) = None,
activation_derivative_fct: tc.optional(tc.optional(tc.any(types.FunctionType, types.MethodType))) = None,
# learning_rate: tc.optional(tc.optional(parameter_spec)) = None,
learning_rate=None,
loss_function=None,
params=None,
owner=None,
prefs: is_pref_set = None):
prefs: tc.optional(is_pref_set) = None):

error_matrix = np.zeros((len(default_variable[LEARNING_ACTIVATION_OUTPUT]),
len(default_variable[LEARNING_ERROR_OUTPUT])))
Expand Down
Loading

0 comments on commit 0374dca

Please sign in to comment.