Commit

remove dead code
luster committed Apr 30, 2016
1 parent 93f3b72 commit cfa11a2
Showing 1 changed file with 0 additions and 26 deletions.
thesis/build_networks.py
@@ -138,32 +138,6 @@ def loss_func(self, lambduh=3.0):
         loss = (loss.mean() + lambduh/self.mean_C * regularization_term).mean()
         return loss
 
-    # def train_fn(self, training_data, training_labels, updates='adadelta'):
-    #     self.training_labels_shared.set_value(training_labels.reshape(training_labels.shape[0], training_labels.shape[1], 1), borrow=True)
-    #     self.training_data_shared.set_value(np.asarray(training_data, dtype=dtype), borrow=True)
-    #     self.normlayer.set_normalisation(training_data)
-
-    #     loss = self.loss_func()
-
-    #     indx = theano.shared(0)
-    #     update_args = {
-    #         'adadelta': (lasagne.updates.adadelta, {'learning_rate': 0.01, 'rho': 0.4, 'epsilon': 1e-6,}),
-    #         'adam': (lasagne.updates.adam, {},),
-    #     }[updates]
-    #     update_func, update_params = update_args[0], update_args[1]
-
-    #     params = lasagne.layers.get_all_params(self.network, trainable=True)
-    #     updates = update_func(loss, params, **update_params)
-    #     updates[indx] = indx + 1
-    #     train_fn = theano.function([], loss, updates=updates,
-    #         givens={
-    #             self.input_var: self.training_data_shared[indx, :, :, :, :],
-    #             self.soft_output_var: self.training_labels_shared[indx, :, :],
-    #         },
-    #         allow_input_downcast=True,
-    #     )
-    #     return indx, train_fn
-
     def normalize_batches(self, training_data):
         self.normlayer.set_normalisation(training_data)

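For context, the deleted train_fn followed a common Theano/Lasagne pattern: the entire training set is stored in shared variables (so it can stay on the GPU), a shared batch index is incremented as a side effect of each call through the updates dictionary, and givens substitutes the current batch, so the compiled function takes no arguments. Below is a minimal self-contained sketch of that pattern; the shapes, the one-layer network, and names such as X_shared and y_shared are illustrative stand-ins, not taken from the repository.

import numpy as np
import theano
import theano.tensor as T
import lasagne

# Toy stand-in for the thesis network; shapes are illustrative only.
input_var = T.matrix('X')
target_var = T.matrix('y')
l_in = lasagne.layers.InputLayer((None, 10), input_var)
network = lasagne.layers.DenseLayer(l_in, num_units=1)

prediction = lasagne.layers.get_output(network)
loss = lasagne.objectives.squared_error(prediction, target_var).mean()

# The whole training set lives in shared variables: 100 batches of 5 examples.
X_shared = theano.shared(np.zeros((100, 5, 10), dtype=theano.config.floatX))
y_shared = theano.shared(np.zeros((100, 5, 1), dtype=theano.config.floatX))

# Shared batch index, advanced as a side effect of every training call.
indx = theano.shared(0)
params = lasagne.layers.get_all_params(network, trainable=True)
updates = lasagne.updates.adadelta(loss, params,
                                   learning_rate=0.01, rho=0.4, epsilon=1e-6)
updates[indx] = indx + 1

# No explicit inputs: givens slices the current batch out of shared memory.
train_fn = theano.function([], loss, updates=updates,
                           givens={input_var: X_shared[indx],
                                   target_var: y_shared[indx]},
                           allow_input_downcast=True)

# One epoch: reset the index, then call once per batch.
indx.set_value(0)
epoch_loss = np.mean([train_fn() for _ in range(100)])

The payoff of this design is that no training data crosses the host-device boundary per step; the cost, visible in the deleted code, is the (indx, train_fn) pair the caller has to manage, which may be part of why the method was dropped.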