mlp.py
#!/usr/bin/env python2
import numpy as np

from keras.layers import Dense, Dropout, Activation
from keras.models import Sequential
from keras.optimizers import SGD
from keras.utils import np_utils


def _to_categorical(y, nb_classes):
    """Convert a vector of integer class labels to a one-hot matrix."""
    return np_utils.to_categorical(y, nb_classes)
class MLP(Sequential):
    """A small multi-layer perceptron with a scikit-learn-like fit/predict_proba API."""

    def __init__(self):
        super(MLP, self).__init__()
        self.nb_classes = 0

    def fit(self, X, y):
        input_dim = X.shape[1]
        self.nb_classes = len(np.unique(y))
        # Labels are expected to be the contiguous integers 0 .. nb_classes - 1.
        assert self.nb_classes == np.max(y) + 1
        y = _to_categorical(y, self.nb_classes)

        # Dense(64) is a fully-connected layer with 64 hidden units.
        # The first layer must specify the expected input shape; here it is
        # taken from X, i.e. input_dim-dimensional feature vectors.
        self.add(Dense(64, input_dim=input_dim, init="uniform"))
        self.add(Activation("tanh"))
        self.add(Dropout(0.5))
        self.add(Dense(64, init="uniform"))
        self.add(Activation("tanh"))
        self.add(Dropout(0.5))
        self.add(Dense(self.nb_classes, init="uniform"))
        self.add(Activation("softmax"))

        sgd = SGD(lr=0.1, decay=1e-6, momentum=0.9, nesterov=True)
        self.compile(loss="categorical_crossentropy", optimizer=sgd,
                     metrics=["accuracy"])
        super(MLP, self).fit(X, y,
                             nb_epoch=20,
                             batch_size=16)

    def predict_proba(self, X):
        return super(MLP, self).predict_proba(X, batch_size=16)
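

if __name__ == "__main__":
    # Minimal usage sketch (not part of the original module): train the MLP on
    # synthetic data and print class probabilities. The sample count, feature
    # dimensionality, and number of classes below are arbitrary assumptions
    # chosen only to illustrate the fit / predict_proba interface.
    rng = np.random.RandomState(0)
    X = rng.rand(200, 20).astype("float32")  # 200 samples, 20 features
    y = rng.randint(0, 3, size=200)          # integer labels 0, 1, 2

    clf = MLP()
    clf.fit(X, y)                    # builds, compiles, and trains the network
    proba = clf.predict_proba(X[:5]) # each row sums to 1 across the 3 classes
    print(proba)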