myAPI.py
import tensorflow as tf
import numpy as np


def weight_variable_2D(shape, wInit):
    """Create a 2-D weight matrix with Glorot-style scaling.

    wInit selects the distribution: 'u' for uniform, 'n' for normal.
    """
    n1, n2 = shape
    std = np.sqrt(2.0 / (n1 + n2))
    if wInit == 'u':
        # Uniform on [-sqrt(3)*std, sqrt(3)*std] has standard deviation std.
        initial = tf.random_uniform(shape, -np.sqrt(3) * std, np.sqrt(3) * std)
    elif wInit == 'n':
        initial = tf.random_normal(shape, mean=0.0, stddev=std)
    else:
        raise ValueError('Unsupported weight initializer: ' + wInit)
    return tf.Variable(initial)
def bias_variable(shape, bInit):
    """bias_variable generates a bias variable of a given shape."""
    initial = tf.constant(float(bInit), shape=shape)
    return tf.Variable(initial)
class MLP:
    def __init__(
            self, inNum, hiddenLayers, outNum,
            actiFunc, outActiFunc,
            wInit, bInit):
        self.actiFunc = actiFunc
        self.outActiFunc = outActiFunc
        # Full layer-size list: input, hidden layers, output.
        self.layers = [inNum] + hiddenLayers + [outNum]
        self.W = []
        self.b = []
        # One weight matrix and bias vector per connection between layers.
        for i in range(len(self.layers) - 1):
            self.W.append(weight_variable_2D([self.layers[i], self.layers[i + 1]], wInit))
            self.b.append(bias_variable([self.layers[i + 1]], bInit))
    def __call__(self, x, keepProb, useRes):
        h = x
        # Hidden (non-output) layers.
        for i in range(len(self.layers) - 2):
            h = tf.matmul(h, self.W[i]) + self.b[i]
            if i == len(self.layers) - 2 - 1:
                # Optional residual connection into the last hidden layer:
                # the input is zero-padded on the feature axis to match the
                # hidden width (requires layers[-2] >= layers[0]).
                h = tf.cond(useRes,
                            lambda: h + tf.pad(x, [[0, 0], [0, self.layers[-2] - self.layers[0]]]),
                            lambda: h)
            h = self.actiFunc(h)
            h = tf.nn.dropout(h, keepProb)
        # Output layer: no dropout, separate activation.
        i = len(self.layers) - 2
        h = self.outActiFunc(tf.matmul(h, self.W[i]) + self.b[i])
        return h
def setActiFunc(funcName):
    """Map an activation name to a TensorFlow op.

    'sigm' gives a plain sigmoid; 'sigm<a>' (e.g. 'sigm2.5') gives a
    sigmoid with its input scaled by the coefficient a; 'identity'
    gives the identity function.
    """
    if funcName.startswith('sigm'):
        if funcName == 'sigm':
            return tf.nn.sigmoid
        try:
            a = float(funcName[4:])
        except ValueError:
            raise ValueError('Coef error for sigm: ' + funcName)
        return lambda x: tf.nn.sigmoid(a * x)
    elif funcName == 'identity':
        return tf.identity
    raise ValueError('Unsupported activation function: ' + funcName)
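

# A minimal usage sketch, not part of the original file: it builds a small
# MLP on the TF1 graph API that the code above targets and runs one forward
# pass on random data. The placeholder names and layer sizes are
# hypothetical choices for illustration; the residual path requires the
# last hidden layer to be at least as wide as the input (here 16 >= 4).
if __name__ == '__main__':
    x = tf.placeholder(tf.float32, [None, 4])
    keepProb = tf.placeholder(tf.float32)
    useRes = tf.placeholder(tf.bool)
    mlp = MLP(4, [16, 16], 2,
              setActiFunc('sigm'), setActiFunc('identity'),
              'n', 0.0)
    y = mlp(x, keepProb, useRes)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        out = sess.run(y, feed_dict={x: np.random.rand(3, 4),
                                     keepProb: 1.0,
                                     useRes: True})
        print(out.shape)  # expected: (3, 2)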