activation.go
package whale

import "github.com/hidetatz/whale/tensor"
// Activation is implemented by every activation function in this package.
// Activate applies the function to x and returns the result.
type Activation interface {
	Activate(x *Variable) (*Variable, error)
}
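// A minimal sketch (not part of the original file) of satisfying the
// Activation interface with a custom activation. The identity type below is
// hypothetical; it returns its input unchanged and exists only to show the
// shape of an implementation.
type identity struct{}

// Activate returns x unmodified, making identity a no-op activation.
func (identity) Activate(x *Variable) (*Variable, error) {
	return x, nil
}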
// Sigmoid implements the sigmoid function.
type Sigmoid struct{}

// NewSigmoid initializes a sigmoid activation.
func NewSigmoid() *Sigmoid {
	return &Sigmoid{}
}
// Activate computes sigmoid(x) = 1 / (1 + exp(-x)).
func (s *Sigmoid) Activate(x *Variable) (*Variable, error) {
	// -x
	t1, err := Neg(x)
	if err != nil {
		return nil, err
	}

	// exp(-x)
	t2, err := Exp(t1)
	if err != nil {
		return nil, err
	}

	// 1 + exp(-x)
	t3, err := Add(NewVar(tensor.Scalar(1)), t2)
	if err != nil {
		return nil, err
	}

	// 1 / (1 + exp(-x))
	y, err := Div(NewVar(tensor.Scalar(1)), t3)
	if err != nil {
		return nil, err
	}

	return y, nil
}
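// A usage sketch, not part of the original file: it assumes NewVar and
// tensor.Scalar behave as they are used above. sigmoid(0) evaluates to
// 1 / (1 + e^0) = 0.5.
func exampleSigmoid() (*Variable, error) {
	x := NewVar(tensor.Scalar(0))
	return NewSigmoid().Activate(x)
}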
// SoftMax implements the softmax function for multi-dimensional tensors.
type SoftMax struct {
	axis []int
}

// NewSoftMaxWithAxis initializes a SoftMax activation with the given axes.
// Use this when you want to customize the axes along which softmax is applied.
func NewSoftMaxWithAxis(axis ...int) *SoftMax {
	return &SoftMax{axis: axis}
}

// NewSoftMax initializes a SoftMax activation that is applied along axis 1.
func NewSoftMax() *SoftMax {
	return &SoftMax{axis: []int{1}}
}
// Activate computes softmax(x) = exp(x) / sum(exp(x)) along the configured
// axes. Note that exp(x) is taken directly, without subtracting the maximum
// first, so very large inputs can overflow.
func (s *SoftMax) Activate(x *Variable) (*Variable, error) {
	// exp(x)
	y, err := Exp(x)
	if err != nil {
		return nil, err
	}

	// sum of exp(x) along the configured axes, keeping dimensions so the
	// division below broadcasts.
	sum, err := Sum(y, true, s.axis...)
	if err != nil {
		return nil, err
	}

	// exp(x) / sum(exp(x))
	d, err := Div(y, sum)
	if err != nil {
		return nil, err
	}

	return d, nil
}
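// A usage sketch, not part of the original file. The helper takes 2-D logits
// of shape (batch, classes) as a ready-made *Variable, since this file does
// not show a multi-dimensional tensor constructor; with the default axis of
// 1, each row of the result sums to 1.
func softMaxProbs(logits *Variable) (*Variable, error) {
	return NewSoftMax().Activate(logits)
}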