submit.m
function submit()
  addpath('./lib');

  conf.assignmentSlug = 'neural-network-learning';
  conf.itemName = 'Neural Networks Learning';
  conf.partArrays = { ...
    { ...
      '1', ...
      { 'nnCostFunction.m' }, ...
      'Feedforward and Cost Function', ...
    }, ...
    { ...
      '2', ...
      { 'nnCostFunction.m' }, ...
      'Regularized Cost Function', ...
    }, ...
    { ...
      '3', ...
      { 'sigmoidGradient.m' }, ...
      'Sigmoid Gradient', ...
    }, ...
    { ...
      '4', ...
      { 'nnCostFunction.m' }, ...
      'Neural Network Gradient (Backpropagation)', ...
    }, ...
    { ...
      '5', ...
      { 'nnCostFunction.m' }, ...
      'Regularized Gradient', ...
    }, ...
  };
  conf.output = @output;

  submitWithConfiguration(conf);
end
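
% Usage note (a sketch, not part of the original grader script): from the
% exercise directory in Octave/MATLAB, run
%
%   >> submit
%
% The './lib' folder is assumed to provide submitWithConfiguration, which
% handles prompting for your Coursera credentials and uploading each part.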
% output(partId, auxstring) evaluates the student's functions on fixed
% pseudo-random test cases and returns the results as a string for the grader.
function out = output(partId, auxstring)
  % Random test cases
  X = reshape(3 * sin(1:1:30), 3, 10);
  Xm = reshape(sin(1:32), 16, 2) / 5;
  ym = 1 + mod(1:16, 4)';
  t1 = sin(reshape(1:2:24, 4, 3));
  t2 = cos(reshape(1:2:40, 4, 5));
  t = [t1(:) ; t2(:)];

  if partId == '1'
    [J] = nnCostFunction(t, 2, 4, 4, Xm, ym, 0);
    out = sprintf('%0.5f ', J);
  elseif partId == '2'
    [J] = nnCostFunction(t, 2, 4, 4, Xm, ym, 1.5);
    out = sprintf('%0.5f ', J);
  elseif partId == '3'
    out = sprintf('%0.5f ', sigmoidGradient(X));
  elseif partId == '4'
    [J, grad] = nnCostFunction(t, 2, 4, 4, Xm, ym, 0);
    out = sprintf('%0.5f ', J);
    out = [out sprintf('%0.5f ', grad)];
  elseif partId == '5'
    [J, grad] = nnCostFunction(t, 2, 4, 4, Xm, ym, 1.5);
    out = sprintf('%0.5f ', J);
    out = [out sprintf('%0.5f ', grad)];
  end
end
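
% The calls above assume the student-implemented files expose the exercise's
% interfaces. A sketch of the assumed signatures (parameter names are
% illustrative, inferred from the calls in output(); they are not defined in
% this file):
%
%   [J, grad] = nnCostFunction(nn_params, input_layer_size, ...
%                              hidden_layer_size, num_labels, X, y, lambda)
%   g = sigmoidGradient(z)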