Gradient_Descent.py
# Logistic regression on two synthetic Gaussian clusters, trained with batch
# gradient descent; the decision boundary is redrawn as training progresses.
import numpy as np
import matplotlib.pyplot as plt

def draw(x1, x2):
    # Plot the current decision boundary, pause briefly so the figure
    # updates, then remove the line so the next iteration can redraw it.
    ln = plt.plot(x1, x2, '-')
    plt.pause(0.0001)
    ln[0].remove()

def sigmoid(score):
    # Squash raw scores into probabilities in (0, 1).
    return 1 / (1 + np.exp(-score))

def calc_error(linear_parameters, points, y):
    # Average cross-entropy (log loss) of the current parameters.
    m = points.shape[0]
    p = sigmoid(points * linear_parameters)
    cross_entropy = -(np.log(p).T * y + np.log(1 - p).T * (1 - y)) / m
    return cross_entropy

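# Note: the parameter update in gradient_descent below is the analytic
# gradient of the cross-entropy above, points.T * (p - y) / m, scaled by
# the learning rate alpha at every step.
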
def gradient_descent(line_parameters, points, y, alpha):
    m = points.shape[0]
    for i in range(2000):
        p = sigmoid(points * line_parameters)
        gradient = points.T * (p - y) * (alpha / m)
        line_parameters = line_parameters - gradient
        w1 = line_parameters.item(0)
        w2 = line_parameters.item(1)
        b = line_parameters.item(2)
        # Decision boundary w1*x1 + w2*x2 + b = 0, solved for x2 at the
        # left and right edges of the data.
        x1 = np.array([points[:, 0].min(), points[:, 0].max()])
        x2 = -b / w2 + (x1 * (-w1 / w2))
        draw(x1, x2)

pts = 100
np.random.seed(0)
bias = np.ones(pts)
# Two Gaussian clusters; the column of ones acts as the bias feature.
top_region = np.array([np.random.normal(10, 2, pts), np.random.normal(12, 2, pts), bias]).T
bottom_region = np.array([np.random.normal(5, 2, pts), np.random.normal(6, 2, pts), bias]).T
all_points = np.vstack((top_region, bottom_region))
linear_parameters = np.matrix(np.zeros(3)).T
# Unused leftover; the boundary is now computed inside gradient_descent:
# x1 = np.array([bottom_region[:, 0].min(), top_region[:, 0].max()])
# x2 = -b / w1 + x1 * (-w1 / w2)
# Labels: 0 for the top (red) cluster, 1 for the bottom (blue) cluster.
y = np.array([np.zeros(pts), np.ones(pts)]).reshape(pts * 2, 1)
plt.scatter(top_region[:, 0], top_region[:, 1], c='r')
plt.scatter(bottom_region[:, 0], bottom_region[:, 1], c='b')
gradient_descent(linear_parameters, all_points, y, 0.06)
plt.show()
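
# calc_error is defined above but never called. Below is a minimal sketch of
# how the loss could be monitored during training; it is not part of the
# original script, and the name gradient_descent_with_loss is hypothetical.
def gradient_descent_with_loss(line_parameters, points, y, alpha, iterations=2000):
    # Same update rule as gradient_descent above, but prints the
    # cross-entropy periodically and returns the fitted parameters.
    m = points.shape[0]
    for i in range(iterations):
        p = sigmoid(points * line_parameters)
        line_parameters = line_parameters - points.T * (p - y) * (alpha / m)
        if i % 500 == 0:
            print(i, calc_error(line_parameters, points, y).item(0))
    return line_parameters

# Example usage (commented out so the original behaviour is unchanged):
# fitted = gradient_descent_with_loss(linear_parameters, all_points, y, 0.06)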