-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathpython_nn.py
More file actions
111 lines (76 loc) · 3.01 KB
/
python_nn.py
File metadata and controls
111 lines (76 loc) · 3.01 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
############ Neural Network ################
import numpy as np
import matplotlib.pyplot as plt
class dlnet:
    """Two-layer fully connected network (9 -> 15 -> 1): ReLU hidden
    layer, sigmoid output, trained with batch gradient descent (gd).
    """

    def __init__(self, x, y):
        """Store training data and fixed hyperparameters.

        x: input features; presumably shape (9, m) to match dims[0] — TODO confirm
        y: labels; shape (1, m) (Yh is allocated from y.shape[1])
        """
        self.X=x
        self.Y=y
        self.Yh=np.zeros((1,self.Y.shape[1]))  # network output, filled by forward()
        self.L=2  # number of weight layers
        self.dims = [9, 15, 1]  # layer sizes: input, hidden, output
        self.param = {}  # weights/biases: W1, b1, W2, b2 (set by nInit)
        self.ch = {}  # cache of Z/A activations for backward()
        self.grad = {}  # never written in this file
        self.loss = []  # loss history, appended every 500 iterations in gd()
        self.lr=0.003  # learning rate
        self.sam = self.Y.shape[1]  # number of training samples
def nInit(self):
    """Initialize parameters: Gaussian weights scaled by 1/sqrt(fan_in)
    and zero biases for both layers, under a fixed RNG seed.
    """
    np.random.seed(1)
    for layer in (1, 2):
        fan_in, fan_out = self.dims[layer - 1], self.dims[layer]
        self.param['W%d' % layer] = np.random.randn(fan_out, fan_in) / np.sqrt(fan_in)
        self.param['b%d' % layer] = np.zeros((fan_out, 1))
    return
def Sigmoid(Z):
    """Logistic sigmoid, 1 / (1 + e^-Z), applied elementwise."""
    denom = 1 + np.exp(-Z)
    return 1 / denom
def Relu(Z):
    """Rectified linear unit: elementwise max(Z, 0)."""
    return np.maximum(Z, 0)
def forward(self):
    """Forward pass: X -> ReLU(W1.X + b1) -> sigmoid(W2.A1 + b2).

    Caches Z1/A1/Z2/A2 in self.ch for backward(), stores the final
    activation in self.Yh, and returns (Yh, cross-entropy loss).

    Fix: removed two statements that sat after the return (a
    squared-error computation assigning self.Loss) — they were
    unreachable dead code and self.Loss is used nowhere in this file.
    """
    Z1 = self.param['W1'].dot(self.X) + self.param['b1']
    A1 = Relu(Z1)
    self.ch['Z1'], self.ch['A1'] = Z1, A1

    Z2 = self.param['W2'].dot(A1) + self.param['b2']
    A2 = Sigmoid(Z2)
    self.ch['Z2'], self.ch['A2'] = Z2, A2

    self.Yh = A2
    loss = self.nloss(A2)
    return self.Yh, loss
def nloss(self, Yh):
    """Binary cross-entropy of predictions Yh against self.Y,
    averaged over the self.sam training samples.
    """
    pos_term = np.dot(self.Y, np.log(Yh).T)
    neg_term = np.dot(1 - self.Y, np.log(1 - Yh).T)
    return (1. / self.sam) * (-pos_term - neg_term)
def dRelu(x):
    """ReLU derivative: elementwise 1 where x > 0, else 0.

    Fix: the original wrote the 0/1 mask into `x` in place, silently
    mutating the caller's array (backward() passes the cached
    self.ch['Z1'], which was being clobbered). This version returns
    the same 0/1 values without touching the input.
    """
    return (x > 0).astype(x.dtype)
def dSigmoid(Z):
    """Derivative of the logistic sigmoid: sigma(Z) * (1 - sigma(Z))."""
    sig = 1 / (1 + np.exp(-Z))
    return sig * (1 - sig)
def backward(self):
    """One backpropagation + gradient-descent step.

    Reads the activations cached by forward() in self.ch and updates
    W1/b1/W2/b2 in self.param in place with learning rate self.lr.
    """
    # dL/dYh for binary cross-entropy: -(Y/Yh - (1-Y)/(1-Yh)).
    dLoss_Yh = - (np.divide(self.Y, self.Yh ) - np.divide(1 - self.Y, 1 - self.Yh))
    # Output layer: chain through the sigmoid, then form W2/b2 gradients
    # averaged over the batch (A1.shape[1] = number of samples).
    dLoss_Z2 = dLoss_Yh * dSigmoid(self.ch['Z2'])
    dLoss_A1 = np.dot(self.param["W2"].T,dLoss_Z2)
    dLoss_W2 = 1./self.ch['A1'].shape[1] * np.dot(dLoss_Z2,self.ch['A1'].T)
    dLoss_b2 = 1./self.ch['A1'].shape[1] * np.dot(dLoss_Z2, np.ones([dLoss_Z2.shape[1],1]))
    # Hidden layer: mask by the ReLU derivative of the cached Z1.
    dLoss_Z1 = dLoss_A1 * dRelu(self.ch['Z1'])
    # Gradient w.r.t. the input; computed but never used afterwards.
    dLoss_A0 = np.dot(self.param["W1"].T,dLoss_Z1)
    dLoss_W1 = 1./self.X.shape[1] * np.dot(dLoss_Z1,self.X.T)
    dLoss_b1 = 1./self.X.shape[1] * np.dot(dLoss_Z1, np.ones([dLoss_Z1.shape[1],1]))
    # Plain (unregularized) gradient-descent parameter update.
    self.param["W1"] = self.param["W1"] - self.lr * dLoss_W1
    self.param["b1"] = self.param["b1"] - self.lr * dLoss_b1
    self.param["W2"] = self.param["W2"] - self.lr * dLoss_W2
    self.param["b2"] = self.param["b2"] - self.lr * dLoss_b2
def gd(self, X, Y, iter=3000):
    """Train with `iter` iterations of full-batch gradient descent,
    logging the cost every 500 steps.

    Note: the X/Y parameters are accepted but not used — the network
    trains on the self.X/self.Y stored at construction. The parameter
    name `iter` shadows the builtin but is kept: callers pass iter=...
    """
    np.random.seed(1)
    self.nInit()
    for step in range(iter):
        Yh, loss = self.forward()
        self.backward()
        if step % 500 == 0:
            print("Cost after iteration %i: %f" % (step, loss))
            self.loss.append(loss)
    return
# Driver: build the network and train for 15000 iterations.
# NOTE(review): `x` and `y` are not defined anywhere in this file — they
# must come from earlier data-loading code not shown here (presumably a
# (9, m) feature matrix and (1, m) label row); verify before running.
nn = dlnet(x,y)
nn.gd(x, y, iter = 15000)