# net.py
"""Minimal fully connected neural network with hand-written backprop.

Layers are chained through preLayer/nextLayer links; every layer exposes
forward(), backward(), and update(lr).
"""

import numpy as np


class InputLayer:
    """Holds the network input as a (1, neuroNum) row vector."""

    def __init__(self, neuroNum):
        self.neuroNum = neuroNum
        self.data = np.zeros((1, self.neuroNum))


class NeuroLayer:
    """Affine layer: data = preLayer.data @ weight.T + bias."""

    def __init__(self, neuroNum, preLayer, bias):
        self.neuroNum = neuroNum
        self.preLayer = preLayer
        self.data = np.zeros((1, self.neuroNum))
        # Xavier/Glorot uniform initialization: U(-r, r) with
        # r = sqrt(6 / (fan_in + fan_out)).
        r = np.sqrt(6 / (self.neuroNum + self.preLayer.neuroNum))
        self.weight = np.random.uniform(-r, r, (self.neuroNum, self.preLayer.neuroNum))
        self.bias = np.full((1, self.neuroNum), float(bias))
        self.nextLayer = None
        self.preLayer.nextLayer = self
        # Gradient w.r.t. this layer's input, handed back to preLayer.
        self.diff = np.zeros((1, self.preLayer.neuroNum))
        # Accumulated parameter gradients (reset on each update()).
        self.diffWeight = np.zeros((self.neuroNum, self.preLayer.neuroNum))
        self.diffBias = np.zeros((1, self.neuroNum))

    def forward(self):
        self.data = np.dot(self.preLayer.data, self.weight.T) + self.bias

    def backward(self):
        # Accumulate so several samples can be processed before one update().
        self.diffWeight += np.dot(self.nextLayer.diff.T, self.preLayer.data)
        self.diffBias += self.nextLayer.diff
        self.diff = np.dot(self.nextLayer.diff, self.weight)

    def update(self, lr):
        # Plain gradient-descent step, then reset the accumulators.
        self.bias -= self.diffBias * lr
        self.weight -= self.diffWeight * lr
        self.diffBias = np.zeros((1, self.neuroNum))
        self.diffWeight = np.zeros((self.neuroNum, self.preLayer.neuroNum))


class Sigmoid:
    """Element-wise logistic activation."""

    def __init__(self, preLayer):
        self.preLayer = preLayer
        self.neuroNum = self.preLayer.neuroNum
        self.data = np.zeros((1, self.preLayer.neuroNum))
        self.nextLayer = None
        self.preLayer.nextLayer = self
        self.diff = np.zeros((1, self.preLayer.neuroNum))

    def activate(self, x):
        return 1.0 / (1.0 + np.exp(-x))

    def derivation(self, y):
        # Sigmoid derivative expressed through the activation output y.
        return y * (1.0 - y)

    def forward(self):
        self.data = self.activate(self.preLayer.data)

    def backward(self):
        self.diff = self.nextLayer.diff * self.derivation(self.data)

    def update(self, lr):
        # No trainable parameters.
        pass


class PRelu:
    """Leaky rectifier with a fixed negative slope of 0.25 (the slope is
    not learned, so this is PReLU in name only)."""

    def __init__(self, preLayer):
        self.preLayer = preLayer
        self.neuroNum = self.preLayer.neuroNum
        self.data = np.zeros((1, self.preLayer.neuroNum))
        self.nextLayer = None
        self.preLayer.nextLayer = self
        self.diff = np.zeros((1, self.preLayer.neuroNum))

    def activate(self, x):
        return np.where(x < 0, x * 0.25, x)

    def derivation(self, x):
        # Derivative w.r.t. the pre-activation input x (not the output).
        return np.where(x < 0, 0.25, 1.0)

    def forward(self):
        self.data = self.activate(self.preLayer.data)

    def backward(self):
        self.diff = self.nextLayer.diff * self.derivation(self.preLayer.data)

    def update(self, lr):
        # No trainable parameters.
        pass


class ErrorLayer:
    """Squared-error head: accumulates 0.5 * sum((output - target)^2)."""

    def __init__(self, preLayer):
        self.preLayer = preLayer
        self.neuroNum = self.preLayer.neuroNum
        self.data = 0.0  # accumulated scalar loss
        self.target = np.zeros((1, self.neuroNum))
        self.diff = np.zeros((1, self.preLayer.neuroNum))
        self.preLayer.nextLayer = self

    def forward(self):
        # Summed squared error, kept as the scalar that update() resets to.
        self.data += 0.5 * np.sum(np.power(self.preLayer.data - self.target, 2))

    def backward(self):
        # d/do [0.5 * (o - t)^2] = o - t
        self.diff = self.preLayer.data - self.target

    def update(self, lr):
        # Reset the accumulated loss for the next pass.
        self.data = 0.0
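

# ---------------------------------------------------------------------------
# Usage sketch (an illustration, not part of the original file): wires the
# layers above into a tiny 2-4-1 network and trains it on XOR with per-sample
# updates. The architecture, seed, learning rate, and epoch count are
# assumptions chosen for the demo, not values taken from the source.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    np.random.seed(0)
    inputs = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=float)
    targets = np.array([[0], [1], [1], [0]], dtype=float)

    # Build the chain; each constructor links itself to its predecessor.
    inputLayer = InputLayer(2)
    hidden = NeuroLayer(4, inputLayer, bias=0.1)
    hiddenAct = Sigmoid(hidden)
    out = NeuroLayer(1, hiddenAct, bias=0.1)
    outAct = Sigmoid(out)
    loss = ErrorLayer(outAct)
    layers = [hidden, hiddenAct, out, outAct, loss]

    for epoch in range(10000):
        for x, t in zip(inputs, targets):
            inputLayer.data = x.reshape(1, -1)
            loss.target = t.reshape(1, -1)
            for layer in layers:
                layer.forward()
            for layer in reversed(layers):
                layer.backward()
            for layer in layers:
                layer.update(0.5)

    for x in inputs:
        inputLayer.data = x.reshape(1, -1)
        for layer in layers[:-1]:  # skip the loss head for inference
            layer.forward()
        print(x, "->", outAct.data[0, 0])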