mlp_layer.py
import math

import numpy


def sigmoid(x):
    """Logistic activation function."""
    return 1 / (1 + math.exp(-x))


class Layer(object):
    def __init__(self, num_neurons,
                 weight_function=lambda: numpy.random.uniform(0.0, 0.2),
                 activation_fn=lambda x: sigmoid(x),
                 derivative_fn=lambda x: sigmoid(x) * (1 - sigmoid(x))):
        """
        Do the initial setup of the layer.
        """
        if num_neurons <= 0:
            raise ValueError("num_neurons must be a positive integer")
        self.num_neurons = num_neurons
        self.weight_function = weight_function
        self.next = None
        self.prev = None
        self.weights = None
        self.weight_changes = None
        self.difs = None
        self.has_bias = False
        self.values = []
        self.activation_fn = activation_fn
        self.derivative_fn = derivative_fn

    ## set the next layer
    def next_layer(self, layer):
        """
        Set the following layer.
        """
        self.next = layer

    ## set the previous layer
    def prev_layer(self, layer):
        """
        Set the previous layer.
        """
        self.prev = layer

    ## initialize the value vector
    def init_values(self):
        """
        Initialize all values to zero; the bias value, if any, is fixed at 1.
        """
        self.values = [0 for _ in range(self.num_neurons)]
        if self.has_bias:
            self.values[-1] = 1.

    ## initialize the weight matrix between this layer and its follower
    def init_weights(self):
        """
        Initialize the weights and weight changes towards the next layer.
        """
        if self.next is not None:
            self.weights = []
            self.weight_changes = []
            for _ in range(self.num_neurons):
                self.weights.append([self.weight_function()
                                     for _ in range(self.next.num_neurons)])
                self.weight_changes.append([0
                                            for _ in range(self.next.num_neurons)])

    ## add a bias neuron to this layer if desired
    def set_bias(self):
        """
        Enable bias on this layer.
        """
        self.num_neurons += 1
        self.has_bias = True
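

# ---------------------------------------------------------------------------
# Minimal usage sketch (not part of the original file). It assumes the
# forward/backward propagation code lives elsewhere in the project and only
# shows how two Layer objects might be wired together and initialized.
if __name__ == "__main__":
    input_layer = Layer(num_neurons=3)
    output_layer = Layer(num_neurons=1)

    input_layer.set_bias()                 # add a bias neuron (3 -> 4 neurons)
    input_layer.next_layer(output_layer)   # link the layers in both directions
    output_layer.prev_layer(input_layer)

    input_layer.init_values()              # values start at 0; bias value fixed at 1
    output_layer.init_values()
    input_layer.init_weights()             # 4 x 1 weight matrix towards the output layer

    print(len(input_layer.weights), len(input_layer.weights[0]))  # prints: 4 1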