-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathlocalFunctions.py
More file actions
52 lines (33 loc) · 1.01 KB
/
localFunctions.py
File metadata and controls
52 lines (33 loc) · 1.01 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
import tensorflow as tf
import uuid
def fill_with_predefined(src):
    """Build a Keras-style initializer that always yields ``src``.

    Args:
        src: The predefined value(s) for the weight (anything accepted by
            ``tf.convert_to_tensor``, e.g. a list or numpy array).

    Returns:
        A callable ``initializer(shape, dtype=None)`` suitable for Keras
        ``*_initializer`` arguments.
    """
    def initializer(shape, dtype=None):
        # NOTE(review): `shape` is intentionally ignored -- the caller is
        # expected to request a shape matching ``src``; confirm upstream.
        # A Keras initializer must return a *tensor*; the layer wraps it in
        # a tf.Variable itself. The original wrapped the value in
        # tf.Variable here (with a uuid name to avoid collisions), which
        # created a nested, untracked variable.
        return tf.convert_to_tensor(src, dtype=dtype)
    return initializer
def activate(x, activationtype):
    """Apply the activation named by ``activationtype`` to ``x``.

    Args:
        x: Input tensor.
        activationtype: Activation name (substring-matched), or ``None``
            for the identity.

    Returns:
        The activated tensor (``x`` unchanged when ``activationtype`` is
        ``None``).

    Raises:
        ValueError: If ``activationtype`` matches no known activation
            (the original silently returned ``None`` here).
    """
    if activationtype is None:
        return x
    if 'relu' in activationtype:
        return tf.keras.activations.relu(x)
    if 'softmax' in activationtype:
        return tf.keras.activations.softmax(x)
    if 'sigmoid' in activationtype:
        return tf.keras.activations.sigmoid(x)
    if 'swish' in activationtype:
        return tf.keras.activations.sigmoid(x) * x
    # BUG FIX: 'selu' must be tested before 'elu' -- 'elu' is a substring
    # of 'selu', so the original order routed 'selu' to the elu activation.
    if "selu" in activationtype:
        return tf.keras.activations.selu(x)
    if "elu" in activationtype:
        return tf.keras.activations.elu(x)
    raise ValueError(f"Unknown activation type: {activationtype!r}")
@tf.custom_gradient
def to_bit(x):
    """Threshold ``x`` to {0, 1}: 1 where x > 0, else 0.

    Forward pass computes ``sign(relu(x))``; the backward pass is the
    identity (straight-through estimator), so gradients flow through the
    binarization unchanged.
    """
    def _straight_through(upstream):
        # Treat the binarization as the identity during backprop.
        return upstream

    bits = tf.sign(tf.keras.activations.relu(x))
    return bits, _straight_through
@tf.custom_gradient
def to_sign(x):
    """Binarize ``x``: positive entries -> -1, non-positive -> +1.

    Via ``to_bit``, ``x > 0`` yields ``(-1)**1 == -1`` and ``x <= 0``
    yields ``(-1)**0 == +1``.
    NOTE(review): this is the *opposite* of ``tf.sign`` for positive
    inputs -- confirm the inversion is intentional.
    """
    y = (-1) ** to_bit(x)
    def grad(dy):
        # Negated pass-through gradient: consistent with the algebraically
        # equivalent form y = 1 - 2*to_bit(x) under a straight-through
        # estimator (the bit's derivative is taken as identity).
        return -dy
    return y, grad