-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathDNNModel.py
More file actions
37 lines (32 loc) · 1.19 KB
/
DNNModel.py
File metadata and controls
37 lines (32 loc) · 1.19 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
import random
import numpy as np
import tensorflow as tf
from keras.layers import Dense, Dropout
from keras.models import Sequential
from keras.regularizers import l1, l2
from keras.optimizers import Adam
def set_seeds(seed = 100):
    """Seed Python's, NumPy's, and TensorFlow's RNGs for reproducible runs."""
    for seeder in (random.seed, np.random.seed, tf.random.set_seed):
        seeder(seed)
def cw(df):
    """Return balanced class weights {0: w0, 1: w1} for the binary "dir" column.

    Each weight is len(df) / (2 * class_count), so the rarer class is
    weighted up. Assumes "dir" contains exactly the labels 0 and 1.
    """
    n_zero, n_one = np.bincount(df["dir"])
    n = len(df)
    return {0: (1 / n_zero) * n / 2, 1: (1 / n_one) * n / 2}
# Module-level Adam optimizer (small learning rate) used as a default below.
# NOTE(review): a single Keras optimizer instance cannot safely be reused
# across multiple compiled models -- confirm this object is consumed only once.
optimizer = Adam(learning_rate = 0.0001)
def create_model(hl = 2, hu = 100, dropout = False, rate = 0.3, regularize = False,
                 reg = None, optimizer = None, input_dim = None):
    """Build and compile a dense binary-classification network.

    Parameters
    ----------
    hl : int
        Number of ADDITIONAL hidden layers after the input layer
        (the network has hl + 1 hidden Dense layers in total).
    hu : int
        Units per hidden layer.
    dropout : bool
        If True, insert a Dropout(rate, seed=100) after every hidden layer.
    rate : float
        Dropout rate (only used when dropout=True).
    regularize : bool
        If True, apply an activity regularizer to every hidden layer.
    reg : keras regularizer or None
        Regularizer to use when regularize=True; defaults to l1(0.0005).
        Ignored (forced to None) when regularize=False.
    optimizer : keras optimizer or None
        Optimizer for compilation. Defaults to a FRESH Adam(learning_rate=0.0001)
        per call -- a Keras optimizer instance must not be shared across models,
        so the previous module-level shared default was a bug.
    input_dim : int or None
        Input feature dimension for the first Dense layer.

    Returns
    -------
    A compiled keras Sequential model (binary cross-entropy loss,
    sigmoid output, accuracy metric).
    """
    if not regularize:
        reg = None
    elif reg is None:
        # Same default strength as the original l1(0.0005), but created per
        # call instead of once at import time.
        reg = l1(0.0005)
    if optimizer is None:
        # Fresh optimizer per model: reusing one optimizer instance across
        # several compiled models breaks its per-variable state in TF2.
        optimizer = Adam(learning_rate = 0.0001)
    model = Sequential()
    model.add(Dense(hu, input_dim = input_dim, activity_regularizer = reg, activation = "relu"))
    if dropout:
        model.add(Dropout(rate, seed = 100))
    # NOTE: this adds hl MORE hidden layers on top of the first one,
    # so the total hidden-layer count is hl + 1 (preserved from the original).
    for _ in range(hl):
        model.add(Dense(hu, activation = "relu", activity_regularizer = reg))
        if dropout:
            model.add(Dropout(rate, seed = 100))
    model.add(Dense(1, activation = "sigmoid"))
    model.compile(loss = "binary_crossentropy", optimizer = optimizer, metrics = ["accuracy"])
    return model