-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmodel4.py
More file actions
49 lines (36 loc) · 1.76 KB
/
model4.py
File metadata and controls
49 lines (36 loc) · 1.76 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
from keras import regularizers
from keras.models import Model
from keras.layers import Dense, Activation, Dropout, Input, Multiply, LSTM, Conv1D
from keras.layers import Bidirectional, BatchNormalization, Concatenate, GlobalMaxPooling1D
from keras.optimizers import Adam
def malware_detection_model_4(input_shape=(1000, 102), n_classes=2,
                              l2_weight=0.0005, learning_rate=0.001):
    """Build and compile a gated-CNN + BiLSTM classifier.

    Architecture: BatchNorm -> two gated 1-D conv branches (kernel sizes 2
    and 3; each branch is sigmoid-conv * relu-conv, a GLU-style gate) ->
    concat -> BatchNorm -> BiLSTM(100) -> global max pool -> Dense(64, relu)
    -> Dropout(0.5) -> Dense(n_classes) -> softmax.

    Args:
        input_shape: (timesteps, features) of the input sequence.
            Defaults to (1000, 102), the original hard-coded shape.
        n_classes: number of output classes (softmax units). Default 2.
        l2_weight: L2 regularization factor applied to all Conv1D/Dense
            kernels. Default 0.0005.
        learning_rate: Adam learning rate. Default 0.001.

    Returns:
        A compiled keras.Model using sparse_categorical_crossentropy loss,
        Adam optimizer, and accuracy metric.
    """
    X_input = Input(shape=input_shape)
    X = BatchNormalization()(X_input)

    # Two gated convolution branches, identical except for kernel size.
    # Each gate is an elementwise product of a sigmoid and a relu conv.
    branches = []
    for kernel_size in (2, 3):
        gate = Conv1D(filters=128, kernel_size=kernel_size, strides=1,
                      kernel_regularizer=regularizers.l2(l2_weight),
                      activation="sigmoid", padding="same")(X)
        feat = Conv1D(filters=128, kernel_size=kernel_size, strides=1,
                      kernel_regularizer=regularizers.l2(l2_weight),
                      activation="relu", padding="same")(X)
        branches.append(Multiply()([gate, feat]))

    X = Concatenate()(branches)
    X = BatchNormalization()(X)

    # Sequence modeling, then collapse the time axis with a global max pool.
    X = Bidirectional(LSTM(100, return_sequences=True))(X)
    X = GlobalMaxPooling1D()(X)

    # Classification head.
    X = Dense(64, activation='relu', kernel_regularizer=regularizers.l2(l2_weight))(X)
    X = Dropout(0.5)(X)
    X = Dense(n_classes, kernel_regularizer=regularizers.l2(l2_weight))(X)
    X = Activation("softmax")(X)

    model = Model(inputs=X_input, outputs=X)
    # NOTE(review): `decay` is the legacy Keras 2 per-step LR decay argument;
    # it was removed in Keras 3 — confirm the installed Keras version.
    opt = Adam(learning_rate=learning_rate, decay=1e-8)
    model.compile(loss="sparse_categorical_crossentropy", optimizer=opt, metrics=["accuracy"])
    return model