change model - add dense before server output in new model

add some new run scripts
2017-08-05 09:33:07 +02:00
parent 6e7dc1297c
commit 5a02f582cd
6 changed files with 82 additions and 21 deletions


@@ -68,10 +68,10 @@ def get_new_model(dropout, flow_features, domain_features, window_size, domain_l
     ipt_domains = Input(shape=(window_size, domain_length), name="ipt_domains")
     ipt_flows = Input(shape=(window_size, flow_features), name="ipt_flows")
     encoded = TimeDistributed(cnn)(ipt_domains)
-    y2 = Dense(1, activation="sigmoid", name="server")(encoded)
-    merged = keras.layers.concatenate([encoded, ipt_flows, y2], -1)
+    merged = keras.layers.concatenate([encoded, ipt_flows], -1)
+    y = Dense(dense_dim, activation="relu")(merged)
+    y2 = Dense(1, activation="sigmoid", name="server")(y)
     # CNN processing small slices of flow windows
     y = Conv1D(cnn_dims,
                kernel_size,
                activation='relu',
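
Both hunks in this commit make the same change: the `server` sigmoid no longer reads the `TimeDistributed` domain encoding directly, and its prediction is no longer concatenated back into the flow features; instead a new shared relu `Dense` layer sits between the merge and both consumers. Isolated as a before/after sketch, using the identifiers from the hunks (Keras 2 functional API):

    # before: server head read `encoded` directly, and its prediction
    # was fed back into the merged features
    #   y2 = Dense(1, activation="sigmoid", name="server")(encoded)
    #   merged = keras.layers.concatenate([encoded, ipt_flows, y2], -1)

    # after: one shared dense layer feeds both the server head and the flow CNN
    merged = keras.layers.concatenate([encoded, ipt_flows], -1)  # (batch, window, feats)
    y = Dense(dense_dim, activation="relu")(merged)              # new dense before the output
    y2 = Dense(1, activation="sigmoid", name="server")(y)        # one prediction per flow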


@@ -51,14 +51,16 @@ def get_new_model(dropout, flow_features, domain_features, window_size, domain_l
     ipt_domains = Input(shape=(window_size, domain_length), name="ipt_domains")
     ipt_flows = Input(shape=(window_size, flow_features), name="ipt_flows")
     encoded = TimeDistributed(cnn)(ipt_domains)
-    y2 = Dense(1, activation="sigmoid", name="server")(encoded)
-    merged = keras.layers.concatenate([encoded, ipt_flows, y2], -1)
-    y = Conv1D(cnn_dims,
-               kernel_size,
-               activation='relu',
-               input_shape=(window_size, domain_features + flow_features))(merged)
+    merged = keras.layers.concatenate([encoded, ipt_flows], -1)
+    y = Dense(dense_dim, activation="relu")(merged)
+    y2 = Dense(1, activation="sigmoid", name="server")(y)
+    # CNN processing small slices of flow windows
+    y = Conv1D(filters=cnn_dims, kernel_size=kernel_size, activation='relu', padding="same",
+               input_shape=(window_size, domain_features + flow_features))(y)
+    y = MaxPool1D(pool_size=3, strides=1)(y)
+    y = Conv1D(filters=cnn_dims, kernel_size=kernel_size, activation='relu', padding="same")(y)
     y = MaxPool1D(pool_size=3, strides=1)(y)
     y = Conv1D(filters=cnn_dims, kernel_size=kernel_size, activation='relu', padding="same")(y)
     # remove temporal dimension by global max pooling
     y = GlobalMaxPooling1D()(y)
     y = Dropout(dropout)(y)
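
For orientation, a self-contained sketch of how the model reads after this commit. The character-level `cnn` sub-model, the final `client` output, the compile call, and all concrete dimensions are illustrative assumptions, not taken from the diff:

    import keras
    from keras.layers import (Conv1D, Dense, Dropout, Embedding,
                              GlobalMaxPooling1D, Input, MaxPool1D, TimeDistributed)
    from keras.models import Model

    # illustrative dimensions only
    window_size, domain_length, flow_features = 10, 40, 3
    domain_features, dense_dim, cnn_dims, kernel_size, dropout = 16, 64, 32, 3, 0.5

    # stand-in for the per-domain character CNN that TimeDistributed wraps
    char_in = Input(shape=(domain_length,))
    d = Embedding(input_dim=128, output_dim=domain_features)(char_in)
    d = Conv1D(domain_features, kernel_size, activation="relu")(d)
    d = GlobalMaxPooling1D()(d)
    cnn = Model(char_in, d)

    ipt_domains = Input(shape=(window_size, domain_length), name="ipt_domains")
    ipt_flows = Input(shape=(window_size, flow_features), name="ipt_flows")
    encoded = TimeDistributed(cnn)(ipt_domains)              # (batch, window, domain_features)
    merged = keras.layers.concatenate([encoded, ipt_flows], -1)
    y = Dense(dense_dim, activation="relu")(merged)          # new shared dense layer
    y2 = Dense(1, activation="sigmoid", name="server")(y)    # per-flow server output
    # CNN sliding over the flow window
    y = Conv1D(filters=cnn_dims, kernel_size=kernel_size, activation="relu", padding="same")(y)
    y = MaxPool1D(pool_size=3, strides=1)(y)
    y = Conv1D(filters=cnn_dims, kernel_size=kernel_size, activation="relu", padding="same")(y)
    y = MaxPool1D(pool_size=3, strides=1)(y)
    y = Conv1D(filters=cnn_dims, kernel_size=kernel_size, activation="relu", padding="same")(y)
    y = GlobalMaxPooling1D()(y)                              # drop the temporal dimension
    y = Dropout(dropout)(y)
    y1 = Dense(1, activation="sigmoid", name="client")(y)    # assumed window-level output
    model = Model(inputs=[ipt_domains, ipt_flows], outputs=[y1, y2])
    model.compile(optimizer="adam", loss="binary_crossentropy")
    model.summary()

Note that `y2` comes from a per-timestep `Dense`, so the `server` output has shape (batch, window_size, 1), i.e. one prediction per flow, while the assumed `client` output is a single prediction per window.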