Add new network architecture: the server label is now predicted in the middle of the network (per time step) and fed into the client branch.

This commit is contained in:
2017-07-29 19:42:36 +02:00
parent 8cd1023165
commit 820a5d1a4d
5 changed files with 151 additions and 40 deletions

View File

@@ -32,7 +32,10 @@ def get_models_by_params(params: dict):
predict_model = networks.get_model(dropout, flow_features, domain_features, window_size, domain_length,
filter_main, kernel_main, dense_dim, embedding_model)
return embedding_model, predict_model
new_model = networks.get_new_model(dropout, flow_features, domain_features, window_size, domain_length,
filter_main, kernel_main, dense_dim, embedding_model)
return embedding_model, predict_model, new_model
def get_metrics():

View File

@@ -30,8 +30,8 @@ def get_embedding(vocab_size, embedding_size, input_length,
y = Embedding(input_dim=vocab_size, output_dim=embedding_size)(y)
y = Conv1D(filters, kernel_size, activation='relu')(y)
y = GlobalMaxPooling1D()(y)
y = Dense(hidden_dims)(y)
y = Dropout(drop_out)(y)
y = Dense(hidden_dims)(y)
y = Activation('relu')(y)
return Model(x, y)
@@ -56,3 +56,27 @@ def get_model(cnnDropout, flow_features, domain_features, window_size, domain_le
y2 = Dense(2, activation='softmax', name="server")(y)
return Model(inputs=[ipt_domains, ipt_flows], outputs=(y1, y2))
def get_new_model(dropout, flow_features, domain_features, window_size, domain_length, cnn_dims, kernel_size,
                  dense_dim, cnn):
    """Build the architecture where the server label sits in the middle.

    The shared domain CNN embeds each domain of the window; a per-time-step
    softmax predicts the server label from those embeddings, and that
    prediction is concatenated back with the embeddings and the flow
    features before the client branch.

    :param dropout: dropout rate applied after global max pooling
    :param flow_features: number of per-flow feature columns
    :param domain_features: embedding width of the domain CNN (only used
        for the Conv1D ``input_shape`` hint)
    :param window_size: number of flows per window (temporal axis)
    :param domain_length: number of characters per encoded domain
    :param cnn_dims: number of Conv1D filters
    :param kernel_size: Conv1D kernel width
    :param dense_dim: width of the hidden dense layer
    :param cnn: the shared embedding sub-model applied per time step
    :return: Model([ipt_domains, ipt_flows]) -> (client, per-step server)
    """
    ipt_domains = Input(shape=(window_size, domain_length), name="ipt_domains")
    ipt_flows = Input(shape=(window_size, flow_features), name="ipt_flows")
    # embed every domain in the window with the shared CNN
    domain_embeddings = TimeDistributed(cnn)(ipt_domains)
    # server label predicted per time step, "in the middle" of the network
    server_out = Dense(2, activation="softmax", name="server")(domain_embeddings)
    # feed the intermediate server prediction into the client branch
    combined = keras.layers.concatenate(
        [domain_embeddings, ipt_flows, server_out], axis=-1)
    hidden = Conv1D(cnn_dims, kernel_size, activation='relu',
                    input_shape=(window_size, domain_features + flow_features))(combined)
    # collapse the temporal axis via global max pooling
    hidden = GlobalMaxPooling1D()(hidden)
    hidden = Dropout(dropout)(hidden)
    hidden = Dense(dense_dim, activation='relu')(hidden)
    client_out = Dense(2, activation='softmax', name="client")(hidden)
    return Model(inputs=[ipt_domains, ipt_flows], outputs=(client_out, server_out))

View File

@@ -42,3 +42,27 @@ def get_model(cnnDropout, flow_features, domain_features, window_size, domain_le
y2 = Dense(2, activation='softmax', name="server")(y)
return Model(inputs=[ipt_domains, ipt_flows], outputs=(y1, y2))
def get_new_model(dropout, flow_features, domain_features, window_size, domain_length, cnn_dims, kernel_size,
                  dense_dim, cnn):
    """Build the new architecture with a mid-network server prediction.

    Each domain of the window is embedded by the shared CNN ``cnn``; a
    per-time-step softmax emits the server label directly from those
    embeddings, and the label is concatenated with the embeddings and the
    flow features before the convolutional client branch.

    :param dropout: dropout rate applied after global max pooling
    :param flow_features: number of per-flow feature columns
    :param domain_features: embedding width of the domain CNN (only used
        for the Conv1D ``input_shape`` hint)
    :param window_size: number of flows per window (temporal axis)
    :param domain_length: number of characters per encoded domain
    :param cnn_dims: number of Conv1D filters
    :param kernel_size: Conv1D kernel width
    :param dense_dim: width of the hidden dense layer
    :param cnn: the shared embedding sub-model applied per time step
    :return: Model([ipt_domains, ipt_flows]) -> (client, per-step server)
    """
    ipt_domains = Input(shape=(window_size, domain_length), name="ipt_domains")
    ipt_flows = Input(shape=(window_size, flow_features), name="ipt_flows")
    # one embedding per domain in the window, via the shared CNN
    per_step_embedding = TimeDistributed(cnn)(ipt_domains)
    # the server label is predicted in the middle, per time step
    server_out = Dense(2, activation="softmax", name="server")(per_step_embedding)
    # client branch sees embeddings, flow features, and the server label
    features = keras.layers.concatenate(
        [per_step_embedding, ipt_flows, server_out], axis=-1)
    out = Conv1D(cnn_dims, kernel_size, activation='relu',
                 input_shape=(window_size, domain_features + flow_features))(features)
    # drop the temporal dimension with global max pooling
    out = GlobalMaxPooling1D()(out)
    out = Dropout(dropout)(out)
    out = Dense(dense_dim, activation='relu')(out)
    client_out = Dense(2, activation='softmax', name="client")(out)
    return Model(inputs=[ipt_domains, ipt_flows], outputs=(client_out, server_out))