add new network architecture - server label moves to the middle
@@ -32,7 +32,10 @@ def get_models_by_params(params: dict):
     predict_model = networks.get_model(dropout, flow_features, domain_features, window_size, domain_length,
                                        filter_main, kernel_main, dense_dim, embedding_model)
 
-    return embedding_model, predict_model
+    new_model = networks.get_new_model(dropout, flow_features, domain_features, window_size, domain_length,
+                                       filter_main, kernel_main, dense_dim, embedding_model)
+
+    return embedding_model, predict_model, new_model
 
 
 def get_metrics():
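For context, a minimal sketch of how a caller might unpack the widened return value. It assumes get_models_by_params is in scope and that the params keys mirror the local names used inside it; the concrete values are illustrative.

# hypothetical hyperparameters - the keys and values are assumptions
params = dict(dropout=0.5, flow_features=3, domain_features=512, window_size=10,
              domain_length=40, filter_main=128, kernel_main=3, dense_dim=512)

# embedding_model is the shared per-domain encoder passed into both networks;
# predict_model is the existing architecture, new_model the variant introduced
# here, with the "server" output moved into the middle of the network
embedding_model, predict_model, new_model = get_models_by_params(params)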
@@ -30,8 +30,8 @@ def get_embedding(vocab_size, embedding_size, input_length,
    y = Embedding(input_dim=vocab_size, output_dim=embedding_size)(y)
    y = Conv1D(filters, kernel_size, activation='relu')(y)
    y = GlobalMaxPooling1D()(y)
    y = Dense(hidden_dims)(y)
    y = Dropout(drop_out)(y)
    y = Dense(hidden_dims)(y)
    y = Activation('relu')(y)
    return Model(x, y)
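For orientation, a self-contained sketch of the per-domain encoder whose tail this hunk shows; the Input line and all hyperparameter values are assumptions, only the layer stack is taken from the diff.

from keras.layers import Activation, Conv1D, Dense, Dropout, Embedding, GlobalMaxPooling1D, Input
from keras.models import Model

# illustrative values - the real ones come from the hyperparameter dict
vocab_size, embedding_size, input_length = 40, 128, 40
filters, kernel_size, hidden_dims, drop_out = 128, 3, 512, 0.5

x = Input(shape=(input_length,))   # one domain as a sequence of character ids
y = Embedding(input_dim=vocab_size, output_dim=embedding_size)(x)
y = Conv1D(filters, kernel_size, activation='relu')(y)
y = GlobalMaxPooling1D()(y)        # collapse the character dimension
y = Dense(hidden_dims)(y)
y = Dropout(drop_out)(y)
y = Dense(hidden_dims)(y)
y = Activation('relu')(y)
encoder = Model(x, y)

# one fixed-size vector per domain, so the sub-model can later be applied to
# every position of a window via TimeDistributed
assert encoder.output_shape == (None, hidden_dims)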
@@ -56,3 +56,27 @@ def get_model(cnnDropout, flow_features, domain_features, window_size, domain_le
     y2 = Dense(2, activation='softmax', name="server")(y)
 
     return Model(inputs=[ipt_domains, ipt_flows], outputs=(y1, y2))
+
+
+def get_new_model(dropout, flow_features, domain_features, window_size, domain_length, cnn_dims, kernel_size,
+                  dense_dim, cnn):
+    ipt_domains = Input(shape=(window_size, domain_length), name="ipt_domains")
+    ipt_flows = Input(shape=(window_size, flow_features), name="ipt_flows")
+    encoded = TimeDistributed(cnn)(ipt_domains)
+
+    y2 = Dense(2, activation="softmax", name="server")(encoded)
+    merged = keras.layers.concatenate([encoded, ipt_flows, y2], -1)
+
+    y = Conv1D(cnn_dims,
+               kernel_size,
+               activation='relu',
+               input_shape=(window_size, domain_features + flow_features))(merged)
+    # remove temporal dimension by global max pooling
+    y = GlobalMaxPooling1D()(y)
+    y = Dropout(dropout)(y)
+    y = Dense(dense_dim, activation='relu')(y)
+
+    y1 = Dense(2, activation='softmax', name="client")(y)
+    model = Model(inputs=[ipt_domains, ipt_flows], outputs=(y1, y2))
+
+    return model
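A minimal sketch of how the new two-headed model might be built and compiled. The stand-in encoder, the hyperparameter values and the optimizer/loss choices are assumptions; only get_new_model's signature and the output names "client" and "server" come from the code above.

from keras.layers import Dense, Embedding, GlobalMaxPooling1D, Input
from keras.models import Model
from keras.optimizers import Adam

window_size, domain_length, flow_features, domain_features = 10, 40, 3, 512

# stand-in for the embedding sub-model: any Model mapping one domain of length
# domain_length to a domain_features-sized vector works here
d_in = Input(shape=(domain_length,))
d_out = Dense(domain_features, activation='relu')(
    GlobalMaxPooling1D()(Embedding(input_dim=40, output_dim=128)(d_in)))
cnn = Model(d_in, d_out)

model = get_new_model(dropout=0.5, flow_features=flow_features,
                      domain_features=domain_features, window_size=window_size,
                      domain_length=domain_length, cnn_dims=128, kernel_size=3,
                      dense_dim=512, cnn=cnn)

model.compile(optimizer=Adam(),
              loss={'client': 'categorical_crossentropy',
                    'server': 'categorical_crossentropy'})

# the "server" head now sits before the Conv1D/pooling block, so it predicts
# one softmax per window position; the "client" head stays at the end
assert model.get_layer("server").output_shape == (None, window_size, 2)
assert model.get_layer("client").output_shape == (None, 2)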
@@ -42,3 +42,27 @@ def get_model(cnnDropout, flow_features, domain_features, window_size, domain_le
     y2 = Dense(2, activation='softmax', name="server")(y)
 
     return Model(inputs=[ipt_domains, ipt_flows], outputs=(y1, y2))
+
+
+def get_new_model(dropout, flow_features, domain_features, window_size, domain_length, cnn_dims, kernel_size,
+                  dense_dim, cnn):
+    ipt_domains = Input(shape=(window_size, domain_length), name="ipt_domains")
+    ipt_flows = Input(shape=(window_size, flow_features), name="ipt_flows")
+    encoded = TimeDistributed(cnn)(ipt_domains)
+
+    y2 = Dense(2, activation="softmax", name="server")(encoded)
+    merged = keras.layers.concatenate([encoded, ipt_flows, y2], -1)
+
+    y = Conv1D(cnn_dims,
+               kernel_size,
+               activation='relu',
+               input_shape=(window_size, domain_features + flow_features))(merged)
+    # remove temporal dimension by global max pooling
+    y = GlobalMaxPooling1D()(y)
+    y = Dropout(dropout)(y)
+    y = Dense(dense_dim, activation='relu')(y)
+
+    y1 = Dense(2, activation='softmax', name="client")(y)
+    model = Model(inputs=[ipt_domains, ipt_flows], outputs=(y1, y2))
+
+    return model
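The same change seen from the training side: because the "server" label moves to the middle of the network and is emitted per window position, its targets are per time step. This sketch reuses model, window_size, domain_length and flow_features from the sketch above; the dummy data and fit settings are purely illustrative.

import numpy as np

n = 256
domains = np.random.randint(0, 40, size=(n, window_size, domain_length))
flows = np.random.rand(n, window_size, flow_features)

client_labels = np.eye(2)[np.random.randint(0, 2, size=n)]                 # shape (n, 2)
server_labels = np.eye(2)[np.random.randint(0, 2, size=(n, window_size))]  # shape (n, window_size, 2)

model.fit({"ipt_domains": domains, "ipt_flows": flows},
          {"client": client_labels, "server": server_labels},
          epochs=1, batch_size=64)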