Commit 0ac29b2d authored by mjboos

Parametrise weird_multiclass output size, switch it to categorical crossentropy, add a col_rank_features helper, and fix RNN_conc_multiclass to output n_out units

parent 83861e9d
@@ -313,12 +313,12 @@ def simple_huge_1_layer_net(trainable=False, prune=True):
         'compilation_args' : {'optimizer_func' : optimizers.Adam, 'optimizer_args' : {'lr' : 0.0005, 'clipnorm' : 1., 'clipvalue':1., 'beta_2':0.99}, 'loss':{'main_output': 'binary_crossentropy'}, 'loss_weights' : [1.]}}
     return model_params
-def weird_multiclass(trainable=False, prune=True):
-    model_func = partial(models.RNN_conc_multiclass, rnn_func=keras.layers.CuDNNGRU, no_rnn_layers=2, hidden_rnn=96, hidden_dense=None)
+def weird_multiclass(trainable=False, prune=True, multiclass=7):
+    model_func = partial(models.RNN_conc_multiclass, rnn_func=keras.layers.CuDNNGRU, no_rnn_layers=1, hidden_rnn=128, hidden_dense=None, n_out=multiclass)
     model_params = {
         'max_features' : 500000, 'model_function' : model_func, 'maxlen' : 500,
         'embedding_dim' : 300, 'trainable' : trainable, 'prune' : prune,
-        'compilation_args' : {'optimizer_func' : optimizers.Adam, 'optimizer_args' : {'lr' : 0.001, 'clipnorm' : 1., 'clipvalue':1., 'beta_2':0.99}, 'loss':{'main_output': 'sparse_categorical_crossentropy'}, 'loss_weights' : [1.]}}
+        'compilation_args' : {'optimizer_func' : optimizers.Adam, 'optimizer_args' : {'lr' : 0.001, 'clipnorm' : 1., 'clipvalue':1., 'beta_2':0.99}, 'loss':{'main_output': 'categorical_crossentropy'}, 'loss_weights' : [1.]}}
     return model_params
 def capsule_net(trainable=False, prune=True):
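Note on the loss change above: Keras' categorical_crossentropy expects one-hot targets, whereas the sparse_categorical_crossentropy it replaces takes integer class indices, so labels fed to weird_multiclass now need an explicit conversion. A minimal sketch, assuming a hypothetical integer label array y_int:

```python
# Hypothetical labels; categorical_crossentropy needs one-hot targets,
# while the replaced sparse variant took the raw integer indices.
import numpy as np
from keras.utils import to_categorical

n_classes = 7                    # matches the new multiclass=7 default
y_int = np.array([0, 3, 6, 2])   # integer class labels (sparse format)
y_onehot = to_categorical(y_int, num_classes=n_classes)  # shape (4, 7)
```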
@@ -57,6 +57,9 @@ def preds_to_norm_rank(predictions, cols=True):
     which_cols = np.array([i for i,col in enumerate(all_cols) if col in cols])
     return np.concatenate([norm_rank(preds)[:,None] if i in which_cols else preds[:,None] for i, preds in enumerate(predictions.T)], axis=-1)
+def col_rank_features(X):
+    return np.concatenate([norm_rank(x_col)[:,None] for x_col in X.T], axis=-1)
 def norm_rank(arr):
     from sklearn.preprocessing import minmax_scale
     return minmax_scale(rank(arr))
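For reference, the rank-normalisation used by the new col_rank_features helper can be reproduced stand-alone; `rank` in the diff is assumed to be a rankdata-style helper imported elsewhere in the module. A minimal sketch with hypothetical data:

```python
# Self-contained sketch of the per-column rank-normalisation above;
# `rank` is assumed equivalent to scipy.stats.rankdata.
import numpy as np
from scipy.stats import rankdata
from sklearn.preprocessing import minmax_scale

def norm_rank(arr):
    # Replace values by their rank, then scale the ranks to [0, 1].
    return minmax_scale(rankdata(arr))

X = np.array([[3., 10.], [1., 20.], [2., 15.]])
X_ranked = np.concatenate([norm_rank(col)[:, None] for col in X.T], axis=-1)
# Each column is now its min-max-scaled rank, e.g. first column -> [1., 0., 0.5]
```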
@@ -1096,7 +1096,7 @@ def RNN_conc_multiclass(x, no_rnn_layers=2, hidden_rnn=48, hidden_dense=None, rn
     if hidden_dense is not None:
         x = Dense(int(hidden_dense), activation='relu')(x)
         x = Dropout(dropout)(x)
-    x = Dense(1, activation="softmax", name='main_output')(x)
+    x = Dense(n_out, activation="softmax", name='main_output')(x)
     return x, None
 def RNN_conc(x, no_rnn_layers=2, hidden_rnn=48, hidden_dense=None, rnn_func=None, dropout=0.5,n_out=6):
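The one-line change above fixes a real bug: softmax over a single unit is identically 1.0, so Dense(1, activation="softmax") could never discriminate between classes, while Dense(n_out, ...) yields a proper distribution. A quick numpy demonstration with hypothetical logits:

```python
# Softmax over one logit always returns 1.0; over n_out logits it is a
# genuine probability distribution, which is what the fix restores.
import numpy as np

def softmax(z):
    e = np.exp(z - z.max(axis=-1, keepdims=True))
    return e / e.sum(axis=-1, keepdims=True)

print(softmax(np.array([[5.0]])))            # [[1.]] regardless of the logit
print(softmax(np.array([[1.0, 2.0, 0.5]])))  # sums to 1 across 3 classes
```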