diff --git a/toolkit.py b/toolkit.py
index c311170e587cd44b99b97a3f53c1079dbaff1ae3..0f804b774afe24279e337a27fb0f803d8653395a 100755
--- a/toolkit.py
+++ b/toolkit.py
@@ -38,6 +38,7 @@ from keras.models import Sequential, Model, model_from_json
 from keras.layers import Dense, Dropout, Input, Masking, GRU, LSTM, concatenate, SimpleRNN
 from keras.callbacks import History, EarlyStopping, CSVLogger, ModelCheckpoint, TensorBoard
 from keras.optimizers import SGD
+from keras.activations import relu
 import keras.initializers
 import keras.optimizers
 from keras.utils.vis_utils import model_to_dot
@@ -161,6 +162,8 @@ class ClassificationProject(object):
 
     :param activation_function_output: activation function in the output layer
 
+    :param leaky_relu_alpha: set this to a non-zero value to use the LeakyReLU variant with a slope in the negative part
+
     :param out_dir: base directory in which the project directories should be stored
 
     :param scaler_type: sklearn scaler class name to transform the data before training (options: "StandardScaler", "RobustScaler")
@@ -269,6 +272,7 @@ class ClassificationProject(object):
                  kfold_splits=None,
                  kfold_index=0,
                  activation_function='relu',
+                 leaky_relu_alpha=None,
                  activation_function_output='sigmoid',
                  scaler_type="WeightedRobustScaler",
                  step_signal=2,
@@ -344,6 +348,9 @@ class ClassificationProject(object):
         self.kfold_splits = kfold_splits
         self.kfold_index = kfold_index
         self.activation_function = activation_function
+        self.leaky_relu_alpha = leaky_relu_alpha
+        if self.activation_function == "relu" and self.leaky_relu_alpha:
+            self.activation_function = lambda x : relu(x, alpha=self.leaky_relu_alpha)
         self.activation_function_output = activation_function_output
         self.scaler_type = scaler_type
         self.step_signal = step_signal
@@ -805,6 +812,7 @@ class ClassificationProject(object):
 
         if self._model is None:
 
+            # input
             input_layer = Input((len(self.fields),))
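For context, a minimal standalone sketch of the technique the diff relies on: `keras.activations.relu` accepts an `alpha` slope for the negative part, so wrapping it in a lambda when `leaky_relu_alpha` is non-zero turns the hidden-layer activation into a leaky ReLU, as done in `ClassificationProject.__init__`. The layer sizes, input width, and alpha value below are made up for illustration and are not taken from the project.

```python
import numpy as np
from keras.activations import relu
from keras.layers import Dense, Input
from keras.models import Model

leaky_relu_alpha = 0.1  # hypothetical value; None or 0 keeps the plain relu

activation_function = "relu"
if activation_function == "relu" and leaky_relu_alpha:
    # same wrapping as in ClassificationProject.__init__: a callable activation
    # that scales negative pre-activations by alpha instead of zeroing them
    activation_function = lambda x: relu(x, alpha=leaky_relu_alpha)

inputs = Input((5,))  # 5 input fields, chosen arbitrarily for this sketch
hidden = Dense(8, activation=activation_function)(inputs)
outputs = Dense(1, activation="sigmoid")(hidden)
model = Model(inputs=inputs, outputs=outputs)
model.compile(optimizer="sgd", loss="binary_crossentropy")

print(model.predict(np.random.normal(size=(3, 5))))
```

One caveat of this approach: a lambda activation has no registered name, so reloading a saved model (e.g. via `model_from_json`) would presumably require passing the callable through `custom_objects`.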