From 4f5d19bd56e073a68c4b26064394d489f8e912f1 Mon Sep 17 00:00:00 2001
From: Nikolai <osterei33@gmx.de>
Date: Wed, 9 May 2018 09:46:11 +0200
Subject: [PATCH] Add dropout option

---
 toolkit.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/toolkit.py b/toolkit.py
index 473a617..d430d14 100755
--- a/toolkit.py
+++ b/toolkit.py
@@ -19,7 +19,7 @@ from sklearn.externals import joblib
 from sklearn.metrics import roc_curve, auc
 
 from keras.models import Sequential
-from keras.layers import Dense
+from keras.layers import Dense, Dropout
 from keras.models import model_from_json
 from keras.callbacks import History, EarlyStopping, CSVLogger
 from keras.optimizers import SGD
@@ -75,6 +75,8 @@ class ClassificationProject(object):
 
     :param nodes: number of nodes in each layer
 
+    :param dropout: dropout fraction applied after each hidden layer except the first; set to None to disable dropout
+
     :param batch_size: size of the training batches
 
     :param validation_split: split off this fraction of training events for loss evaluation
@@ -135,6 +137,7 @@ class ClassificationProject(object):
                         selection=None,
                         layers=3,
                         nodes=64,
+                        dropout=None,
                         batch_size=128,
                         validation_split=0.33,
                         activation_function='relu',
@@ -158,6 +161,7 @@ class ClassificationProject(object):
         self.identifiers = identifiers
         self.layers = layers
         self.nodes = nodes
+        self.dropout = dropout
         self.batch_size = batch_size
         self.validation_split = validation_split
         self.activation_function = activation_function
@@ -443,6 +447,8 @@ class ClassificationProject(object):
             # the other hidden layers
             for layer_number in range(self.layers-1):
                 self._model.add(Dense(self.nodes, activation=self.activation_function))
+                if self.dropout is not None:
+                    self._model.add(Dropout(rate=self.dropout))
             # last layer is one neuron (binary classification)
             self._model.add(Dense(1, activation=self.activation_function_output))
             logger.info("Using {}(**{}) as Optimizer".format(self.optimizer, self.optimizer_opts))
-- 
GitLab
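
For reference, here is a minimal standalone sketch of the network this patch builds, assuming Keras 2.x. The values for layers, nodes and dropout are illustrative, and the first hidden layer plus the input dimension n_inputs are assumptions, since they sit outside the context shown in the hunks. Note that, as patched, no Dropout layer follows the first hidden layer, since the loop only covers "the other hidden layers".

    from keras.models import Sequential
    from keras.layers import Dense, Dropout

    layers = 3       # total number of hidden layers (illustrative)
    nodes = 64       # nodes per hidden layer (illustrative)
    dropout = 0.5    # dropout fraction; None disables dropout
    n_inputs = 10    # hypothetical input dimension

    model = Sequential()
    # first hidden layer (added outside the shown hunk, so it gets no Dropout)
    model.add(Dense(nodes, input_dim=n_inputs, activation="relu"))
    # the other hidden layers, each optionally followed by a Dropout layer
    for _ in range(layers - 1):
        model.add(Dense(nodes, activation="relu"))
        if dropout is not None:
            model.add(Dropout(rate=dropout))
    # output layer: one neuron for binary classification
    model.add(Dense(1, activation="sigmoid"))
    model.compile(optimizer="sgd", loss="binary_crossentropy")

Dropout is only active during training; at inference time Keras passes inputs through unchanged (inverted dropout rescales activations during training), so no manual rescaling is needed when evaluating the classifier.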