Skip to content
Snippets Groups Projects
Commit 4f5d19bd authored by Nikolai's avatar Nikolai
Browse files

Adding dropout option

parent 71e626f2
No related branches found
No related tags found
No related merge requests found
......@@ -19,7 +19,7 @@ from sklearn.externals import joblib
from sklearn.metrics import roc_curve, auc
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Dense, Dropout
from keras.models import model_from_json
from keras.callbacks import History, EarlyStopping, CSVLogger
from keras.optimizers import SGD
......@@ -75,6 +75,8 @@ class ClassificationProject(object):
:param nodes: number of nodes in each layer
:param dropout: dropout fraction after each hidden layer. Set to None for no Dropout
:param batch_size: size of the training batches
:param validation_split: split off this fraction of training events for loss evaluation
......@@ -135,6 +137,7 @@ class ClassificationProject(object):
selection=None,
layers=3,
nodes=64,
dropout=None,
batch_size=128,
validation_split=0.33,
activation_function='relu',
......@@ -158,6 +161,7 @@ class ClassificationProject(object):
self.identifiers = identifiers
self.layers = layers
self.nodes = nodes
self.dropout = dropout
self.batch_size = batch_size
self.validation_split = validation_split
self.activation_function = activation_function
......@@ -443,6 +447,8 @@ class ClassificationProject(object):
# the other hidden layers
for layer_number in range(self.layers-1):
self._model.add(Dense(self.nodes, activation=self.activation_function))
if self.dropout is not None:
self._model.add(Dropout(rate=self.dropout))
# last layer is one neuron (binary classification)
self._model.add(Dense(1, activation=self.activation_function_output))
logger.info("Using {}(**{}) as Optimizer".format(self.optimizer, self.optimizer_opts))
......
0% — Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment