Skip to content
Snippets Groups Projects
Commit d6b7647e authored by Thomas Weber's avatar Thomas Weber
Browse files

Optimize SGD parameters, stop generating plots

parent f3caa1b9
No related branches found
No related tags found
No related merge requests found
Source diff could not be displayed: it is too large. Options to address this: view the blob.
......@@ -91,15 +91,23 @@ def print_networks(networks):
def main():
"""Evolve a network."""
generations = 10 # Number of times to evole the population.
population = 20 # Number of networks in each generation.
generations = 7 # Number of times to evolve the population.
population = 5 # Number of networks in each generation.
nn_param_choices = {
'nb_neurons': [64, 128, 256, 512, 768, 1024],
'nb_layers': [1, 2, 3, 4],
'nb_neurons': [8, 16, 32, 64, 128, 256, 512, 768, 1024],
'nb_layers': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
'activation': ['relu', 'elu', 'tanh', 'sigmoid'],
'optimizer': ['rmsprop', 'adam', 'sgd', 'adagrad',
'adadelta', 'adamax', 'nadam'],
#'optimizer': ['rmsprop', 'adam', 'sgd', 'adagrad',
# 'adadelta', 'adamax', 'nadam'],
#'optimizer_opts': {'lr': [0.1, 0.5, 1.0, 10.0, 100.0],
# 'decay': [1e-1, 1e-2, 1e-3, 1e-4, 1e-5, 1e-6, 1e-7],
# 'momentum': [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7,
# 0.8, 0.9, 1.0]},
'lr': [0.1, 0.5, 1.0, 10.0, 100.0],
'decay': [1e-1, 1e-2, 1e-3, 1e-4, 1e-5, 1e-6, 1e-7],
'momentum': [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0]
}
logging.info("***Evolving %d generations with population %d***" %
......
......@@ -20,6 +20,7 @@ class Network(object):
nb_layers (list): [1, 2, 3, 4]
activation (list): ['relu', 'elu']
optimizer (list): ['rmsprop', 'adam']
optimizer_opts (dict(list)): {'lr': [0.5, ...], 'decay', ...}
"""
self.accuracy = 0.
self.nn_param_choices = nn_param_choices
......
......@@ -12,21 +12,29 @@ def init_model(network):
nb_layers = network['nb_layers']
nb_neurons = network['nb_neurons']
activation = network['activation']
optimizer = network['optimizer']
# optimizer = network['optimizer']
lr = network['lr']
decay = network['decay']
momentum = network['momentum']
filename = "/project/etp4/nhartmann/trees/allTrees_m1.8_NoSys.root"
c = KerasROOTClassification("",
signal_trees = [(filename, "GG_oneStep_1705_1105_505_NoSys")],
signal_trees = [(filename, "GG_oneStep_1545_1265_985_NoSys")],
bkg_trees = [(filename, "ttbar_NoSys"),
(filename, "wjets_Sherpa221_NoSys")
(filename, "wjets_Sherpa221_NoSys"),
(filename, "zjets_Sherpa221_NoSys"),
(filename, "diboson_Sherpa221_NoSys"),
(filename, "ttv_NoSys"),
(filename, "singletop_NoSys")
],
dumping_enabled=False,
optimizer=optimizer,
optimizer="SGD",
layers=nb_layers,
nodes=nb_neurons,
activation_function=activation,
#optimizer_opts=dict(lr=100., decay=1e-6, momentum=0.9),
optimizer_opts=dict(lr=lr, decay=decay,
momentum=momentum),
earlystopping_opts=dict(monitor='val_loss',
min_delta=0, patience=2, verbose=0, mode='auto'),
# optimizer="Adam",
......@@ -40,7 +48,7 @@ def init_model(network):
def train_and_score(network):
model = init_model(network)
model.train()
model.train(epochs=20)
score = model.score
......
......@@ -401,9 +401,10 @@ class KerasROOTClassification(object):
def train(self, epochs=10):
self.load()
for branch_index, branch in enumerate(self.branches):
self.plot_input(branch_index)
if self.dumping_enabled:
for branch_index, branch in enumerate(self.branches):
self.plot_input(branch_index)
self.total_epochs = self._read_info("epochs", 0)
......@@ -435,9 +436,9 @@ class KerasROOTClassification(object):
self.total_epochs += epochs
self._write_info("epochs", self.total_epochs)
logger.info("Create/Update predictions for ROC curve")
self.pred_test = self.model.predict(self.x_test)
self.pred_train = self.model.predict(self.x_train)
#logger.info("Create/Update predictions for ROC curve")
#self.pred_test = self.model.predict(self.x_test)
#self.pred_train = self.model.predict(self.x_train)
logger.info("Get test loss and metrics of the model")
self.score = self.model.evaluate(self.x_test, self.y_test, verbose=0, sample_weight=None)
......@@ -595,11 +596,12 @@ def create_setter(dataset_name):
setattr(self, "_"+dataset_name, value)
return setx
'''
# define getters and setters for all datasets
for dataset_name in KerasROOTClassification.dataset_names:
setattr(KerasROOTClassification, dataset_name, property(create_getter(dataset_name),
create_setter(dataset_name)))
'''
if __name__ == "__main__":
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment