Skip to content
Snippets Groups Projects
Commit cb4b04fc authored by Nikolai.Hartmann's avatar Nikolai.Hartmann
Browse files

Rename project class to ClassificationProject

parent ce7b4c4f
No related branches found
No related tags found
No related merge requests found
from toolkit import ClassificationProject
from compare import overlay_ROC, overlay_loss
...@@ -7,7 +7,7 @@ import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import roc_curve, auc
from toolkit import ClassificationProject
""" """
A few functions to compare different setups A few functions to compare different setups
...@@ -76,14 +76,14 @@ if __name__ == "__main__":
identifiers = ["DatasetNumber", "EventNumber"],
step_bkg = 100)
example1 = ClassificationProject("test_sgd",
optimizer="SGD",
optimizer_opts=dict(lr=1000., decay=1e-6, momentum=0.9),
**data_options)
example2 = ClassificationProject("test_adam",
optimizer="Adam",
**data_options)
if not os.path.exists("outputs/test_sgd/scores_test.h5"):
......
...@@ -41,7 +41,7 @@ K.set_session(session)
import ROOT
class ClassificationProject(object):
"""Simple framework to load data from ROOT TTrees and train Keras
neural networks for classification according to some global settings.
...@@ -751,9 +751,9 @@ def create_setter(dataset_name):
return setx
# define getters and setters for all datasets
for dataset_name in ClassificationProject.dataset_names:
setattr(ClassificationProject, dataset_name, property(create_getter(dataset_name),
create_setter(dataset_name)))
if __name__ == "__main__":
...@@ -764,21 +764,21 @@ if __name__ == "__main__":
filename = "/project/etp4/nhartmann/trees/allTrees_m1.8_NoSys.root"
c = ClassificationProject("test4",
signal_trees = [(filename, "GG_oneStep_1705_1105_505_NoSys")],
bkg_trees = [(filename, "ttbar_NoSys"),
(filename, "wjets_Sherpa221_NoSys")
],
optimizer="Adam",
#optimizer="SGD",
#optimizer_opts=dict(lr=100., decay=1e-6, momentum=0.9),
earlystopping_opts=dict(monitor='val_loss',
min_delta=0, patience=2, verbose=0, mode='auto'),
selection="lep1Pt<5000", # cut out a few very weird outliers
branches = ["met", "mt"],
weight_expr = "eventWeight*genWeight",
identifiers = ["DatasetNumber", "EventNumber"],
step_bkg = 100)
np.random.seed(42)
c.train(epochs=20)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment