From 49ce290098a62bd0b66ad47c7fab1c308264b8e7 Mon Sep 17 00:00:00 2001
From: Nikolai Hartmann <Nikolai.Hartmann@physik.uni-muenchen.de>
Date: Thu, 2 Aug 2018 11:06:17 +0200
Subject: [PATCH] starting tfhelpers

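Add a first helper, get_single_neuron_function: it wraps a Keras backend
function that evaluates the output of a single neuron of a given layer,
optionally applying a scaler to the input first, and is used in plotting.py
to replace the local eval_single_neuron closure.

Minimal usage sketch (model, scaler and X are hypothetical placeholders for a
compiled Keras model, a fitted scikit-learn scaler and a 2D numpy array with
one row per data point):

    # model, scaler, X: placeholders, not defined in this patch
    import keras.backend as K
    from tfhelpers import get_single_neuron_function

    score = get_single_neuron_function(K, model, layer=3, neuron=0, scaler=scaler)
    values = score(X)  # 1D array with the output of neuron 0 in layer 3 per row of X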
---
 plotting.py  | 14 ++++----------
 tfhelpers.py | 18 ++++++++++++++++++
 2 files changed, 22 insertions(+), 10 deletions(-)
 create mode 100644 tfhelpers.py

diff --git a/plotting.py b/plotting.py
index 4340f7a..7341807 100644
--- a/plotting.py
+++ b/plotting.py
@@ -86,7 +86,7 @@ def plot_NN_vs_var_2D(plotname, means,
             print("Setting zmin to {}".format(zmin))
         lvls = np.logspace(math.log10(zmin), math.log10(zmax), ncontours)
         if only_pixels:
-            pcm = ax.pcolormesh(sequence1, sequence2, scores, norm=matplotlib.colors.LogNorm(vmin=zmin, vmax=zmax), cmap=cmap)
+            pcm = ax.pcolormesh(sequence1, sequence2, scores, norm=matplotlib.colors.LogNorm(vmin=zmin, vmax=zmax), cmap=cmap, linewidth=0, rasterized=True)
         else:
             pcm = ax.contourf(sequence1, sequence2, scores, levels=lvls, norm=matplotlib.colors.LogNorm(vmin=zmin, vmax=zmax), cmap=cmap)
         if black_contourlines:
@@ -95,7 +95,7 @@ def plot_NN_vs_var_2D(plotname, means,
         cbar = fig.colorbar(pcm, ax=ax, extend='max', ticks=lvls, format=l_f)
     else:
         if only_pixels:
-            pcm = ax.pcolormesh(sequence1, sequence2, scores, norm=matplotlib.colors.Normalize(vmin=zmin, vmax=zmax), cmap=cmap)
+            pcm = ax.pcolormesh(sequence1, sequence2, scores, norm=matplotlib.colors.Normalize(vmin=zmin, vmax=zmax), cmap=cmap, linewidth=0, rasterized=True)
         else:
             pcm = ax.contourf(sequence1, sequence2, scores, ncontours, norm=matplotlib.colors.Normalize(vmin=zmin, vmax=zmax), cmap=cmap)
         if black_contourlines:
@@ -248,16 +248,10 @@ if __name__ == "__main__":
                           var1_label="met [GeV]", var2_label="mt [GeV]")
 
     import keras.backend as K
-    # single neuron
-    def eval_single_neuron(x):
-        layer = 3
-        neuron = 0
-        x_eval = c.scaler.transform(x)
-        f = K.function([c.model.input]+[K.learning_phase()], [c.model.layers[layer].output])
-        return f([x_eval])[0][:,neuron]
+    from tfhelpers import get_single_neuron_function
 
     plot_NN_vs_var_2D("mt_vs_met_crosscheck.pdf", means=mean_signal,
-                      scorefun=eval_single_neuron,
+                      scorefun=get_single_neuron_function(K, c.model, layer=3, neuron=0, scaler=c.scaler),
                       var1_index=c.branches.index("met"), var1_range=(0, 1000, 10),
                       var2_index=c.branches.index("mt"), var2_range=(0, 500, 10),
                       var1_label="met [GeV]", var2_label="mt [GeV]")
diff --git a/tfhelpers.py b/tfhelpers.py
new file mode 100644
index 0000000..3bf9a44
--- /dev/null
+++ b/tfhelpers.py
@@ -0,0 +1,18 @@
+"Helper functions using keras or tensorflow"
+
+def get_single_neuron_function(K, model, layer, neuron, scaler=None):
+
+    f = K.function([model.input]+[K.learning_phase()], [model.layers[layer].output[:,neuron]])
+
+    def eval_single_neuron(x):
+        if scaler is not None:
+            x_eval = scaler.transform(x)
+        else:
+            x_eval = x
+        return f([x_eval])[0]
+
+    return eval_single_neuron
-- 
GitLab