Commit c4c88456 authored by Nikolai

single neuron output works with multiple inputs

parent 0024d69c
@@ -126,11 +126,15 @@ if args.mode.startswith("mean"):
         logscale=args.log, only_pixels=(not args.contour)
     )
 else:
+    if hasattr(c, "get_input_list"):
+        transform_function = lambda inp : c.get_input_list(c.scaler.transform(inp))
+    else:
+        transform_function = c.scaler.transform
     plot_NN_vs_var_2D_all(
         args.output_filename,
         means=means,
         model=c.model,
-        transform_function=c.scaler.transform,
+        transform_function=transform_function,
         varx_index=varx_index,
         vary_index=vary_index,
         xmin=varx_range[0], xmax=varx_range[1], nbinsx=varx_range[2],
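For context: this hunk replaces the hard-wired c.scaler.transform with a transform_function that, when the classifier object c provides get_input_list, also splits the scaled array into the per-input list that a multi-input Keras model expects. Below is a minimal sketch of the dispatch using hypothetical stand-ins for c, its scaler, and get_input_list (the real splitting logic lives elsewhere in the repository and may differ).

import numpy as np

class DummyScaler:
    # stand-in for c.scaler; the real object is presumably an sklearn-style scaler
    def transform(self, x):
        return np.asarray(x, dtype=float)

class MultiInputClassifier:
    # hypothetical stand-in for c when the underlying Keras model has several inputs
    def __init__(self):
        self.scaler = DummyScaler()

    def get_input_list(self, x):
        # split the scaled 2D array column-wise, one array per model input
        return [x[:, i:i + 1] for i in range(x.shape[1])]

c = MultiInputClassifier()

# same dispatch as in the hunk above
if hasattr(c, "get_input_list"):
    transform_function = lambda inp: c.get_input_list(c.scaler.transform(inp))
else:
    transform_function = c.scaler.transform

x = np.random.rand(5, 3)  # 5 events, 3 input variables
print([a.shape for a in transform_function(x)])  # -> [(5, 1), (5, 1), (5, 1)]

Keeping the single-input branch as the bound method c.scaler.transform (rather than calling it) lets plot_NN_vs_var_2D_all treat both cases as the same plain callable.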
@@ -15,14 +15,21 @@ logger.addHandler(logging.NullHandler())
 def get_single_neuron_function(model, layer, neuron, scaler=None, input_transform=None):
-    f = K.function([model.input]+[K.learning_phase()], [model.layers[layer].output[:,neuron]])
+    inp = model.input
+    if not isinstance(inp, list):
+        inp = [inp]
+    f = K.function(inp+[K.learning_phase()], [model.layers[layer].output[:,neuron]])
     def eval_single_neuron(x):
+        x_eval = x
         if scaler is not None:
             x_eval = scaler.transform(x)
+        if input_transform is not None:
+            x_eval = input_transform(x_eval)
         else:
-            x_eval = x
-        return f([x_eval])[0]
+            x_eval = [x_eval]
+        return f(x_eval)[0]
     return eval_single_neuron
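This hunk makes get_single_neuron_function work with multi-input models: model.input is wrapped in a list when it is a single tensor, and the optional input_transform can turn one flat array into the per-input list before it is fed to the backend function. The usage sketch below rests on assumptions: classic Keras with the TensorFlow 1 backend (where K.learning_phase() and K.function behave as in this commit), a toy two-input model, a hypothetical split_inputs helper, and the patched get_single_neuron_function already imported into scope.

import numpy as np
from keras.layers import Input, Dense, concatenate
from keras.models import Model

# toy two-input network standing in for the real classifier's model
in_a = Input(shape=(2,))
in_b = Input(shape=(3,))
hidden = Dense(4, activation="relu")(concatenate([in_a, in_b]))
out = Dense(1, activation="sigmoid")(hidden)
model = Model(inputs=[in_a, in_b], outputs=out)

# hypothetical transform: split one flat (n_events, 5) array into the
# per-input list the model expects (the repository's get_input_list
# presumably plays this role)
split_inputs = lambda x: [x[:, :2], x[:, 2:]]

# get_single_neuron_function is assumed to be the patched function above
f = get_single_neuron_function(model, -2, 0, input_transform=split_inputs)

x = np.random.rand(10, 5)  # 10 events, 5 flat input variables
print(f(x).shape)          # one hidden neuron's activations -> (10,)

Because input_transform takes over building the input list, the unchanged single-input path still works: with no transform, the else branch simply wraps the single array in a one-element list.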