Merge pull request #93 from kundajelab/targetlayerwarningfix
Target layer warning fix
AvantiShri authored Jan 14, 2020
2 parents 0888551 + 58945b1 commit 667f00b
Showing 5 changed files with 28 additions and 54 deletions.
2 changes: 1 addition & 1 deletion deeplift.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: deeplift
-Version: 0.6.9.2
+Version: 0.6.9.3
 Summary: DeepLIFT (Deep Learning Important FeaTures)
 Home-page: https://github.com/kundajelab/deeplift
 License: UNKNOWN
2 changes: 1 addition & 1 deletion deeplift/__init__.py
@@ -1 +1 @@
-__version__ = '0.6.9.2'
+__version__ = '0.6.9.3'
12 changes: 9 additions & 3 deletions deeplift/conversion/kerasapi_conversion.py
@@ -119,7 +119,9 @@ def conv2d_conversion(config,
             name=("preact_" if len(converted_activation) > 0
                   else "")+name,
             kernel=config[KerasKeys.weights][0],
-            bias=config[KerasKeys.weights][1],
+            bias=(config[KerasKeys.weights][1] if
+                  len(config[KerasKeys.weights]) > 1
+                  else np.zeros(config[KerasKeys.weights][0].shape[-1])),
             strides=config[KerasKeys.strides],
             padding=config[KerasKeys.padding].upper(),
             data_format=config[KerasKeys.data_format],
@@ -150,7 +152,9 @@ def conv1d_conversion(config,
             name=("preact_" if len(converted_activation) > 0
                   else "")+name,
             kernel=config[KerasKeys.weights][0],
-            bias=config[KerasKeys.weights][1],
+            bias=(config[KerasKeys.weights][1] if
+                  len(config[KerasKeys.weights]) > 1
+                  else np.zeros(config[KerasKeys.weights][0].shape[-1])),
             stride=config[KerasKeys.strides],
             padding=config[KerasKeys.padding].upper(),
             conv_mxts_mode=conv_mxts_mode)]
@@ -177,7 +181,9 @@ def dense_conversion(config,
             name=("preact_" if len(converted_activation) > 0
                   else "")+name,
             kernel=config[KerasKeys.weights][0],
-            bias=config[KerasKeys.weights][1],
+            bias=(config[KerasKeys.weights][1] if
+                  len(config[KerasKeys.weights]) > 1
+                  else np.zeros(config[KerasKeys.weights][0].shape[-1])),
             verbose=verbose,
             dense_mxts_mode=dense_mxts_mode)]
     to_return.extend(converted_activation)
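
All three conversions (conv2d, conv1d, dense) apply the same fallback: a Keras layer built with `use_bias=False` stores only the kernel in its weights list, so indexing `[1]` for the bias raised an IndexError during conversion. A minimal numpy sketch of the fallback logic (the names here are illustrative, not part of the library):

```python
import numpy as np

# get_weights() on a Keras layer returns [kernel] when the layer was
# built with use_bias=False, and [kernel, bias] otherwise.
kernel = np.random.randn(4, 8)                    # (input_dim, units)
weights_no_bias = [kernel]
weights_with_bias = [kernel, np.random.randn(8)]

def bias_or_zeros(weights):
    # Mirrors the fix: substitute a zero bias with one entry per output
    # unit (the kernel's last dimension) when no bias is stored.
    return (weights[1] if len(weights) > 1
            else np.zeros(weights[0].shape[-1]))

print(bias_or_zeros(weights_no_bias).shape)    # (8,) -- all zeros
print(bias_or_zeros(weights_with_bias).shape)  # (8,)
```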
64 changes: 16 additions & 48 deletions deeplift/models.py
@@ -123,6 +123,7 @@ def get_target_contribs_of_input_with_filter_ref_func(
                 **kwargs)
 
     def _set_scoring_mode_for_target_layer(self, target_layer):
+        print("TARGET LAYER SET TO "+str(target_layer.get_name()))
         if (deeplift.util.is_type(target_layer,
                                   layers.Activation)):
             raise RuntimeError("You set the target layer to an"
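
The added print makes the chosen target layer visible whenever a scoring function is compiled; the console output looks like the following (the layer name here is a hypothetical example):

```
TARGET LAYER SET TO preact_dense_1
```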
@@ -131,55 +132,22 @@ def _set_scoring_mode_for_target_layer(self, target_layer):
                 +" to set the target layer to the layer *before*"
                 +" the activation layer instead? (recommended for "
                 +" classification)")
-        if (len(target_layer.get_output_layers())==0):
-            scoring_mode=ScoringMode.OneAndZeros
-        else:
-            assert len(target_layer.get_output_layers())==1,\
-             "at most one output was expected for target layer "\
-             +str(target_layer.get_name())+" but got: "+\
-             str(target_layer.get_output_layers())
-            final_activation_layer = target_layer.get_output_layers()[0]
-            if (deeplift.util.is_type(final_activation_layer,
-                                      layers.Activation)==False):
-                raise RuntimeError("There is a layer after your target"
-                                   +" layer but it is not an activation layer"
-                                   +", which seems odd...if doing regression, make"
-                                   +" sure to set the target layer to the last layer")
-            deeplift.util.assert_is_type(final_activation_layer,
-                                         layers.Activation,
-                                         "final_activation_layer")
-            final_activation_type = type(final_activation_layer).__name__
 
-            if (final_activation_type == "Sigmoid"):
-                scoring_mode=ScoringMode.OneAndZeros
-            elif (final_activation_type == "Softmax"):
-                #new_W, new_b =\
-                # deeplift.util.get_mean_normalised_softmax_weights(
-                #    target_layer.W, target_layer.b)
-                #The weights need to be mean normalised before they are
-                #passed in because build_fwd_pass_vars() has already
-                #been called before this function is called,
-                #because get_output_layers() (used in this function)
-                #is updated during the build_fwd_pass_vars()
-                #call - that is why I can't simply mean-normalise
-                #the weights right here :-( (It is a pain and a
-                #recipe for bugs to rebuild the forward pass
-                #vars after they have already been built - in
-                #particular for a model that branches because where
-                #the branches unify you need really want them to be
-                #using the same symbolic variables - no use having
-                #needlessly complicated/redundant graphs and if a node
-                #is common to two outputs, so should its symbolic vars
-                #TODO: I should put in a 'reset_fwd_pass' function and use
-                #it to invalidate the _built_fwd_pass_vars cache and recompile
-                #if (np.allclose(target_layer.W, new_W)==False):
-                #    print("Consider mean-normalising softmax layer")
-                #assert np.allclose(target_layer.b, new_b),\
-                # "Please mean-normalise weights and biases of softmax layer"
-                scoring_mode=ScoringMode.OneAndZeros
+        if (len(target_layer.get_output_layers())>0):
+            if (len(target_layer.get_output_layers())>1):
+                print("WARNING: the target layer"
+                      +str(target_layer.get_name())
+                      +" has multiple output layers"
+                      +str(target_layer.get_output_layers()))
+            else:
+                final_activation_layer = target_layer.get_output_layers()[0]
+                if (deeplift.util.is_type(final_activation_layer,
+                                          layers.Activation)==False):
+                    print("\n\nWARNING!!! There is a layer after your target"
+                          +" layer but it is not an activation layer"
+                          +", which is unusual; double check you have set"
+                          +" the target layer correctly.\n\n")
+        scoring_mode=ScoringMode.OneAndZeros
-        else:
-            raise RuntimeError("Unsupported final_activation_type: "
-                               +final_activation_type)
         target_layer.set_scoring_mode(scoring_mode)
 
     def save_to_yaml_only(self, file_name):
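
The net effect: configurations that previously raised an error (multiple output layers after the target, a non-activation layer after the target, or an unsupported final activation type) now print a warning instead, and every path falls through to ScoringMode.OneAndZeros; only targeting an Activation layer itself remains a hard error. A sketch of how a target layer is typically selected when converting a Keras model (the file path and layer indices are illustrative assumptions):

```python
from deeplift.layers import NonlinearMxtsMode
from deeplift.conversion import kerasapi_conversion as kc

# "model.h5" is a hypothetical saved Keras model file.
deeplift_model = kc.convert_model_from_saved_files(
    "model.h5",
    nonlinear_mxts_mode=NonlinearMxtsMode.DeepLIFT_GenomicsDefault)

# target_layer_idx=-2 points at the linear layer *before* a final
# softmax/sigmoid activation, as the RuntimeError message above
# recommends for classification. After this commit, an unexpected
# layer at that index triggers the WARNING prints above rather
# than a RuntimeError.
contribs_func = deeplift_model.get_target_contribs_func(
    find_scores_layer_idx=0, target_layer_idx=-2)
```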
2 changes: 1 addition & 1 deletion setup.py
@@ -8,7 +8,7 @@
 Implements the methods in "Learning Important Features Through Propagating Activation Differences" by Shrikumar, Greenside & Kundaje, as well as other commonly-used methods such as gradients, guided backprop and integrated gradients. See https://github.com/kundajelab/deeplift for documentation and FAQ.
       """,
       url='https://github.com/kundajelab/deeplift',
-      version='0.6.9.2',
+      version='0.6.9.3',
       packages=['deeplift',
                 'deeplift.layers', 'deeplift.visualization',
                 'deeplift.conversion'],
