Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

SHAP with sequences data. #1759

Open
ph0123 opened this issue Jan 19, 2021 · 1 comment
Open

SHAP with sequences data. #1759

ph0123 opened this issue Jan 19, 2021 · 1 comment
Labels
deep explainer Relating to DeepExplainer, tensorflow or pytorch

Comments

@ph0123
Copy link

ph0123 commented Jan 19, 2021

regressor = load_model(Model_name)

pred_x = regressor.predict_classes(X_Training)
random_ind = np.random.choice(X_Training.shape[0], 1000, replace=False)
data = X_Training[random_ind[0:500]]
print(np.shape(data))
#explainer = shap.DeepExplainer((regressor.layers[0].input, regressor.layers[-1].output),data)
explainer = shap.DeepExplainer(regressor,data)
test1 = X_Training[random_ind[800:1000]]
shap_val = explainer.shap_values(test1) #error here

`WARNING:tensorflow:Layers in a Sequential model should only have a single input tensor, but we receive a <class 'list'> input: [<tf.Tensor 'shap_rAnD:0' shape=(1000, 3, 11) dtype=float32>]
Consider rewriting this model with the Functional API.


AttributeError Traceback (most recent call last)
in
21 test1 = X_Training[random_ind[800:1000]]
22 print(np.shape(test1))
---> 23 shap_val = explainer.shap_values(test1)
24 shap_val = np.array(shap_val)
25 shap_val = np.reshape(shap_val,(int(shap_val.shape[1]),int(shap_val.shape[2]),int(shap_val.shape[3])))

~\Anaconda3\envs\tf-gpu\lib\site-packages\shap\explainers\_deep\__init__.py in shap_values(self, X, ranked_outputs, output_rank_order, check_additivity)
122 were chosen as "top".
123 """
--> 124 return self.explainer.shap_values(X, ranked_outputs, output_rank_order, check_additivity=check_additivity)

~\Anaconda3\envs\tf-gpu\lib\site-packages\shap\explainers\_deep\deep_tf.py in shap_values(self, X, ranked_outputs, output_rank_order, check_additivity)
303 # run attribution computation graph
304 feature_ind = model_output_ranks[j,i]
--> 305 sample_phis = self.run(self.phi_symbolic(feature_ind), self.model_inputs, joint_input)
306
307 # assign the attributions to the right part of the output arrays

~\Anaconda3\envs\tf-gpu\lib\site-packages\shap\explainers\_deep\deep_tf.py in run(self, out, model_inputs, X)
360
361 return final_out
--> 362 return self.execute_with_overridden_gradients(anon)
363
364 def custom_grad(self, op, *grads):

~\Anaconda3\envs\tf-gpu\lib\site-packages\shap\explainers\_deep\deep_tf.py in execute_with_overridden_gradients(self, f)
396 # define the computation graph for the attribution values using a custom gradient-like computation
397 try:
--> 398 out = f()
399 finally:
400 # reinstate the backpropagatable check

~\Anaconda3\envs\tf-gpu\lib\site-packages\shap\explainers\_deep\deep_tf.py in anon()
356 v = tf.constant(data, dtype=self.model_inputs[i].dtype)
357 inputs.append(v)
--> 358 final_out = out(inputs)
359 tf_execute.record_gradient = tf_backprop._record_gradient
360

~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()

~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\def_function.py in _call(self, *args, **kwds)
821 # This is the first call of call, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less

~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\def_function.py in _initialize(self, args, kwds, add_initializers_to)
695 self._concrete_stateful_fn = (
696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
--> 697 *args, **kwds))
698
699 def invalid_creator_scope(*unused_args, **unused_kwds):

~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857

~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs

~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3073 arg_names=arg_names,
3074 override_flat_arg_shapes=override_flat_arg_shapes,
-> 3075 capture_by_value=self._capture_by_value),
3076 self._function_attributes,
3077 function_spec=self.function_spec,

~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\framework\func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: func_outputs contains only Tensors, CompositeTensors,

~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\def_function.py in wrapped_fn(*args, **kwds)
598 # wrapped allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().wrapped(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602

~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\framework\func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise

AttributeError: in user code:

C:\Users\cnp\Anaconda3\envs\tf-gpu\lib\site-packages\shap\explainers\_deep\deep_tf.py:240 grad_graph  *
    out = self.model(shap_rAnD)
C:\Users\cnp\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\keras\engine\base_layer.py:985 __call__  **
    outputs = call_fn(inputs, *args, **kwargs)
C:\Users\cnp\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\keras\engine\sequential.py:372 call
    return super(Sequential, self).call(inputs, training=training, mask=mask)
C:\Users\cnp\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\keras\engine\functional.py:386 call
    inputs, training=training, mask=mask)
C:\Users\cnp\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\keras\engine\functional.py:508 _run_internal_graph
    outputs = node.layer(*args, **kwargs)
C:\Users\cnp\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\keras\layers\wrappers.py:530 __call__
    return super(Bidirectional, self).__call__(inputs, **kwargs)
C:\Users\cnp\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\keras\engine\base_layer.py:985 __call__
    outputs = call_fn(inputs, *args, **kwargs)
C:\Users\cnp\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\keras\layers\wrappers.py:644 call
    initial_state=forward_state, **kwargs)
C:\Users\cnp\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\keras\layers\recurrent.py:663 __call__
    return super(RNN, self).__call__(inputs, **kwargs)
C:\Users\cnp\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\keras\engine\base_layer.py:985 __call__
    outputs = call_fn(inputs, *args, **kwargs)
C:\Users\cnp\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\keras\layers\recurrent_v2.py:1183 call
    runtime) = lstm_with_backend_selection(**normal_lstm_kwargs)
C:\Users\cnp\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\keras\layers\recurrent_v2.py:1559 lstm_with_backend_selection
    function.register(defun_gpu_lstm, **params)
C:\Users\cnp\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\function.py:3241 register
    concrete_func.add_gradient_functions_to_graph()
C:\Users\cnp\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\function.py:2063 add_gradient_functions_to_graph
    self._delayed_rewrite_functions.forward_backward())
C:\Users\cnp\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\function.py:621 forward_backward
    forward, backward = self._construct_forward_backward(num_doutputs)
C:\Users\cnp\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\function.py:669 _construct_forward_backward
    func_graph=backwards_graph)
C:\Users\cnp\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\framework\func_graph.py:986 func_graph_from_py_func
    func_outputs = python_func(*func_args, **func_kwargs)
C:\Users\cnp\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\function.py:659 _backprop_function
    src_graph=self._func_graph)
C:\Users\cnp\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\ops\gradients_util.py:669 _GradientsHelper
    lambda: grad_fn(op, *out_grads))
C:\Users\cnp\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\ops\gradients_util.py:336 _MaybeCompile
    return grad_fn()  # Exit early
C:\Users\cnp\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\ops\gradients_util.py:669 <lambda>
    lambda: grad_fn(op, *out_grads))
C:\Users\cnp\Anaconda3\envs\tf-gpu\lib\site-packages\shap\explainers\_deep\deep_tf.py:368 custom_grad
    out = op_handlers[type_name](self, op, *grads) # we cut off the shap_ prefex before the lookup
C:\Users\cnp\Anaconda3\envs\tf-gpu\lib\site-packages\shap\explainers\_deep\deep_tf.py:657 handler
    return linearity_with_excluded_handler(input_inds, explainer, op, *grads)
C:\Users\cnp\Anaconda3\envs\tf-gpu\lib\site-packages\shap\explainers\_deep\deep_tf.py:664 linearity_with_excluded_handler
    assert not explainer._variable_inputs(op)[i], str(i) + "th input to " + op.name + " cannot vary!"
C:\Users\cnp\Anaconda3\envs\tf-gpu\lib\site-packages\shap\explainers\_deep\deep_tf.py:217 _variable_inputs
    out[i] = t.name in self.between_tensors

AttributeError: 'TFDeep' object has no attribute 'between_tensors'

`

@CloseChoice
Copy link
Collaborator

Potentially related to #3419

@CloseChoice CloseChoice added the deep explainer Relating to DeepExplainer, tensorflow or pytorch label Dec 8, 2023
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
deep explainer Relating to DeepExplainer, tensorflow or pytorch
Projects
None yet
Development

No branches or pull requests

2 participants