You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
`WARNING:tensorflow:Layers in a Sequential model should only have a single input tensor, but we receive a <class 'list'> input: [<tf.Tensor 'shap_rAnD:0' shape=(1000, 3, 11) dtype=float32>]
Consider rewriting this model with the Functional API.
~\Anaconda3\envs\tf-gpu\lib\site-packages\shap\explainers\_deep\__init__.py in shap_values(self, X, ranked_outputs, output_rank_order, check_additivity)
122 were chosen as "top".
123 """
--> 124 return self.explainer.shap_values(X, ranked_outputs, output_rank_order, check_additivity=check_additivity)
~\Anaconda3\envs\tf-gpu\lib\site-packages\shap\explainers\_deep\deep_tf.py in shap_values(self, X, ranked_outputs, output_rank_order, check_additivity)
303 # run attribution computation graph
304 feature_ind = model_output_ranks[j,i]
--> 305 sample_phis = self.run(self.phi_symbolic(feature_ind), self.model_inputs, joint_input)
306
307 # assign the attributions to the right part of the output arrays
~\Anaconda3\envs\tf-gpu\lib\site-packages\shap\explainers_deep\deep_tf.py in execute_with_overridden_gradients(self, f)
396 # define the computation graph for the attribution values using a custom gradient-like computation
397 try:
--> 398 out = f()
399 finally:
400 # reinstate the backpropagatable check
~\Anaconda3\envs\tf-gpu\lib\site-packages\shap\explainers_deep\deep_tf.py in anon()
356 v = tf.constant(data, dtype=self.model_inputs[i].dtype)
357 inputs.append(v)
--> 358 final_out = out(inputs)
359 tf_execute.record_gradient = tf_backprop._record_gradient
360
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\def_function.py in _call(self, *args, **kwds)
821 # This is the first call of call, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\def_function.py in wrapped_fn(*args, **kwds)
598 # wrapped allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().wrapped(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\framework\func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
# Reproduction script: compute SHAP values for a saved Keras Sequential model.
# Assumes X_Training is a numpy array of shape (num_samples, 3, 11) — TODO confirm
# against the tensor shape reported in the warning above.
regressor = load_model(Model_name)

# NOTE(review): Sequential.predict_classes() was deprecated in TF 2.5 and removed
# in TF 2.6; take the argmax of predict() instead. (The original stored this in an
# unused variable; kept for parity with the issue's snippet.)
pred_x = np.argmax(regressor.predict(X_Training), axis=-1)

# Draw 1000 distinct sample indices once, then split them:
# the first 500 serve as the DeepExplainer background set,
# the last 200 as the explanation targets.
random_ind = np.random.choice(X_Training.shape[0], 1000, replace=False)
data = X_Training[random_ind[0:500]]
print(np.shape(data))

explainer = shap.DeepExplainer(regressor, data)
test1 = X_Training[random_ind[800:1000]]
shap_val = explainer.shap_values(test1)  # error here
`WARNING:tensorflow:Layers in a Sequential model should only have a single input tensor, but we receive a <class 'list'> input: [<tf.Tensor 'shap_rAnD:0' shape=(1000, 3, 11) dtype=float32>]
Consider rewriting this model with the Functional API.
AttributeError Traceback (most recent call last)
in
21 test1 = X_Training[random_ind[800:1000]]
22 print(np.shape(test1))
---> 23 shap_val = explainer.shap_values(test1)
24 shap_val = np.array(shap_val)
25 shap_val = np.reshape(shap_val,(int(shap_val.shape[1]),int(shap_val.shape[2]),int(shap_val.shape[3])))
~\Anaconda3\envs\tf-gpu\lib\site-packages\shap\explainers\_deep\__init__.py in shap_values(self, X, ranked_outputs, output_rank_order, check_additivity)
122 were chosen as "top".
123 """
--> 124 return self.explainer.shap_values(X, ranked_outputs, output_rank_order, check_additivity=check_additivity)
~\Anaconda3\envs\tf-gpu\lib\site-packages\shap\explainers\_deep\deep_tf.py in shap_values(self, X, ranked_outputs, output_rank_order, check_additivity)
303 # run attribution computation graph
304 feature_ind = model_output_ranks[j,i]
--> 305 sample_phis = self.run(self.phi_symbolic(feature_ind), self.model_inputs, joint_input)
306
307 # assign the attributions to the right part of the output arrays
~\Anaconda3\envs\tf-gpu\lib\site-packages\shap\explainers_deep\deep_tf.py in run(self, out, model_inputs, X)
360
361 return final_out
--> 362 return self.execute_with_overridden_gradients(anon)
363
364 def custom_grad(self, op, *grads):
~\Anaconda3\envs\tf-gpu\lib\site-packages\shap\explainers_deep\deep_tf.py in execute_with_overridden_gradients(self, f)
396 # define the computation graph for the attribution values using a custom gradient-like computation
397 try:
--> 398 out = f()
399 finally:
400 # reinstate the backpropagatable check
~\Anaconda3\envs\tf-gpu\lib\site-packages\shap\explainers_deep\deep_tf.py in anon()
356 v = tf.constant(data, dtype=self.model_inputs[i].dtype)
357 inputs.append(v)
--> 358 final_out = out(inputs)
359 tf_execute.record_gradient = tf_backprop._record_gradient
360
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\def_function.py in _call(self, *args, **kwds)
821 # This is the first call of call, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\def_function.py in _initialize(self, args, kwds, add_initializers_to)
695 self._concrete_stateful_fn = (
696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
--> 697 *args, **kwds))
698
699 def invalid_creator_scope(*unused_args, **unused_kwds):
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3073 arg_names=arg_names,
3074 override_flat_arg_shapes=override_flat_arg_shapes,
-> 3075 capture_by_value=self._capture_by_value),
3076 self._function_attributes,
3077 function_spec=self.function_spec,
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\framework\func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant:
988   # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\eager\def_function.py in wrapped_fn(*args, **kwds)
598 # wrapped allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().wrapped(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\framework\func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
AttributeError: in user code:
`
The text was updated successfully, but these errors were encountered: