We read every piece of feedback and take your input very seriously.
To see all available qualifiers, see our documentation.
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
# Fit a degree-1 PolynomialModel to (X, y) by minimizing MSE with Adam.
intercept = tf.Variable(polymodel.params[0], dtype=tf.float32)
slope = tf.Variable(polymodel.params[1], dtype=tf.float32)


def predict(intercept, slope, X):
    """Return model predictions for X using the given coefficient variables.

    NOTE(review): for gradients to flow, PolynomialModel.__call__ must build
    its output from TensorFlow ops on `intercept`/`slope`. If it converts the
    variables to numpy internally, the tape records nothing and minimize()
    raises "No gradients provided for any variable" — confirm against the
    PolynomialModel implementation.
    """
    m = PolynomialModel([intercept, slope], 0)
    return m(X)


def mse(labels, preds):
    """Mean squared error over the squeezed (flattened) labels/predictions."""
    return tf.keras.losses.mse(tf.squeeze(labels), tf.squeeze(preds))


def loss_func(intercept, slope, X, y):
    """Scalar training loss for the current coefficient values."""
    predictions = predict(intercept, slope, X)
    return mse(y, predictions)


# Fix 1: in TF2, optimizer.minimize() records its own GradientTape around the
# loss callable, so the original's outer `with tf.GradientTape()` block and
# tape.watch() calls were a no-op and have been removed.
# Fix 2: `lr=` is deprecated in tf.keras optimizers; the kwarg is
# `learning_rate`.
opt = tf.keras.optimizers.Adam(learning_rate=0.05)
for _ in range(20):
    opt.minimize(lambda: loss_func(intercept, slope, X, y),
                 var_list=[intercept, slope])
Traceback:
--------------------------------------------------------------------------- ValueError Traceback (most recent call last) <ipython-input-20-3ef69543ef29> in <module> 19 opt = tf.keras.optimizers.Adam(learning_rate=0.01) 20 for i in range(20): ---> 21 opt.minimize(lambda : loss_func(intercept, slope, X, y), var_list=[intercept, slope]) ~\anaconda3\envs\ds-py37\lib\site-packages\tensorflow\python\keras\optimizer_v2\optimizer_v2.py in minimize(self, loss, var_list, grad_loss, name, tape) 496 grads_and_vars = self._compute_gradients( 497 loss, var_list=var_list, grad_loss=grad_loss, tape=tape) --> 498 return self.apply_gradients(grads_and_vars, name=name) 499 500 def _compute_gradients(self, loss, var_list, grad_loss=None, tape=None): ~\anaconda3\envs\ds-py37\lib\site-packages\tensorflow\python\keras\optimizer_v2\optimizer_v2.py in apply_gradients(self, grads_and_vars, name, experimental_aggregate_gradients) 596 RuntimeError: If called in a cross-replica context. 597 """ --> 598 grads_and_vars = optimizer_utils.filter_empty_gradients(grads_and_vars) 599 var_list = [v for (_, v) in grads_and_vars] 600 ~\anaconda3\envs\ds-py37\lib\site-packages\tensorflow\python\keras\optimizer_v2\utils.py in filter_empty_gradients(grads_and_vars) 77 if not filtered: 78 raise ValueError("No gradients provided for any variable: %s." % ---> 79 ([v.name for _, v in grads_and_vars],)) 80 if vars_with_empty_grads: 81 logging.warning( ValueError: No gradients provided for any variable: ['Variable:0', 'Variable:0'].
The text was updated successfully, but these errors were encountered:
Relates to #19
Sorry, something went wrong.
abhi8893
No branches or pull requests
Traceback:
The text was updated successfully, but these errors were encountered: