diff --git a/beginner_source/examples_autograd/polynomial_custom_function.py b/beginner_source/examples_autograd/polynomial_custom_function.py index 39057c8fd7a..215235ef5ca 100755 --- a/beginner_source/examples_autograd/polynomial_custom_function.py +++ b/beginner_source/examples_autograd/polynomial_custom_function.py @@ -16,6 +16,16 @@ In this implementation we implement our own custom autograd function to perform :math:`P_3'(x)`. By mathematics, :math:`P_3'(x)=\\frac{3}{2}\\left(5x^2-1\\right)` + +.. note:: + This example is designed to demonstrate the mechanics of gradient descent and + backpropagation, not to achieve a perfect fit. A third-degree polynomial has + fundamental limitations in approximating :math:`\\sin(x)` over the range + :math:`[-\\pi, \\pi]`. The Taylor series for sine requires higher-order terms + (5th, 7th degree, etc.) for better accuracy. The resulting polynomial will + fit reasonably well near zero but will diverge from :math:`\\sin(x)` as you + approach :math:`\\pm\\pi`. This is expected and illustrates the importance of + choosing an appropriate model architecture for your problem. """ import torch import math diff --git a/beginner_source/examples_nn/polynomial_module.py b/beginner_source/examples_nn/polynomial_module.py index 77b44ae1a5d..50361ddbb36 100755 --- a/beginner_source/examples_nn/polynomial_module.py +++ b/beginner_source/examples_nn/polynomial_module.py @@ -9,6 +9,16 @@ This implementation defines the model as a custom Module subclass. Whenever you want a model more complex than a simple sequence of existing Modules you will need to define your model this way. + +.. note:: + This example is designed to demonstrate the mechanics of gradient descent and + backpropagation, not to achieve a perfect fit. A third-degree polynomial has + fundamental limitations in approximating :math:`\\sin(x)` over the range + :math:`[-\\pi, \\pi]`. The Taylor series for sine requires higher-order terms + (5th, 7th degree, etc.) for better accuracy. 
The resulting polynomial will + fit reasonably well near zero but will diverge from :math:`\\sin(x)` as you + approach :math:`\\pm\\pi`. This is expected and illustrates the importance of + choosing an appropriate model architecture for your problem. """ import torch import math diff --git a/beginner_source/examples_nn/polynomial_nn.py b/beginner_source/examples_nn/polynomial_nn.py index 70e281ed365..3e826c26ee6 100755 --- a/beginner_source/examples_nn/polynomial_nn.py +++ b/beginner_source/examples_nn/polynomial_nn.py @@ -12,6 +12,16 @@ this is where the nn package can help. The nn package defines a set of Modules, which you can think of as a neural network layer that produces output from input and may have some trainable weights. + +.. note:: + This example is designed to demonstrate the mechanics of gradient descent and + backpropagation, not to achieve a perfect fit. A third-degree polynomial has + fundamental limitations in approximating :math:`\\sin(x)` over the range + :math:`[-\\pi, \\pi]`. The Taylor series for sine requires higher-order terms + (5th, 7th degree, etc.) for better accuracy. The resulting polynomial will + fit reasonably well near zero but will diverge from :math:`\\sin(x)` as you + approach :math:`\\pm\\pi`. This is expected and illustrates the importance of + choosing an appropriate model architecture for your problem. """ import torch import math diff --git a/beginner_source/examples_nn/polynomial_optim.py b/beginner_source/examples_nn/polynomial_optim.py index c0d4896c8f2..a61d5c7bc60 100755 --- a/beginner_source/examples_nn/polynomial_optim.py +++ b/beginner_source/examples_nn/polynomial_optim.py @@ -12,6 +12,16 @@ we use the optim package to define an Optimizer that will update the weights for us. The optim package defines many optimization algorithms that are commonly used for deep learning, including SGD+momentum, RMSProp, Adam, etc. + +.. 
note:: + This example is designed to demonstrate the mechanics of gradient descent and + backpropagation, not to achieve a perfect fit. A third-degree polynomial has + fundamental limitations in approximating :math:`\\sin(x)` over the range + :math:`[-\\pi, \\pi]`. The Taylor series for sine requires higher-order terms + (5th, 7th degree, etc.) for better accuracy. The resulting polynomial will + fit reasonably well near zero but will diverge from :math:`\\sin(x)` as you + approach :math:`\\pm\\pi`. This is expected and illustrates the importance of + choosing an appropriate model architecture for your problem. """ import torch import math diff --git a/beginner_source/examples_tensor/polynomial_numpy.py b/beginner_source/examples_tensor/polynomial_numpy.py index 059ec286ee4..73a1d00239d 100755 --- a/beginner_source/examples_tensor/polynomial_numpy.py +++ b/beginner_source/examples_tensor/polynomial_numpy.py @@ -12,6 +12,16 @@ A numpy array is a generic n-dimensional array; it does not know anything about deep learning or gradients or computational graphs, and is just a way to perform generic numeric computations. + +.. note:: + This example is designed to demonstrate the mechanics of gradient descent and + backpropagation, not to achieve a perfect fit. A third-degree polynomial has + fundamental limitations in approximating :math:`\\sin(x)` over the range + :math:`[-\\pi, \\pi]`. The Taylor series for sine requires higher-order terms + (5th, 7th degree, etc.) for better accuracy. The resulting polynomial will + fit reasonably well near zero but will diverge from :math:`\\sin(x)` as you + approach :math:`\\pm\\pi`. This is expected and illustrates the importance of + choosing an appropriate model architecture for your problem. 
""" import numpy as np import math diff --git a/beginner_source/examples_tensor/polynomial_tensor.py b/beginner_source/examples_tensor/polynomial_tensor.py index 260cf8d2849..0694fd2bd2a 100755 --- a/beginner_source/examples_tensor/polynomial_tensor.py +++ b/beginner_source/examples_tensor/polynomial_tensor.py @@ -16,6 +16,16 @@ The biggest difference between a numpy array and a PyTorch Tensor is that a PyTorch Tensor can run on either CPU or GPU. To run operations on the GPU, just cast the Tensor to a cuda datatype. + +.. note:: + This example is designed to demonstrate the mechanics of gradient descent and + backpropagation, not to achieve a perfect fit. A third-degree polynomial has + fundamental limitations in approximating :math:`\sin(x)` over the range + :math:`[-\pi, \pi]`. The Taylor series for sine requires higher-order terms + (5th, 7th degree, etc.) for better accuracy. The resulting polynomial will + fit reasonably well near zero but will diverge from :math:`\sin(x)` as you + approach :math:`\pm\pi`. This is expected and illustrates the importance of + choosing an appropriate model architecture for your problem. """ import torch