Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chebyshev_uniform for Generator1D #213

Open
wants to merge 4 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
25 changes: 23 additions & 2 deletions neurodiffeq/generators.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,21 +4,33 @@
import numpy as np
from typing import List

def _chebyshev_noisy(a, b, n, std):
sample = torch.linspace(0, np.pi, n)
sample_noisy = sample + torch.normal(mean=0, std=std)
nodes = torch.cos(sample_noisy)
nodes = ((a + b) + (b - a) * nodes) / 2
nodes.requires_grad_(True)
return nodes

def _chebyshev_uniform(a, b, n):
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I am not sure that this makes sense. Chebyshev nodes are deterministic sequence of numbers. Its not a function which is valid for a random number.
I think Its better to define "chebyshev_noisy" which adds noise to the chebyshev sequence rather than doing this.

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thank you for your comments, Sathvik! I pushed a new commit with an attempt of chebyshev_noisy.

unif_sample = torch.rand(n) * np.pi
nodes = torch.cos(unif_sample)
nodes = ((a + b) + (b - a) * nodes) / 2
nodes.requires_grad_(True)
return nodes

def _chebyshev_first(a, b, n):
nodes = torch.cos(((torch.arange(n) + 0.5) / n) * np.pi)
nodes = ((a + b) + (b - a) * nodes) / 2
nodes.requires_grad_(True)
return nodes


def _chebyshev_second(a, b, n):
nodes = torch.cos(torch.arange(n) / float(n - 1) * np.pi)
nodes = ((a + b) + (b - a) * nodes) / 2
nodes.requires_grad_(True)
return nodes


def _compute_log_negative(t_min, t_max, whence):
if t_min <= 0 or t_max <= 0:
suggested_t_min = 10 ** t_min
Expand Down Expand Up @@ -108,8 +120,11 @@ class Generator1D(BaseGenerator):
- If set to 'equally-spaced-noisy', a normal noise will be added to the previously mentioned set of points.
- If set to 'log-spaced', the points will be fixed to a set of log-spaced points that go from t_min to t_max.
- If set to 'log-spaced-noisy', a normal noise will be added to the previously mentioned set of points,
- If set to 'chebyshev_uniform', the points are randomly sampled from a uniform distribution between 0 and π, transformed into chebyshev nodes of the first kind, and mapped to (t_min, t_max).
- If set to 'chebyshev1' or 'chebyshev', the points are chebyshev nodes of the first kind over (t_min, t_max).
- If set to 'chebyshev2', the points will be chebyshev nodes of the second kind over [t_min, t_max].
- If set to 'chebyshev_noisy', a normal noise will be added to chebyshev nodes of the first kind over (t_min, t_max).


defaults to 'uniform'.
:type method: str, optional
Expand Down Expand Up @@ -148,12 +163,18 @@ def __init__(self, size, t_min=0.0, t_max=1.0, method='uniform', noise_std=None)
start, end = _compute_log_negative(t_min, t_max, self.__class__)
self.examples = torch.logspace(start, end, self.size, requires_grad=True)
self.getter = lambda: torch.normal(mean=self.examples, std=self.noise_std)
elif method in ['chebyshev_uniform']:
self.examples = _chebyshev_uniform(t_min, t_max, size)
self.getter = lambda: self.examples
elif method in ['chebyshev', 'chebyshev1']:
self.examples = _chebyshev_first(t_min, t_max, size)
self.getter = lambda: self.examples
elif method == 'chebyshev2':
self.examples = _chebyshev_second(t_min, t_max, size)
self.getter = lambda: self.examples
elif method == 'chebyshev_noisy':
self.examples = _chebyshev_noisy(t_min, t_max, size, self.noise_std)
self.getter = lambda: self.examples
else:
raise ValueError(f'Unknown method: {method}')

Expand Down
9 changes: 9 additions & 0 deletions tests/test_generators.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,6 +120,10 @@ def test_generator1d():
x = generator.getter()
assert _check_shape_and_grad(generator, size, x)

generator = Generator1D(size=size, t_min=0.1, t_max=2.0, method='chebyshev_uniform')
x = generator.getter()
assert _check_shape_and_grad(generator, size, x)

generator = Generator1D(size=size, t_min=0.1, t_max=2.0, method='chebyshev')
x = generator.getter()
assert _check_shape_and_grad(generator, size, x)
Expand All @@ -132,6 +136,11 @@ def test_generator1d():
x = generator.getter()
assert _check_shape_and_grad(generator, size, x)

generator = Generator1D(size=size, t_min=0.1, t_max=2.0, method='chebyshev_noisy',
noise_std=0.01)
x = generator.getter()
assert _check_shape_and_grad(generator, size, x)

with raises(ValueError):
generator = Generator1D(size=size, t_min=0.0, t_max=2.0, method='magic')

Expand Down