Skip to content

Commit

Permalink
Merge e1b8f7d into aae09f4
Browse files Browse the repository at this point in the history
  • Loading branch information
fepegar committed Oct 25, 2021
2 parents aae09f4 + e1b8f7d commit f499f0e
Show file tree
Hide file tree
Showing 2 changed files with 41 additions and 29 deletions.
36 changes: 16 additions & 20 deletions tests/transforms/augmentation/test_random_affine.py
Expand Up @@ -72,26 +72,6 @@ def test_bad_center(self):
with self.assertRaises(ValueError):
tio.RandomAffine(center='bad')

def test_translation(self):
    """Check that a fixed translation shifts the image data as expected."""
    transform = tio.RandomAffine(
        scales=(1, 1),
        degrees=0,
        translation=(5, 5),
    )
    transformed = transform(self.sample_subject)

    # NOTE(review): intuitively the data should shift by +5 along every
    # spatial axis, i.e.
    #   original[:, :-5, :-5, :-5] == transformed[:, 5:, 5:, 5:]
    # but the assertion that actually passes flips the direction of the
    # third axis, which hints at an axis-convention (RAS vs LPS)
    # discrepancy in the transform implementation — confirm upstream.
    original_crop = self.sample_subject.t1.data[:, :-5, :-5, 5:]
    transformed_crop = transformed.t1.data[:, 5:, 5:, :-5]
    self.assertTensorAlmostEqual(original_crop, transformed_crop)

def test_negative_scales(self):
    """A negative scaling factor must be rejected at construction time."""
    self.assertRaises(ValueError, tio.RandomAffine, scales=(-1, 1))
Expand Down Expand Up @@ -171,3 +151,19 @@ def test_default_value_label_map(self):
aff = tio.RandomAffine(translation=(0, 1, 1), default_pad_value='otsu')
transformed = aff(image)
assert all(n in (0, 1) for n in transformed.data.flatten())

def test_no_inverse(self):
    """A 90-degree rotation about the third axis should move the most-RAS
    voxel so that anterior goes left (no implicit inversion applied)."""
    shape = (1, 2, 2, 2)
    source = torch.zeros(shape)
    source[0, 1, 1, 1] = 1  # most RAS voxel
    expected = torch.zeros(shape)
    expected[0, 0, 1, 1] = 1
    apply_affine = tio.Affine(
        (1, 1, 1),    # scales
        (0, 0, 90),   # degrees: anterior should go left
        (0, 0, 0),    # translation
    )
    self.assertTensorAlmostEqual(apply_affine(source), expected)
34 changes: 25 additions & 9 deletions torchio/transforms/augmentation/spatial/random_affine.py
Expand Up @@ -243,34 +243,43 @@ def _get_scaling_transform(
scaling_params: Sequence[float],
center_lps: Optional[TypeTripletFloat] = None,
) -> sitk.ScaleTransform:
# scaling_params are inverted so that they are more intuitive
# For example, 1.5 means the objects look 1.5 times larger
# 1.5 means the objects look 1.5 times larger
transform = sitk.ScaleTransform(3)
scaling_params = 1 / np.array(scaling_params)
scaling_params = np.array(scaling_params).astype(float)
transform.SetScale(scaling_params)
if center_lps is not None:
transform.SetCenter(center_lps)
return transform



@staticmethod
def _get_rotation_transform(
        degrees: Sequence[float],
        translation: Sequence[float],
        center_lps: Optional[TypeTripletFloat] = None,
        ) -> sitk.Euler3DTransform:
    """Build the Euler rotation + translation transform.

    Args:
        degrees: Rotation in degrees around each axis, given in RAS+
            orientation.
        translation: Translation along each axis, given in RAS+
            orientation.
        center_lps: Optional center of rotation, already expressed in
            LPS coordinates.

    Returns:
        A SimpleITK ``Euler3DTransform`` configured in LPS coordinates
        (the convention ITK/SimpleITK uses internally).
    """

    def ras_to_lps(triplet: np.ndarray):
        # RAS -> LPS flips the sign of the first two components
        return np.array((-1, -1, 1), dtype=float) * np.asarray(triplet)

    transform = sitk.Euler3DTransform()
    radians = np.radians(degrees)
    translation = np.array(translation).astype(float)

    # SimpleITK uses LPS while the arguments follow RAS+, so flip the
    # relevant components before configuring the transform. (The stale
    # duplicate SetRotation/SetTranslation calls that previously set the
    # un-flipped RAS values — only to be overwritten here — are removed.)
    radians_lps = ras_to_lps(radians)
    translation_lps = ras_to_lps(translation)

    transform.SetRotation(*radians_lps)
    transform.SetTranslation(translation_lps)
    if center_lps is not None:
        transform.SetCenter(center_lps)
    return transform

def get_affine_transform(self, image):
scaling = np.array(self.scales).copy()
rotation = np.array(self.degrees).copy()
translation = np.array(self.translation).copy()
scaling = np.asarray(self.scales).copy()
rotation = np.asarray(self.degrees).copy()
translation = np.asarray(self.translation).copy()

if image.is_2d():
scaling[2] = 1
Expand Down Expand Up @@ -300,6 +309,13 @@ def get_affine_transform(self, image):
transforms = [scaling_transform, rotation_transform]
transform = sitk.CompositeTransform(transforms)

# ResampleImageFilter expects the transform from the output space to
# the input space. Intuitively, the passed arguments should take us
# from the input space to the output space, so we need to invert the
# transform.
# More info at https://github.com/fepegar/torchio/discussions/693
transform = transform.GetInverse()

if self.invert_transform:
transform = transform.GetInverse()

Expand Down

0 comments on commit f499f0e

Please sign in to comment.