211 changes: 107 additions & 104 deletions test/test_transforms.py
@@ -403,110 +403,6 @@ def test_random_crop(self):
with self.assertRaisesRegex(ValueError, r"Required crop size .+ is larger then input image size .+"):
t(img)

def test_pad(self):
height = random.randint(10, 32) * 2
width = random.randint(10, 32) * 2
img = torch.ones(3, height, width)
padding = random.randint(1, 20)
fill = random.randint(1, 50)
result = transforms.Compose([
transforms.ToPILImage(),
transforms.Pad(padding, fill=fill),
transforms.ToTensor(),
])(img)
self.assertEqual(result.size(1), height + 2 * padding)
self.assertEqual(result.size(2), width + 2 * padding)
# check that all elements in the padded region correspond
# to the pad value
fill_v = fill / 255
eps = 1e-5
h_padded = result[:, :padding, :]
w_padded = result[:, :, :padding]
torch.testing.assert_close(
h_padded, torch.full_like(h_padded, fill_value=fill_v), check_stride=False, rtol=0.0, atol=eps
)
torch.testing.assert_close(
w_padded, torch.full_like(w_padded, fill_value=fill_v), check_stride=False, rtol=0.0, atol=eps
)
self.assertRaises(ValueError, transforms.Pad(padding, fill=(1, 2)),
transforms.ToPILImage()(img))

def test_pad_with_tuple_of_pad_values(self):
height = random.randint(10, 32) * 2
width = random.randint(10, 32) * 2
img = transforms.ToPILImage()(torch.ones(3, height, width))

padding = tuple([random.randint(1, 20) for _ in range(2)])
output = transforms.Pad(padding)(img)
self.assertEqual(output.size, (width + padding[0] * 2, height + padding[1] * 2))

padding = tuple([random.randint(1, 20) for _ in range(4)])
output = transforms.Pad(padding)(img)
self.assertEqual(output.size[0], width + padding[0] + padding[2])
self.assertEqual(output.size[1], height + padding[1] + padding[3])

# Check that the Pad transform has a working __repr__
transforms.Pad(padding).__repr__()

def test_pad_with_non_constant_padding_modes(self):
"""Unit tests for edge, reflect, symmetric padding"""
img = torch.zeros(3, 27, 27).byte()
img[:, :, 0] = 1 # Constant value added to leftmost edge
img = transforms.ToPILImage()(img)
img = F.pad(img, 1, (200, 200, 200))

# Pad 3 to all sides
edge_padded_img = F.pad(img, 3, padding_mode='edge')
# First 6 elements of leftmost edge in the middle of the image, values are in order:
# edge_pad, edge_pad, edge_pad, constant_pad, constant value added to leftmost edge, 0
edge_middle_slice = np.asarray(edge_padded_img).transpose(2, 0, 1)[0][17][:6]
assert_equal(edge_middle_slice, np.asarray([200, 200, 200, 200, 1, 0], dtype=np.uint8), check_stride=False)
self.assertEqual(transforms.ToTensor()(edge_padded_img).size(), (3, 35, 35))

# Pad 3 to left/right, 2 to top/bottom
reflect_padded_img = F.pad(img, (3, 2), padding_mode='reflect')
# First 6 elements of leftmost edge in the middle of the image, values are in order:
# reflect_pad, reflect_pad, reflect_pad, constant_pad, constant value added to leftmost edge, 0
reflect_middle_slice = np.asarray(reflect_padded_img).transpose(2, 0, 1)[0][17][:6]
assert_equal(reflect_middle_slice, np.asarray([0, 0, 1, 200, 1, 0], dtype=np.uint8), check_stride=False)
self.assertEqual(transforms.ToTensor()(reflect_padded_img).size(), (3, 33, 35))

# Pad 3 to left, 2 to top, 2 to right, 1 to bottom
symmetric_padded_img = F.pad(img, (3, 2, 2, 1), padding_mode='symmetric')
# First 6 elements of leftmost edge in the middle of the image, values are in order:
# sym_pad, sym_pad, sym_pad, constant_pad, constant value added to leftmost edge, 0
symmetric_middle_slice = np.asarray(symmetric_padded_img).transpose(2, 0, 1)[0][17][:6]
assert_equal(symmetric_middle_slice, np.asarray([0, 1, 200, 200, 1, 0], dtype=np.uint8), check_stride=False)
self.assertEqual(transforms.ToTensor()(symmetric_padded_img).size(), (3, 32, 34))

# Check negative padding explicitly for symmetric case, since it is not
# implemented for tensor case to compare to
# Crop 1 to left, pad 2 to top, pad 3 to right, crop 3 to bottom
symmetric_padded_img_neg = F.pad(img, (-1, 2, 3, -3), padding_mode='symmetric')
symmetric_neg_middle_left = np.asarray(symmetric_padded_img_neg).transpose(2, 0, 1)[0][17][:3]
symmetric_neg_middle_right = np.asarray(symmetric_padded_img_neg).transpose(2, 0, 1)[0][17][-4:]
assert_equal(symmetric_neg_middle_left, np.asarray([1, 0, 0], dtype=np.uint8), check_stride=False)
assert_equal(symmetric_neg_middle_right, np.asarray([200, 200, 0, 0], dtype=np.uint8), check_stride=False)
self.assertEqual(transforms.ToTensor()(symmetric_padded_img_neg).size(), (3, 28, 31))

def test_pad_raises_with_invalid_pad_sequence_len(self):
with self.assertRaises(ValueError):
transforms.Pad(())

with self.assertRaises(ValueError):
transforms.Pad((1, 2, 3))

with self.assertRaises(ValueError):
transforms.Pad((1, 2, 3, 4, 5))

def test_pad_with_mode_F_images(self):
pad = 2
transform = transforms.Pad(pad)

img = Image.new("F", (10, 10))
padded_img = transform(img)
self.assertSequenceEqual(padded_img.size, [edge_size + 2 * pad for edge_size in img.size])

def test_lambda(self):
trans = transforms.Lambda(lambda x: x.add(10))
x = torch.randn(10)
@@ -1831,6 +1727,113 @@ def test_random_erasing(self):
t.__repr__()


class TestPad:

def test_pad(self):
height = random.randint(10, 32) * 2
width = random.randint(10, 32) * 2
img = torch.ones(3, height, width)
padding = random.randint(1, 20)
fill = random.randint(1, 50)
result = transforms.Compose([
transforms.ToPILImage(),
transforms.Pad(padding, fill=fill),
transforms.ToTensor(),
])(img)
assert result.size(1) == height + 2 * padding
assert result.size(2) == width + 2 * padding
# check that all elements in the padded region correspond
# to the pad value
fill_v = fill / 255
eps = 1e-5
h_padded = result[:, :padding, :]
w_padded = result[:, :, :padding]
torch.testing.assert_close(
h_padded, torch.full_like(h_padded, fill_value=fill_v), check_stride=False, rtol=0.0, atol=eps
)
torch.testing.assert_close(
w_padded, torch.full_like(w_padded, fill_value=fill_v), check_stride=False, rtol=0.0, atol=eps
)
pytest.raises(ValueError, transforms.Pad(padding, fill=(1, 2)),
transforms.ToPILImage()(img))
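
The eps comparison above works because ToTensor() rescales 8-bit pixel values into [0, 1], so the constant fill value reappears as fill / 255 in the resulting tensor. A minimal standalone sketch of that behaviour (not part of this diff), using fixed values instead of the randomized ones in the test:

```python
import torch
from torchvision import transforms

img = transforms.ToPILImage()(torch.ones(3, 4, 4))  # small all-white RGB image
padded = transforms.Pad(2, fill=10)(img)             # 2-pixel border with value 10
tensor = transforms.ToTensor()(padded)               # rescales uint8 values into [0, 1]

print(tensor[0, 0, 0].item())                        # ~0.0392, i.e. 10 / 255
```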

def test_pad_with_tuple_of_pad_values(self):
height = random.randint(10, 32) * 2
width = random.randint(10, 32) * 2
img = transforms.ToPILImage()(torch.ones(3, height, width))

padding = tuple([random.randint(1, 20) for _ in range(2)])
output = transforms.Pad(padding)(img)
assert output.size == (width + padding[0] * 2, height + padding[1] * 2)

padding = tuple([random.randint(1, 20) for _ in range(4)])
output = transforms.Pad(padding)(img)
assert output.size[0] == width + padding[0] + padding[2]
assert output.size[1] == height + padding[1] + padding[3]

# Check that the Pad transform has a working __repr__
transforms.Pad(padding).__repr__()
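
For reference, the padding-tuple semantics this test relies on: a 2-tuple pads (left/right, top/bottom), a 4-tuple pads (left, top, right, bottom), and PIL reports size as (width, height). A short sketch with fixed values (not part of this diff):

```python
from PIL import Image
from torchvision import transforms

img = Image.new("RGB", (10, 20))             # PIL size is (width, height)

out = transforms.Pad((3, 5))(img)            # 3 px left/right, 5 px top/bottom
assert out.size == (10 + 2 * 3, 20 + 2 * 5)  # (16, 30)

out = transforms.Pad((1, 2, 3, 4))(img)      # left, top, right, bottom
assert out.size == (10 + 1 + 3, 20 + 2 + 4)  # (14, 26)
```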

def test_pad_with_non_constant_padding_modes(self):
"""Unit tests for edge, reflect, symmetric padding"""
img = torch.zeros(3, 27, 27).byte()
img[:, :, 0] = 1 # Constant value added to leftmost edge
img = transforms.ToPILImage()(img)
img = F.pad(img, 1, (200, 200, 200))

# Pad 3 to all sides
edge_padded_img = F.pad(img, 3, padding_mode='edge')
# First 6 elements of leftmost edge in the middle of the image, values are in order:
# edge_pad, edge_pad, edge_pad, constant_pad, constant value added to leftmost edge, 0
edge_middle_slice = np.asarray(edge_padded_img).transpose(2, 0, 1)[0][17][:6]
assert_equal(edge_middle_slice, np.asarray([200, 200, 200, 200, 1, 0], dtype=np.uint8), check_stride=False)
assert transforms.ToTensor()(edge_padded_img).size() == (3, 35, 35)

# Pad 3 to left/right, 2 to top/bottom
reflect_padded_img = F.pad(img, (3, 2), padding_mode='reflect')
# First 6 elements of leftmost edge in the middle of the image, values are in order:
# reflect_pad, reflect_pad, reflect_pad, constant_pad, constant value added to leftmost edge, 0
reflect_middle_slice = np.asarray(reflect_padded_img).transpose(2, 0, 1)[0][17][:6]
assert_equal(reflect_middle_slice, np.asarray([0, 0, 1, 200, 1, 0], dtype=np.uint8), check_stride=False)
assert transforms.ToTensor()(reflect_padded_img).size() == (3, 33, 35)

# Pad 3 to left, 2 to top, 2 to right, 1 to bottom
symmetric_padded_img = F.pad(img, (3, 2, 2, 1), padding_mode='symmetric')
# First 6 elements of leftmost edge in the middle of the image, values are in order:
# sym_pad, sym_pad, sym_pad, constant_pad, constant value added to leftmost edge, 0
symmetric_middle_slice = np.asarray(symmetric_padded_img).transpose(2, 0, 1)[0][17][:6]
assert_equal(symmetric_middle_slice, np.asarray([0, 1, 200, 200, 1, 0], dtype=np.uint8), check_stride=False)
assert transforms.ToTensor()(symmetric_padded_img).size() == (3, 32, 34)

# Check negative padding explicitly for symmetric case, since it is not
# implemented for tensor case to compare to
# Crop 1 to left, pad 2 to top, pad 3 to right, crop 3 to bottom
symmetric_padded_img_neg = F.pad(img, (-1, 2, 3, -3), padding_mode='symmetric')
symmetric_neg_middle_left = np.asarray(symmetric_padded_img_neg).transpose(2, 0, 1)[0][17][:3]
symmetric_neg_middle_right = np.asarray(symmetric_padded_img_neg).transpose(2, 0, 1)[0][17][-4:]
assert_equal(symmetric_neg_middle_left, np.asarray([1, 0, 0], dtype=np.uint8), check_stride=False)
assert_equal(symmetric_neg_middle_right, np.asarray([200, 200, 0, 0], dtype=np.uint8), check_stride=False)
assert transforms.ToTensor()(symmetric_padded_img_neg).size() == (3, 28, 31)
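
The expected slices above follow from how each mode extends the border. A small 1-D illustration (not the code under test) using numpy.pad, whose 'edge', 'reflect' and 'symmetric' modes mirror values in the same way as the non-constant modes exercised here:

```python
import numpy as np

a = np.array([1, 2, 3, 4])

print(np.pad(a, 2, mode='edge'))       # [1 1 1 2 3 4 4 4]  repeat the border value
print(np.pad(a, 2, mode='reflect'))    # [3 2 1 2 3 4 3 2]  mirror, excluding the border value
print(np.pad(a, 2, mode='symmetric'))  # [2 1 1 2 3 4 4 3]  mirror, including the border value
```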

def test_pad_raises_with_invalid_pad_sequence_len(self):
with pytest.raises(ValueError):
transforms.Pad(())

with pytest.raises(ValueError):
transforms.Pad((1, 2, 3))

with pytest.raises(ValueError):
transforms.Pad((1, 2, 3, 4, 5))

def test_pad_with_mode_F_images(self):
pad = 2
transform = transforms.Pad(pad)

img = Image.new("F", (10, 10))
padded_img = transform(img)
assert_equal(padded_img.size, [edge_size + 2 * pad for edge_size in img.size], check_stride=False)
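
Mode "F" is PIL's 32-bit floating-point single-band mode; the check above only asserts that each spatial dimension grows by 2 * pad. A minimal sketch with concrete numbers (the mode-preservation check is an assumption, not something the test asserts):

```python
from PIL import Image
from torchvision import transforms

img = Image.new("F", (10, 10))     # 32-bit float, single band
padded = transforms.Pad(2)(img)    # default constant fill of 0

assert padded.size == (14, 14)     # each side grew by 2 * pad
assert padded.mode == "F"          # assumption: constant padding preserves the mode
```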


def test_adjust_brightness():
x_shape = [2, 2, 3]
x_data = [0, 5, 13, 54, 135, 226, 37, 8, 234, 90, 255, 1]