diff --git a/tests/test_tokenization_layoutlmv2.py b/tests/test_tokenization_layoutlmv2.py
index f654454db173b..38edd5546f7b0 100644
--- a/tests/test_tokenization_layoutlmv2.py
+++ b/tests/test_tokenization_layoutlmv2.py
@@ -1052,7 +1052,7 @@ def test_batch_encode_plus_tensors(self):
         tokenizers = self.get_tokenizers(do_lower_case=False)
         for tokenizer in tokenizers:
             with self.subTest(f"{tokenizer.__class__.__name__}"):
-                words, boxes = self.get_words_and_boxes()
+                words, boxes = self.get_words_and_boxes_batch()
 
                 # A Tensor cannot be build by sequences which are not the same size
                 self.assertRaises(ValueError, tokenizer.batch_encode_plus, words, boxes=boxes, return_tensors="pt")