Speedup tf tests (huggingface#10601)
* Pipeline tests should be slow

* Temporarily mark some tests as slow

* Temporarily mark Barthez tests as slow
LysandreJik authored and Iwontbecreative committed Jul 15, 2021
1 parent a790445 commit 08cf5fe
Showing 2 changed files with 8 additions and 1 deletion.
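
For context on the decorator being added throughout this commit: `slow` comes from `transformers.testing_utils` and skips a test unless slow tests are explicitly enabled via an environment variable. The sketch below shows a minimal way such a gate can be built with plain `unittest`; the helper name `parse_flag_from_env` and the implementation details are assumptions here rather than the library's exact code, with only the `RUN_SLOW` convention taken from the transformers test suite.

import os
import unittest


def parse_flag_from_env(key, default=False):
    # Interpret "1", "true" or "yes" (any case) as enabling the flag.
    value = os.environ.get(key, "")
    if not value:
        return default
    return value.lower() in ("1", "true", "yes")


_run_slow_tests = parse_flag_from_env("RUN_SLOW", default=False)


def slow(test_case):
    # Skip the decorated test function or TestCase class unless RUN_SLOW is set.
    return unittest.skipUnless(_run_slow_tests, "test is slow")(test_case)

With a gate like this in place, the tests marked `@slow` below only run when slow tests are enabled, e.g. `RUN_SLOW=1 python -m pytest tests/test_modeling_tf_common.py`.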
6 changes: 6 additions & 0 deletions tests/test_modeling_tf_common.py
@@ -129,6 +129,7 @@ def test_save_load(self):

self.assert_outputs_same(after_outputs, outputs)

+ @slow
def test_graph_mode(self):
config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
for model_class in self.all_model_classes:
@@ -142,6 +143,7 @@ def run_in_graph_mode():
outputs = run_in_graph_mode()
self.assertIsNotNone(outputs)

+ @slow
def test_xla_mode(self):
config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
for model_class in self.all_model_classes:
@@ -182,6 +184,7 @@ def test_forward_signature(self):
expected_arg_names = ["input_ids"]
self.assertListEqual(arg_names[:1], expected_arg_names)

+ @slow
def test_saved_model_creation(self):
config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
config.output_hidden_states = False
@@ -311,6 +314,7 @@ def test_onnx_runtime_optimize(self):

onnxruntime.InferenceSession(onnx_model.SerializeToString())

+ @slow
def test_mixed_precision(self):
tf.keras.mixed_precision.experimental.set_policy("mixed_float16")

@@ -484,6 +488,7 @@ def test_pt_tf_model_equivalence(self):
max_diff = np.amax(np.abs(tfo - pto))
self.assertLessEqual(max_diff, 4e-2)

+ @slow
def test_train_pipeline_custom_model(self):
config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
# head_mask and decoder_head_mask has different shapes than other input args
@@ -904,6 +909,7 @@ def test_inputs_embeds(self):

model(inputs)

+ @slow
def test_graph_mode_with_inputs_embeds(self):
config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()

3 changes: 2 additions & 1 deletion tests/test_tokenization_barthez.py
@@ -17,13 +17,14 @@
import unittest

from transformers import BarthezTokenizer, BarthezTokenizerFast, BatchEncoding
- from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch
+ from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow

from .test_tokenization_common import TokenizerTesterMixin


@require_tokenizers
@require_sentencepiece
+ @slow
class BarthezTokenizationTest(TokenizerTesterMixin, unittest.TestCase):

tokenizer_class = BarthezTokenizer
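
Note that in the Barthez change above the decorator is applied at class level, which skips every test method in the class rather than a single test. A minimal illustration of that pattern, with a hypothetical class and test name:

import unittest

from transformers.testing_utils import slow


@slow
class ExampleSlowTokenizationTest(unittest.TestCase):
    # Every test method in this class is skipped unless slow tests are enabled,
    # e.g. RUN_SLOW=1 python -m pytest tests/test_tokenization_barthez.py
    def test_expensive_roundtrip(self):
        self.assertTrue(True)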
