Skip to content

Commit

Permalink
Replace error by warning when loading an architecture in another (#11207)
Browse files Browse the repository at this point in the history

* Replace error by warning when loading an architecture in another

* Style

* Style again

* Add a test

* Adapt old test
  • Loading branch information
sgugger committed Apr 13, 2021
1 parent 4906a29 commit cd39c8e
Show file tree
Hide file tree
Showing 3 changed files with 16 additions and 15 deletions.
9 changes: 5 additions & 4 deletions src/transformers/configuration_utils.py
Original file line number Diff line number Diff line change
def from_pretrained(cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs):
    """
    Instantiate a configuration object from a pretrained model name or path.

    Args:
        pretrained_model_name_or_path: Hub model identifier or local directory
            containing a saved configuration file.
        **kwargs: Overrides forwarded to ``cls.from_dict``.

    Returns:
        A configuration instance of type ``cls``.
    """
    config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)

    # A model-type mismatch is tolerated: loading one architecture's config
    # into another is sometimes legitimate, so warn instead of raising.
    # (``logger.warning`` — ``logger.warn`` is a deprecated alias.)
    if "model_type" in config_dict and hasattr(cls, "model_type") and config_dict["model_type"] != cls.model_type:
        logger.warning(
            f"You are using a model of type {config_dict['model_type']} to instantiate a model of type "
            f"{cls.model_type}. This is not supported for all configurations of models and can yield errors."
        )

    return cls.from_dict(config_dict, **kwargs)

Expand Down
13 changes: 6 additions & 7 deletions tests/test_modeling_bert_generation.py
Original file line number Diff line number Diff line change
Expand Up @@ -231,13 +231,7 @@ def create_and_check_for_causal_lm(
self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.vocab_size))

def prepare_config_and_inputs_for_common(self):
    """Build the ``(config, inputs_dict)`` pair used by the shared model tests.

    Returns:
        Tuple of the model config and a dict holding ``input_ids`` and
        ``attention_mask`` (labels are dropped for the common tests).
    """
    # Single call to prepare_config_and_inputs; the merged diff left a
    # redundant second call that is removed here.
    config, input_ids, input_mask, token_labels = self.prepare_config_and_inputs()
    inputs_dict = {"input_ids": input_ids, "attention_mask": input_mask}
    return config, inputs_dict

Expand All @@ -259,6 +253,11 @@ def test_model(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_model(*config_and_inputs)

def test_model_as_bert(self):
config, input_ids, input_mask, token_labels = self.model_tester.prepare_config_and_inputs()
config.model_type = "bert"
self.model_tester.create_and_check_model(config, input_ids, input_mask, token_labels)

def test_model_as_decoder(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs_for_decoder()
self.model_tester.create_and_check_model_as_decoder(*config_and_inputs)
Expand Down
9 changes: 5 additions & 4 deletions tests/test_modeling_common.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,9 @@
import unittest
from typing import List, Tuple

from transformers import is_torch_available
from transformers import is_torch_available, logging
from transformers.file_utils import WEIGHTS_NAME
from transformers.testing_utils import require_torch, require_torch_multi_gpu, slow, torch_device
from transformers.testing_utils import CaptureLogger, require_torch, require_torch_multi_gpu, slow, torch_device


if is_torch_available():
def test_model_from_pretrained_with_different_pretrained_model_name(self):
    """Loading a t5 checkpoint into ``BertModel`` should warn, not raise."""
    model = T5ForConditionalGeneration.from_pretrained(TINY_T5)
    self.assertIsNotNone(model)

    # The architecture mismatch is only logged as a warning now; capture the
    # configuration_utils logger to assert on the emitted message.
    logger = logging.get_logger("transformers.configuration_utils")
    with CaptureLogger(logger) as cl:
        BertModel.from_pretrained(TINY_T5)
    self.assertTrue("You are using a model of type t5 to instantiate a model of type bert" in cl.out)

0 comments on commit cd39c8e

Please sign in to comment.