diff --git a/deepchecks/nlp/checks/data_integrity/unknown_tokens.py b/deepchecks/nlp/checks/data_integrity/unknown_tokens.py
index 05f435fc79..f35cceb7bf 100644
--- a/deepchecks/nlp/checks/data_integrity/unknown_tokens.py
+++ b/deepchecks/nlp/checks/data_integrity/unknown_tokens.py
@@ -64,10 +64,11 @@ def __init__(
         if tokenizer is None:
             try:
                 from transformers import BertTokenizer  # pylint: disable=W0611,C0415 # noqa
-            except ImportError:
-                DeepchecksProcessError(
-                    'Tokenizer was not provided. In order to use checks default tokenizer (BertTokenizer),'
-                    'please pip install transformers>=4.27.4. ')
+            except ImportError as e:
+                raise DeepchecksProcessError(
+                    'Tokenizer was not provided. In order to use checks default '
+                    'tokenizer (BertTokenizer), please run:\n>> pip install transformers>=4.27.4.'
+                ) from e
             self.tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
         else:
             self._validate_tokenizer()