diff --git a/src/transformers/models/m2m_100/modeling_m2m_100.py b/src/transformers/models/m2m_100/modeling_m2m_100.py
index 5d01e091298264..20c4aea990ecdb 100755
--- a/src/transformers/models/m2m_100/modeling_m2m_100.py
+++ b/src/transformers/models/m2m_100/modeling_m2m_100.py
@@ -566,7 +566,7 @@ def _init_weights(self, module):
         >>> model_inputs = tokenizer(text_to_translate, return_tensors='pt')

         >>> # translate to French
-        >>> gen_tokens = model.generate( **model_inputs, forced_bos_token_id=tok.get_lang_id("fr"))
+        >>> gen_tokens = model.generate( **model_inputs, forced_bos_token_id=tokenizer.get_lang_id("fr"))
         >>> print(tokenizer.batch_decode(gen_tokens, skip_special_tokens=True))
 """

@@ -1272,7 +1272,7 @@ def forward(
         >>> model_inputs = tokenizer(text_to_translate, return_tensors='pt')

         >>> # translate to French
-        >>> gen_tokens = model.generate( **model_inputs, forced_bos_token_id=tok.get_lang_id("fr"))
+        >>> gen_tokens = model.generate( **model_inputs, forced_bos_token_id=tokenizer.get_lang_id("fr"))
         >>> print(tokenizer.batch_decode(gen_tokens, skip_special_tokens=True))
         """
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
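
Both hunks fix the same bug: the docstring example calls `tok.get_lang_id("fr")`, but the snippet never defines `tok`, only `tokenizer`, so running the example as written would raise a NameError. For reference, a minimal runnable sketch of the corrected example is below; the import lines, the checkpoint name `facebook/m2m100_418M`, and the input string are assumptions not shown in the hunks above.

# Minimal sketch of the corrected docstring example.
# Assumption: the facebook/m2m100_418M checkpoint; the diff itself does not show the setup lines.
from transformers import M2M100ForConditionalGeneration, M2M100Tokenizer

model = M2M100ForConditionalGeneration.from_pretrained("facebook/m2m100_418M")
tokenizer = M2M100Tokenizer.from_pretrained("facebook/m2m100_418M")

text_to_translate = "Life is like a box of chocolates"  # example input, not from the diff
model_inputs = tokenizer(text_to_translate, return_tensors="pt")

# translate to French: tokenizer.get_lang_id("fr") (not the undefined `tok`) returns the
# language token id that forced_bos_token_id makes the first generated token
gen_tokens = model.generate(**model_inputs, forced_bos_token_id=tokenizer.get_lang_id("fr"))
print(tokenizer.batch_decode(gen_tokens, skip_special_tokens=True))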