diff --git a/bindings/python/src/tokenizer.rs b/bindings/python/src/tokenizer.rs
index 4acb7f9de..4aafb6275 100644
--- a/bindings/python/src/tokenizer.rs
+++ b/bindings/python/src/tokenizer.rs
@@ -189,34 +189,22 @@ impl Tokenizer {
         .into()
     }
 
-    #[args(kwargs = "**")]
-    fn decode(&self, ids: Vec<u32>, kwargs: Option<&PyDict>) -> PyResult<String> {
-        let mut skip_special_tokens = true;
-
-        if let Some(kwargs) = kwargs {
-            if let Some(skip) = kwargs.get_item("skip_special_tokens") {
-                skip_special_tokens = skip.extract()?;
-            }
-        }
-
-        ToPyResult(self.tokenizer.decode(ids, skip_special_tokens)).into()
+    fn decode(&self, ids: Vec<u32>, skip_special_tokens: Option<bool>) -> PyResult<String> {
+        ToPyResult(self.tokenizer.decode(
+            ids,
+            skip_special_tokens.unwrap_or(true),
+        )).into()
     }
 
-    #[args(kwargs = "**")]
     fn decode_batch(
         &self,
         sentences: Vec<Vec<u32>>,
-        kwargs: Option<&PyDict>,
+        skip_special_tokens: Option<bool>,
     ) -> PyResult<Vec<String>> {
-        let mut skip_special_tokens = true;
-
-        if let Some(kwargs) = kwargs {
-            if let Some(skip) = kwargs.get_item("skip_special_tokens") {
-                skip_special_tokens = skip.extract()?;
-            }
-        }
-
-        ToPyResult(self.tokenizer.decode_batch(sentences, skip_special_tokens)).into()
+        ToPyResult(self.tokenizer.decode_batch(
+            sentences,
+            skip_special_tokens.unwrap_or(true),
+        )).into()
     }
 
     fn token_to_id(&self, token: &str) -> Option<u32> {