
Python - RustFmt
n1t0 committed Feb 18, 2020
1 parent cdd8f49 commit f263d76
Showing 6 changed files with 27 additions and 26 deletions.
3 changes: 2 additions & 1 deletion bindings/python/src/error.rs
@@ -23,6 +23,7 @@ impl std::error::Error for PyError {}
 pub struct ToPyResult<T>(pub Result<T>);
 impl<T> std::convert::Into<PyResult<T>> for ToPyResult<T> {
     fn into(self) -> PyResult<T> {
-        self.0.map_err(|e| { exceptions::Exception::py_err(format!("{}", e)) })
+        self.0
+            .map_err(|e| exceptions::Exception::py_err(format!("{}", e)))
     }
 }
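
For context on the pattern being reformatted here: ToPyResult is a newtype over the crate's own Result, and the Into<PyResult<T>> impl converts a tokenizers error into a Python exception exactly once, at the binding boundary. Below is a minimal compilable sketch of the same pattern, with stand-in PyErr/Error types so it runs without pyo3; the real bindings use pyo3's PyResult and exceptions::Exception::py_err.

    use std::fmt;

    #[derive(Debug)]
    struct PyErr(String); // stand-in for pyo3's PyErr
    type PyResult<T> = Result<T, PyErr>; // stand-in for pyo3's PyResult<T>

    #[derive(Debug)]
    struct Error(String); // stand-in for the tokenizers error type
    impl fmt::Display for Error {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            write!(f, "{}", self.0)
        }
    }

    pub struct ToPyResult<T>(pub Result<T, Error>);

    // `Into` is implemented directly (rather than the usual `From`) because
    // `impl From<ToPyResult<T>> for Result<T, PyErr>` is rejected by the
    // orphan rule: Self would be a foreign type whose parameter T appears
    // before the first local type.
    impl<T> std::convert::Into<PyResult<T>> for ToPyResult<T> {
        fn into(self) -> PyResult<T> {
            self.0
                .map_err(|e| PyErr(format!("{}", e)))
        }
    }

    fn main() {
        let ok: PyResult<u32> = ToPyResult(Ok(42)).into();
        let err: PyResult<u32> = ToPyResult(Err(Error("bad input".into()))).into();
        println!("{:?} / {:?}", ok, err);
    }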
5 changes: 1 addition & 4 deletions bindings/python/src/models.rs
@@ -174,10 +174,7 @@ impl WordLevel {
             }
         }
 
-        match tk::models::wordlevel::WordLevel::from_files(
-            vocab,
-            unk_token,
-        ) {
+        match tk::models::wordlevel::WordLevel::from_files(vocab, unk_token) {
             Err(e) => {
                 println!("Errors: {:?}", e);
                 Err(exceptions::Exception::py_err(
1 change: 0 additions & 1 deletion bindings/python/src/normalizers.rs
@@ -158,4 +158,3 @@ impl Strip {
         }))
     }
 }
-
17 changes: 10 additions & 7 deletions bindings/python/src/pre_tokenizers.rs
@@ -91,13 +91,16 @@ pub struct CharDelimiterSplit {}
 impl CharDelimiterSplit {
     #[new]
     pub fn new(obj: &PyRawObject, delimiter: &str) -> PyResult<()> {
-        let chr_delimiter = delimiter.chars().nth(0).ok_or(exceptions::Exception::py_err(
-            "delimiter must be a single character",
-        ))?;
-        Ok(obj.init(PreTokenizer{
-            pretok:Container::Owned(Box::new(
-                tk::pre_tokenizers::delimiter::CharDelimiterSplit::new(chr_delimiter)
-            ))
+        let chr_delimiter = delimiter
+            .chars()
+            .nth(0)
+            .ok_or(exceptions::Exception::py_err(
+                "delimiter must be a single character",
+            ))?;
+        Ok(obj.init(PreTokenizer {
+            pretok: Container::Owned(Box::new(
+                tk::pre_tokenizers::delimiter::CharDelimiterSplit::new(chr_delimiter),
+            )),
         }))
     }
 }
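
The reformatted chain above also shows a common conversion: chars().nth(0) yields an Option<char>, and ok_or turns the None case into an error that `?` can propagate (in the bindings, as a Python exception). A standalone sketch with a plain String error in place of the pyo3 exception; the single_char helper is hypothetical, not part of the crate:

    // ok_or converts Option<char> into Result<char, _> so that `?` works.
    // Note: as in the binding above, only the empty string is rejected; a
    // longer string silently contributes just its first character.
    fn single_char(delimiter: &str) -> Result<char, String> {
        let chr_delimiter = delimiter
            .chars()
            .nth(0)
            .ok_or(String::from("delimiter must be a single character"))?;
        Ok(chr_delimiter)
    }

    fn main() {
        assert_eq!(single_char("-"), Ok('-'));
        assert!(single_char("").is_err());
        assert_eq!(single_char("ab"), Ok('a')); // not rejected, despite the message
    }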
1 change: 0 additions & 1 deletion bindings/python/src/processors.rs
@@ -29,7 +29,6 @@ impl BertProcessing {
     }
 }
 
-
 #[pyclass(extends=PostProcessor)]
 pub struct RobertaProcessing {}
 #[pymethods]
26 changes: 14 additions & 12 deletions bindings/python/src/tokenizer.rs
@@ -39,10 +39,10 @@ impl Tokenizer {
     }
 
     fn num_special_tokens_to_add(&self, is_pair: bool) -> PyResult<usize> {
-        Ok(self.tokenizer
-            .get_post_processor()
-            .map_or(0, |p| p.as_ref().added_tokens(is_pair))
-        )
+        Ok(self
+            .tokenizer
+            .get_post_processor()
+            .map_or(0, |p| p.as_ref().added_tokens(is_pair)))
     }
 
     #[args(kwargs = "**")]
@@ -197,21 +197,23 @@ impl Tokenizer {
     }
 
     fn decode(&self, ids: Vec<u32>, skip_special_tokens: Option<bool>) -> PyResult<String> {
-        ToPyResult(self.tokenizer.decode(
-            ids,
-            skip_special_tokens.unwrap_or(true),
-        )).into()
+        ToPyResult(
+            self.tokenizer
+                .decode(ids, skip_special_tokens.unwrap_or(true)),
+        )
+        .into()
     }
 
     fn decode_batch(
         &self,
         sentences: Vec<Vec<u32>>,
         skip_special_tokens: Option<bool>,
     ) -> PyResult<Vec<String>> {
-        ToPyResult(self.tokenizer.decode_batch(
-            sentences,
-            skip_special_tokens.unwrap_or(true),
-        )).into()
+        ToPyResult(
+            self.tokenizer
+                .decode_batch(sentences, skip_special_tokens.unwrap_or(true)),
+        )
+        .into()
     }
 
     fn token_to_id(&self, token: &str) -> Option<u32> {
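
Both decode methods above take skip_special_tokens: Option<bool> and apply unwrap_or(true), so a Python caller can omit the argument (or pass None) and get the Rust-side default. A minimal sketch of that defaulting pattern; the body is a hypothetical stub, not the crate's decoder:

    // An optional flag coming from Python arrives as Option<bool>;
    // unwrap_or supplies the default when it was omitted.
    fn decode(ids: &[u32], skip_special_tokens: Option<bool>) -> String {
        let skip = skip_special_tokens.unwrap_or(true); // default: true
        // Stand-in body: join the ids, optionally dropping a pretend
        // "special" id 0. The real decoding lives in the tokenizers crate.
        ids.iter()
            .filter(|&&id| !(skip && id == 0))
            .map(|id| id.to_string())
            .collect::<Vec<_>>()
            .join(" ")
    }

    fn main() {
        assert_eq!(decode(&[0, 5, 7], None), "5 7"); // defaulted to skipping
        assert_eq!(decode(&[0, 5, 7], Some(false)), "0 5 7");
    }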
