Commit 8c21cf1

Chore/clippy (#34)

nathanielsimard committed Sep 13, 2022
1 parent bffc543 commit 8c21cf1
Showing 122 changed files with 428 additions and 422 deletions.
6 changes: 5 additions & 1 deletion .github/workflows/test-burn-dataset.yml
@@ -15,7 +15,7 @@ jobs:
        with:
          profile: minimal
          toolchain: nightly
-         components: rustfmt
+         components: rustfmt, clippy
          override: true

      - name: check format
@@ -32,3 +32,7 @@ jobs:
        run: |
          cd burn-dataset
          cargo test --tests
+     - name: check clippy
+       run: |
+         cargo clippy -p burn-dataset -- -D warnings
8 changes: 7 additions & 1 deletion .github/workflows/test-burn-tensor.yml
@@ -15,7 +15,7 @@ jobs:
        with:
          profile: minimal
          toolchain: nightly
-         components: rustfmt
+         components: rustfmt, clippy
          override: true

      - name: check format
@@ -38,4 +38,10 @@ jobs:
          cd burn-tensor
          cargo test --no-default-features --features tch --tests
+     - name: check clippy backend tch
+       run: |
+         cargo clippy -p burn-tensor --no-default-features --features tch -- -D warnings
+     - name: check clippy backend ndarray
+       run: |
+         cargo clippy -p burn-tensor --no-default-features --features ndarray -- -D warnings
6 changes: 5 additions & 1 deletion .github/workflows/test-burn.yml
@@ -15,7 +15,7 @@ jobs:
        with:
          profile: minimal
          toolchain: nightly
-         components: rustfmt
+         components: rustfmt, clippy
          override: true

      - name: check format
@@ -33,3 +33,7 @@ jobs:
        run: |
          cd burn
          cargo test --tests
+     - name: check clippy
+       run: |
+         cargo clippy -p burn -- -D warnings
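All three workflow changes follow the same recipe: install the `clippy` component next to `rustfmt`, then run `cargo clippy` with `-D warnings`, which promotes every lint to a hard error so the job fails on any warning. As a minimal illustration (hypothetical function, not from the repo), the following builds cleanly under `cargo build` but fails such a CI step:

```rust
// Hypothetical function: compiles fine, but `cargo clippy -- -D warnings`
// rejects it with the `needless_return` lint, failing the CI job.
fn sum(values: &[u32]) -> u32 {
    return values.iter().sum();
}

fn main() {
    println!("{}", sum(&[1, 2, 3]));
}
```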
burn-dataset/src/dataset/base.rs
@@ -1,7 +1,8 @@
 use crate::DatasetIterator;

 pub trait Dataset<I>: Send + Sync {
-    fn iter<'a>(&'a self) -> DatasetIterator<'a, I>;
+    fn iter(&self) -> DatasetIterator<'_, I>;
     fn get(&self, index: usize) -> Option<I>;
     fn len(&self) -> usize;
+    fn is_empty(&self) -> bool;
 }
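Two lints appear to drive this trait change: `needless_lifetimes`, since the explicit `'a` on `iter` adds nothing that lifetime elision does not already tie to `&self`, and `len_without_is_empty`, which asks any type exposing `len()` to also expose `is_empty()`; that is why every implementor below gains an `is_empty` method. A reduced sketch of the lifetime fix, with illustrative names:

```rust
// Reduced sketch of the `needless_lifetimes` fix; `Items` and
// `ItemsIter` are stand-ins for the crate's types.
struct Items {
    data: Vec<i32>,
}

struct ItemsIter<'a> {
    data: &'a [i32],
}

impl Items {
    // Before: fn iter<'a>(&'a self) -> ItemsIter<'a>
    // After: elision infers the same lifetime; `'_` merely marks it.
    fn iter(&self) -> ItemsIter<'_> {
        ItemsIter {
            data: self.data.as_slice(),
        }
    }
}

fn main() {
    let items = Items { data: vec![1, 2, 3] };
    assert_eq!(items.iter().data.len(), 3);
}
```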
6 changes: 5 additions & 1 deletion burn-dataset/src/dataset/fake.rs
@@ -18,7 +18,7 @@ impl<I: Dummy<Faker>> FakeDataset<I> {
 }

 impl<I: Send + Sync + Clone> Dataset<I> for FakeDataset<I> {
-    fn iter<'a>(&'a self) -> DatasetIterator<'a, I> {
+    fn iter(&self) -> DatasetIterator<'_, I> {
         DatasetIterator::new(self)
     }

@@ -29,4 +29,8 @@ impl<I: Send + Sync + Clone> Dataset<I> for FakeDataset<I> {
     fn len(&self) -> usize {
         self.dataset.len()
     }
+
+    fn is_empty(&self) -> bool {
+        self.dataset.is_empty()
+    }
 }
10 changes: 5 additions & 5 deletions burn-dataset/src/dataset/in_memory.rs
@@ -20,17 +20,17 @@ where
     I: Clone + Send + Sync,
 {
     fn get(&self, index: usize) -> Option<I> {
-        match self.items.get(index) {
-            Some(item) => Some(item.clone()),
-            None => None,
-        }
+        self.items.get(index).cloned()
     }
-    fn iter<'a>(&'a self) -> DatasetIterator<'a, I> {
+    fn iter(&self) -> DatasetIterator<'_, I> {
         DatasetIterator::new(self)
     }
     fn len(&self) -> usize {
         self.items.len()
     }
+    fn is_empty(&self) -> bool {
+        self.items.is_empty()
+    }
 }

 impl<I> InMemDataset<I>
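The rewritten `get` is the classic target of clippy's `manual_map`/`map_clone` lints: matching an `Option` just to clone the payload collapses into `.cloned()`. The same pair in standalone form:

```rust
// Before: the match that clippy flags (`manual_map` / `map_clone`).
fn get_before(items: &[String], index: usize) -> Option<String> {
    match items.get(index) {
        Some(item) => Some(item.clone()),
        None => None,
    }
}

// After: `Option<&T>::cloned` does the same thing in one call.
fn get_after(items: &[String], index: usize) -> Option<String> {
    items.get(index).cloned()
}

fn main() {
    let items = vec!["a".to_string(), "b".to_string()];
    assert_eq!(get_before(&items, 1), get_after(&items, 1));
    assert_eq!(get_before(&items, 9), None);
}
```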
4 changes: 2 additions & 2 deletions burn-dataset/src/dataset/iterator.rs
@@ -3,7 +3,7 @@ use std::iter::Iterator;

 pub struct DatasetIterator<'a, I> {
     current: usize,
-    dataset: Box<&'a dyn Dataset<I>>,
+    dataset: &'a dyn Dataset<I>,
 }

impl<'a, I> DatasetIterator<'a, I> {
@@ -13,7 +13,7 @@ impl<'a, I> DatasetIterator<'a, I> {
     {
         DatasetIterator {
             current: 0,
-            dataset: Box::new(dataset),
+            dataset,
         }
     }
 }
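`Box<&'a dyn Dataset<I>>` was a heap allocation holding nothing but a borrow, which clippy flags (likely as `redundant_allocation`); the plain reference keeps dynamic dispatch without the extra indirection. A reduced sketch with illustrative types:

```rust
// Illustrative trait standing in for the crate's `Dataset`.
trait Dataset {
    fn len(&self) -> usize;
}

struct Numbers(Vec<i32>);

impl Dataset for Numbers {
    fn len(&self) -> usize {
        self.0.len()
    }
}

// Before: a heap allocation that only stores a borrow (flagged).
struct IterBefore<'a> {
    dataset: Box<&'a dyn Dataset>,
}

// After: the reference alone already gives dynamic dispatch.
struct IterAfter<'a> {
    dataset: &'a dyn Dataset,
}

fn main() {
    let numbers = Numbers(vec![1, 2, 3]);
    let before = IterBefore { dataset: Box::new(&numbers as &dyn Dataset) };
    let after = IterAfter { dataset: &numbers };
    assert_eq!(before.dataset.len(), after.dataset.len());
}
```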
4 changes: 2 additions & 2 deletions burn-dataset/src/dataset/mod.rs
@@ -1,11 +1,11 @@
-mod dataset;
+mod base;
 #[cfg(feature = "fake")]
 mod fake;
 mod in_memory;
 mod iterator;

 #[cfg(feature = "fake")]
 pub use self::fake::*;
-pub use dataset::*;
+pub use base::*;
 pub use in_memory::*;
 pub use iterator::*;
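The `dataset` module lived inside the `dataset` directory, producing the path `dataset::dataset`; clippy's `module_inception` lint flags exactly this, which is presumably why the module was renamed to `base`. A minimal illustration:

```rust
// What clippy's `module_inception` flags: a module whose name
// repeats its parent's, as `dataset/dataset.rs` did here.
mod dataset {
    pub mod dataset {} // flagged: the path is `dataset::dataset`
}

// The fix applied in this commit: a distinct inner name.
mod dataset_fixed {
    pub mod base {}
}

fn main() {}
```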
4 changes: 2 additions & 2 deletions burn-dataset/src/source/huggingface/downloader.rs
@@ -43,13 +43,13 @@ pub fn download(
         };
     }

-    if config.len() > 0 {
+    if !config.is_empty() {
         command.arg("--config");
         for config in config {
             command.arg(config);
         }
     }
-    if config_named.len() > 0 {
+    if !config_named.is_empty() {
         command.arg("--config-named");
         for (key, value) in config_named {
             command.arg(format!("{}={}", key, value));
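Both conditions are instances of clippy's `len_zero` lint: comparing `len()` against zero states the question less directly than `is_empty()`. In isolation:

```rust
// Before: flagged by `len_zero`.
fn has_config(config: &[String]) -> bool {
    config.len() > 0
}

// After: asks the collection directly.
fn has_config_fixed(config: &[String]) -> bool {
    !config.is_empty()
}

fn main() {
    assert!(!has_config(&[]));
    assert!(has_config_fixed(&["train".to_string()]));
}
```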
6 changes: 5 additions & 1 deletion burn-dataset/src/source/huggingface/mnist.rs
@@ -14,7 +14,7 @@ pub struct MNISTDataset {
 }

 impl Dataset<MNISTItem> for MNISTDataset {
-    fn iter<'a>(&'a self) -> crate::DatasetIterator<'a, MNISTItem> {
+    fn iter(&self) -> crate::DatasetIterator<'_, MNISTItem> {
         DatasetIterator::new(self)
     }

@@ -25,6 +25,10 @@ impl Dataset<MNISTItem> for MNISTDataset {
     fn len(&self) -> usize {
         self.dataset.len()
     }
+
+    fn is_empty(&self) -> bool {
+        self.dataset.is_empty()
+    }
 }

 impl MNISTDataset {
14 changes: 13 additions & 1 deletion burn-dataset/src/transform/composed.rs
@@ -24,7 +24,7 @@ where
         }
         None
     }
-    fn iter<'a>(&'a self) -> DatasetIterator<'a, I> {
+    fn iter(&self) -> DatasetIterator<'_, I> {
         DatasetIterator::new(self)
     }
     fn len(&self) -> usize {
@@ -34,4 +34,16 @@ where
         }
         total
     }
+
+    fn is_empty(&self) -> bool {
+        let mut is_empty = true;
+
+        for dataset in self.datasets.iter() {
+            if !dataset.is_empty() {
+                is_empty = false;
+            }
+        }
+
+        is_empty
+    }
 }
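The new `is_empty` returns true only when every composed dataset is empty, which is what the loop computes. A more compact equivalent, sketched on plain vectors standing in for the datasets, would use `Iterator::all`:

```rust
// One-line equivalent of the loop above, shown on plain vectors.
fn composed_is_empty(datasets: &[Vec<i32>]) -> bool {
    datasets.iter().all(|dataset| dataset.is_empty())
}

fn main() {
    assert!(composed_is_empty(&[vec![], vec![]]));
    assert!(!composed_is_empty(&[vec![], vec![1]]));
}
```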
13 changes: 7 additions & 6 deletions burn-dataset/src/transform/mapper.rs
@@ -23,18 +23,19 @@ where
 {
     fn get(&self, index: usize) -> Option<O> {
         let item = self.dataset.get(index);
-        match item {
-            None => None,
-            Some(item) => Some(self.mapper.map(&item)),
-        }
+        item.map(|item| self.mapper.map(&item))
     }

-    fn iter<'a>(&'a self) -> DatasetIterator<'a, O> {
+    fn iter(&self) -> DatasetIterator<'_, O> {
         DatasetIterator::new(self)
     }
     fn len(&self) -> usize {
         self.dataset.len()
     }
+
+    fn is_empty(&self) -> bool {
+        self.dataset.is_empty()
+    }
 }

#[cfg(test)]
@@ -53,7 +54,7 @@ mod tests {
             }
         }
         let items_original = test_data::string_items();
-        let dataset = InMemDataset::new(items_original.clone());
+        let dataset = InMemDataset::new(items_original);
         let dataset = MapperDataset::new(Box::new(dataset), StringToFirstChar {});

         let items: Vec<String> = dataset.iter().collect();
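Two cleanups here: `get` now maps over the `Option` instead of spelling out the match (the same `manual_map` pattern as in in_memory.rs), and the test drops a `.clone()` of a value that is never used again, which clippy reports as `redundant_clone`. The map rewrite in isolation:

```rust
// Before: a match that only transforms the `Some` case (flagged).
fn map_before(item: Option<i32>, offset: i32) -> Option<i32> {
    match item {
        None => None,
        Some(item) => Some(item + offset),
    }
}

// After: `Option::map` states the transformation directly.
fn map_after(item: Option<i32>, offset: i32) -> Option<i32> {
    item.map(|item| item + offset)
}

fn main() {
    assert_eq!(map_before(Some(1), 10), map_after(Some(1), 10));
    assert_eq!(map_before(None, 10), None);
}
```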
9 changes: 7 additions & 2 deletions burn-dataset/src/transform/partial.rs
@@ -51,12 +51,17 @@ where
         self.dataset.get(index)
     }

-    fn iter<'a>(&'a self) -> DatasetIterator<'a, I> {
+    fn iter(&self) -> DatasetIterator<'_, I> {
         DatasetIterator::new(self)
     }

     fn len(&self) -> usize {
         usize::min(self.end_index - self.start_index, self.dataset.len())
     }
+
+    fn is_empty(&self) -> bool {
+        self.dataset.is_empty()
+    }
 }

#[cfg(test)]
@@ -103,7 +108,7 @@ mod tests {
         let mut items_partial = HashSet::new();

         for (i, item) in dataset_original.iter().enumerate() {
-            if i < 10 || i >= 20 {
+            if !(10..20).contains(&i) {
                 items_original_2.insert(item);
             } else {
                 items_original_1.insert(item);
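The new condition is clippy's `manual_range_contains` suggestion: two comparisons delimiting an interval read better as a range query. Mind the polarity: `i < 10 || i >= 20` means i lies outside `10..20`, hence the negated `contains`. In isolation:

```rust
// Before: two comparisons encoding "i is outside 10..20".
fn outside_before(i: usize) -> bool {
    i < 10 || i >= 20
}

// After: the range makes the interval explicit.
fn outside_after(i: usize) -> bool {
    !(10..20).contains(&i)
}

fn main() {
    for i in [5, 10, 19, 20] {
        assert_eq!(outside_before(i), outside_after(i));
    }
}
```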
13 changes: 8 additions & 5 deletions burn-dataset/src/transform/random.rs
@@ -33,15 +33,18 @@ where
             Some(index) => index,
             None => return None,
         };
-        match self.dataset.get(*index) {
-            Some(item) => Some(item.clone()),
-            None => None,
-        }
+        self.dataset.get(*index)
     }
-    fn iter<'a>(&'a self) -> DatasetIterator<'a, I> {
+
+    fn iter(&self) -> DatasetIterator<'_, I> {
         DatasetIterator::new(self)
     }
+
     fn len(&self) -> usize {
         self.dataset.len()
     }
+
+    fn is_empty(&self) -> bool {
+        self.dataset.is_empty()
+    }
 }
2 changes: 0 additions & 2 deletions burn-derive/src/display.rs
@@ -7,7 +7,6 @@ pub fn display_fn() -> proc_macro2::TokenStream {
             write!(f, "{}[num_params={}]", self.name(), self.num_params())
         }
     }
-    .into()
 }

pub fn name_fn(name: &Ident) -> proc_macro2::TokenStream {
@@ -16,5 +15,4 @@ pub fn name_fn(name: &Ident) -> proc_macro2::TokenStream {
             stringify!(#name)
         }
     }
-    .into()
 }
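Both functions already return `proc_macro2::TokenStream`, and `quote!` yields exactly that type, so each trailing `.into()` converted a value into its own type; clippy calls this `useless_conversion`. The same mistake on a simpler type:

```rust
// `useless_conversion`: converting a value into the type it already has.
fn greeting_before() -> String {
    let s = String::from("hello");
    s.into() // flagged: `String` into `String` is a no-op
}

fn greeting_after() -> String {
    String::from("hello")
}

fn main() {
    assert_eq!(greeting_before(), greeting_after());
}
```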
31 changes: 9 additions & 22 deletions burn-derive/src/field.rs
@@ -14,10 +14,10 @@ impl FieldTypeAnalyzer {
         self.field.ident.clone().unwrap()
     }

-    pub fn is_of_type(&self, paths: &Vec<&str>) -> bool {
+    pub fn is_of_type(&self, paths: &[&str]) -> bool {
         match &self.field.ty {
             syn::Type::Path(path) => {
-                let name = Self::path_name(&path);
+                let name = Self::path_name(path);
                 paths.contains(&name.as_str())
             }
             _ => false,
@@ -26,37 +26,24 @@ impl FieldTypeAnalyzer {

     #[allow(dead_code)]
     pub fn first_generic_field(&self) -> TypePath {
-        let err = || {
-            panic!(
-                "Field {} as no generic",
-                self.field.ident.clone().unwrap().to_string()
-            )
-        };
+        let err = || panic!("Field {} has no generic", self.field.ident.clone().unwrap());
         match &self.field.ty {
             syn::Type::Path(path) => Self::path_generic_argument(path),
             _ => err(),
         }
     }
     pub fn path_generic_argument(path: &TypePath) -> TypePath {
         let segment = path.path.segments.last().unwrap();
-        let err = || {
-            panic!(
-                "Path segment {} has no generic",
-                segment.ident.clone().to_string(),
-            )
-        };
+        let err = || panic!("Path segment {} has no generic", segment.ident.clone());
         match &segment.arguments {
             syn::PathArguments::None => err(),
             syn::PathArguments::AngleBracketed(param) => {
                 let first_param = param.args.first().unwrap();
-                match first_param {
-                    syn::GenericArgument::Type(ty) => match ty {
-                        Type::Path(path) => {
-                            return path.clone();
-                        }
-                        _ => err(),
-                    },
-                    _ => err(),
+
+                if let syn::GenericArgument::Type(Type::Path(path)) = first_param {
+                    path.clone()
+                } else {
+                    err()
                 }
             }
             syn::PathArguments::Parenthesized(_) => err(),
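Three cleanups in this file: `&Vec<&str>` becomes `&[&str]` (clippy's `ptr_arg`, since a slice accepts strictly more argument types), the `panic!` closures drop a `.to_string()` on values that already implement `Display` (likely `to_string_in_format_args`), and the nested match collapses into one `if let` with a nested pattern. The `ptr_arg` point in isolation:

```rust
// Before: `&Vec<_>` forces callers to hold a Vec (flagged by `ptr_arg`).
fn is_of_type_before(paths: &Vec<&str>) -> bool {
    paths.contains(&"alloc::vec::Vec")
}

// After: `&[_]` accepts vectors, arrays, and slices alike.
fn is_of_type_after(paths: &[&str]) -> bool {
    paths.contains(&"alloc::vec::Vec")
}

fn main() {
    let paths = vec!["alloc::vec::Vec"];
    assert!(is_of_type_before(&paths));
    assert!(is_of_type_after(&paths));
}
```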
4 changes: 2 additions & 2 deletions burn-derive/src/lib.rs
@@ -54,6 +54,6 @@ fn module_derive_impl(ast: &syn::DeriveInput) -> TokenStream {
             #display_fn
         }
     };
-    let tokens = gen.into();
-    tokens
+
+    gen.into()
 }
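The last change is clippy's `let_and_return` lint: binding a value to a local only to return it on the next line. Returning the expression directly is equivalent:

```rust
// Before: flagged by `let_and_return`.
fn before() -> i32 {
    let tokens = 40 + 2;
    tokens
}

// After: return the expression itself.
fn after() -> i32 {
    40 + 2
}

fn main() {
    assert_eq!(before(), after());
}
```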
