From 8678b60ee6c6663a0a9f2e85458ea15875dc5d48 Mon Sep 17 00:00:00 2001 From: paulnovello Date: Thu, 18 Apr 2024 15:56:42 +0200 Subject: [PATCH] chore: flake8 + typos in notebooks + readme & index --- README.md | 2 ++ docs/index.md | 2 ++ docs/notebooks/tensorflow/demo_scale_tf.ipynb | 2 +- docs/notebooks/torch/demo_scale_torch.ipynb | 2 +- oodeel/extractor/torch_feature_extractor.py | 1 - 5 files changed, 6 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 553f081..5bf4347 100644 --- a/README.md +++ b/README.md @@ -177,6 +177,8 @@ Currently, **oodeel** includes the following baselines: | NMD | [Neural Mean Discrepancy for Efficient Out-of-Distribution Detection](https://openaccess.thecvf.com/content/CVPR2022/html/Dong_Neural_Mean_Discrepancy_for_Efficient_Out-of-Distribution_Detection_CVPR_2022_paper.html) | CVPR 2022 | planned | | Gram | [Detecting Out-of-Distribution Examples with Gram Matrices](https://proceedings.mlr.press/v119/sastry20a.html) | ICML 2020 | avail [tensorflow](docs/notebooks/tensorflow/demo_gram_tf.ipynb) or [torch](docs/notebooks/torch/demo_gram_torch.ipynb) | | GEN | [GEN: Pushing the Limits of Softmax-Based Out-of-Distribution Detection](https://openaccess.thecvf.com/content/CVPR2023/html/Liu_GEN_Pushing_the_Limits_of_Softmax-Based_Out-of-Distribution_Detection_CVPR_2023_paper.html) | CVPR 2023 | avail [tensorflow](docs/notebooks/tensorflow/demo_gen_tf.ipynb) or [torch](docs/notebooks/torch/demo_gen_torch.ipynb) | +| ASH | [Extremely Simple Activation Shaping for Out-of-Distribution Detection](https://arxiv.org/abs/2209.09858) | ICLR 2023 | avail [tensorflow](docs/notebooks/tensorflow/demo_ash_tf.ipynb) or [torch](docs/notebooks/torch/demo_ash_torch.ipynb) | +| SCALE | [Scaling for Training Time and Post-hoc Out-of-distribution Detection Enhancement](https://arxiv.org/abs/2310.00227) | ICLR 2024 | avail [tensorflow](docs/notebooks/tensorflow/demo_scale_tf.ipynb) or [torch](docs/notebooks/torch/demo_scale_torch.ipynb) | 
diff --git a/docs/index.md b/docs/index.md index 0fe92c4..bd5b83a 100644 --- a/docs/index.md +++ b/docs/index.md @@ -176,6 +176,8 @@ Currently, **oodeel** includes the following baselines: | NMD | [Neural Mean Discrepancy for Efficient Out-of-Distribution Detection](https://openaccess.thecvf.com/content/CVPR2022/html/Dong_Neural_Mean_Discrepancy_for_Efficient_Out-of-Distribution_Detection_CVPR_2022_paper.html) | CVPR 2022 | planned | | Gram | [Detecting Out-of-Distribution Examples with Gram Matrices](https://proceedings.mlr.press/v119/sastry20a.html) | ICML 2020 | avail [tensorflow](./notebooks/tensorflow/demo_gram_tf.ipynb) or [torch](./notebooks/torch/demo_gram_torch.ipynb) | | GEN | [GEN: Pushing the Limits of Softmax-Based Out-of-Distribution Detection](https://openaccess.thecvf.com/content/CVPR2023/html/Liu_GEN_Pushing_the_Limits_of_Softmax-Based_Out-of-Distribution_Detection_CVPR_2023_paper.html) | CVPR 2023 | avail [tensorflow](./notebooks/tensorflow/demo_gen_tf.ipynb) or [torch](./notebooks/torch/demo_gen_torch.ipynb) | +| ASH | [Extremely Simple Activation Shaping for Out-of-Distribution Detection](https://arxiv.org/abs/2209.09858) | ICLR 2023 | avail [tensorflow](./notebooks/tensorflow/demo_ash_tf.ipynb) or [torch](./notebooks/torch/demo_ash_torch.ipynb) | +| SCALE | [Scaling for Training Time and Post-hoc Out-of-distribution Detection Enhancement](https://arxiv.org/abs/2310.00227) | ICLR 2024 | avail [tensorflow](./notebooks/tensorflow/demo_scale_tf.ipynb) or [torch](./notebooks/torch/demo_scale_torch.ipynb) | **Oodeel** also includes standard training functions with data augmentation and learning rate scheduler for toy convnet models or models from `keras.applications` in [tf_training_tools.py](https://github.com/deel-ai/oodeel/tree/master/oodeel/utils/tf_training_tools.py) and `torchvision.models` in [torch_training_tools.py](https://github.com/deel-ai/oodeel/tree/master/oodeel/utils/torch_training_tools.py) files. 
These functions come in handy for benchmarks like *leave-k-classes-out* that requires retraining models on a subset of dataset classes. diff --git a/docs/notebooks/tensorflow/demo_scale_tf.ipynb b/docs/notebooks/tensorflow/demo_scale_tf.ipynb index e0fc581..1899c74 100644 --- a/docs/notebooks/tensorflow/demo_scale_tf.ipynb +++ b/docs/notebooks/tensorflow/demo_scale_tf.ipynb @@ -18,7 +18,7 @@ "\n", "**Reference** \n", "_Scaling for Training Time and Post-hoc Out-of-distribution Detection Enhancement_, ICLR 2024\n", - "" + "" ] }, { diff --git a/docs/notebooks/torch/demo_scale_torch.ipynb b/docs/notebooks/torch/demo_scale_torch.ipynb index b31fb77..55f89d3 100644 --- a/docs/notebooks/torch/demo_scale_torch.ipynb +++ b/docs/notebooks/torch/demo_scale_torch.ipynb @@ -18,7 +18,7 @@ "\n", "**Reference** \n", "_Scaling for Training Time and Post-hoc Out-of-distribution Detection Enhancement_, ICLR 2024\n", - "\n" + "\n" ] }, { diff --git a/oodeel/extractor/torch_feature_extractor.py b/oodeel/extractor/torch_feature_extractor.py index 147c59f..746024a 100644 --- a/oodeel/extractor/torch_feature_extractor.py +++ b/oodeel/extractor/torch_feature_extractor.py @@ -24,7 +24,6 @@ from typing import get_args from typing import Optional -import numpy as np import torch from torch import nn from torch.utils.data import DataLoader