diff --git a/README.md b/README.md
index 6e3e146..434bf9f 100644
--- a/README.md
+++ b/README.md
@@ -1,9 +1,11 @@
 # Unlearnable Examples
-Code for ICLR2021 Paper ["Unlearnable Examples: Making Personal Data Unexploitable "](https://openreview.net/forum?id=iAmZUo0DxC0) by Hanxun Huang, Xingjun Ma, Sarah Monazam Erfani, James Bailey, Yisen Wang.
 
 ## Quick Start
 ##### Use the QuickStart.ipynb notebook for a quick start.
 In the notebook, you can find the minimal implementation for generating sample-wise unlearnable examples on CIFAR-10.
+Please remove `mlconfig` from `models/__init__.py` if you are only using the notebook, and copy-paste the model into the notebook.
+
+
 
 ## Experiments in the paper.
 Check scripts folder for *.sh for each corresponding experiments.
@@ -62,13 +64,3 @@ python3 -u main.py --version resnet18 \
                    --perturb_tensor_filepath path/to/your/experiment/folder/perturbation.pt \
                    --train
 ```
-
-## Citing this work
-```
-@inproceedings{huang2021unlearnable,
-  title={Unlearnable Examples: Making Personal Data Unexploitable},
-  author={Hanxun Huang and Xingjun Ma and Sarah Monazam Erfani and James Bailey and Yisen Wang},
-  booktitle={ICLR},
-  year={2021}
-}
-```
diff --git a/models/svhn/dense121.yaml b/models/svhn/dense121.yaml
deleted file mode 100644
index 365634c..0000000
--- a/models/svhn/dense121.yaml
+++ /dev/null
@@ -1,27 +0,0 @@
-num_classes: 10
-epochs: 30
-grad_clip: 5.0
-log_frequency: 100
-
-model:
-  name: DenseNet121
-  num_classes: 10
-
-criterion:
-  name: CrossEntropyLoss
-
-optimizer:
-  name: SGD
-  lr: 0.1
-  weight_decay: 5.e-4
-  momentum: 0.9
-
-scheduler:
-  name: CosineAnnealingLR
-  T_max: $epochs
-  eta_min: 0.0
-
-dataset:
-  name: DatasetGenerator
-  train_batch_size: 96
-  eval_batch_size: 128
diff --git a/models/svhn/resnet18.yaml b/models/svhn/resnet18.yaml
deleted file mode 100644
index 04b2b2e..0000000
--- a/models/svhn/resnet18.yaml
+++ /dev/null
@@ -1,27 +0,0 @@
-num_classes: 10
-epochs: 30
-grad_clip: 5.0
-log_frequency: 100
-
-model:
-  name: ResNet18
-  num_classes: 10
-
-criterion:
-  name: CrossEntropyLoss
-
-optimizer:
-  name: SGD
-  lr: 0.1
-  weight_decay: 5.e-4
-  momentum: 0.9
-
-scheduler:
-  name: CosineAnnealingLR
-  T_max: $epochs
-  eta_min: 0.0
-
-dataset:
-  name: DatasetGenerator
-  train_batch_size: 128
-  eval_batch_size: 128
diff --git a/models/svhn/resnet18_madrys.yaml b/models/svhn/resnet18_madrys.yaml
deleted file mode 100644
index 5530fa4..0000000
--- a/models/svhn/resnet18_madrys.yaml
+++ /dev/null
@@ -1,30 +0,0 @@
-num_classes: 100
-epochs: 100
-grad_clip: 5.0
-log_frequency: 100
-
-model:
-  name: ResNet18
-  num_classes: 10
-
-criterion:
-  name: MadrysLoss
-  epsilon: 0.03137254901
-  perturb_steps: 10
-  step_size: 0.00784313725
-
-optimizer:
-  name: SGD
-  lr: 0.1
-  weight_decay: 5.e-4
-  momentum: 0.9
-
-scheduler:
-  name: MultiStepLR
-  milestones: [75, 90, 100]
-  gamma: 0.1
-
-dataset:
-  name: DatasetGenerator
-  train_batch_size: 128
-  eval_batch_size: 128
diff --git a/models/svhn/resnet50.yaml b/models/svhn/resnet50.yaml
deleted file mode 100644
index 30a5fd8..0000000
--- a/models/svhn/resnet50.yaml
+++ /dev/null
@@ -1,27 +0,0 @@
-num_classes: 10
-epochs: 30
-grad_clip: 5.0
-log_frequency: 100
-
-model:
-  name: ResNet50
-  num_classes: 10
-
-criterion:
-  name: CrossEntropyLoss
-
-optimizer:
-  name: SGD
-  lr: 0.1
-  weight_decay: 5.e-4
-  momentum: 0.9
-
-scheduler:
-  name: CosineAnnealingLR
-  T_max: $epochs
-  eta_min: 0.0
-
-dataset:
-  name: DatasetGenerator
-  train_batch_size: 128
-  eval_batch_size: 128
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..d66c2b2
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,3 @@
+torch
+torchvision
+mlconfig
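
For context on the new `mlconfig` entry in requirements.txt: the training configs in this repo (such as the SVHN YAML files removed above) name a model, criterion, optimizer, and scheduler that `main.py` instantiates through `mlconfig`. Below is a minimal sketch of that flow, assuming `mlconfig`'s usual `load`/factory API and that the classes named in the YAML (`ResNet18`, `DatasetGenerator`, ...) are registered with `mlconfig.register` somewhere in the repo; the config path is a placeholder.

```python
import mlconfig

# Placeholder path; any config laid out like the deleted
# models/svhn/resnet18.yaml should work.
config = mlconfig.load('path/to/resnet18.yaml')

# Each section's `name` is looked up in mlconfig's registry and the
# remaining keys are passed as constructor arguments.
model = config.model()                            # name: ResNet18, num_classes: 10
criterion = config.criterion()                    # name: CrossEntropyLoss
optimizer = config.optimizer(model.parameters())  # name: SGD, lr: 0.1, ...
scheduler = config.scheduler(optimizer)           # name: CosineAnnealingLR, T_max: $epochs
```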
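
The Quick Start note added to the README suggests that `models/__init__.py` imports `mlconfig`, so running only the notebook would otherwise pull in that dependency. A rough sketch of the notebook-only path it describes, with hyperparameters mirroring the deleted `models/svhn/resnet18.yaml` and torchvision's `resnet18` standing in for the repo's own `ResNet18` class (which the note recommends copy-pasting into the notebook):

```python
import torch
import torchvision

# Stand-in for the repo's ResNet18; in practice you would paste the repo's
# model definition into QuickStart.ipynb as the README note suggests.
model = torchvision.models.resnet18(num_classes=10)

# Training pieces mirroring the deleted models/svhn/resnet18.yaml
# (epochs: 30, lr: 0.1, weight_decay: 5e-4, momentum: 0.9).
criterion = torch.nn.CrossEntropyLoss()
optimizer = torch.optim.SGD(model.parameters(), lr=0.1,
                            weight_decay=5e-4, momentum=0.9)
scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=30, eta_min=0.0)
```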