added CompositeModel for blackbox transfer attack (#23)
jonasrauber committed Jun 20, 2017
1 parent e0b42dc commit 62aedc2
Showing 8 changed files with 84 additions and 6 deletions.
1 change: 1 addition & 0 deletions docs/modules/models.rst
@@ -26,6 +26,7 @@ Wrappers

ModelWrapper
GradientLess
CompositeModel

Detailed description
--------------------
2 changes: 1 addition & 1 deletion foolbox/attacks/base.py
@@ -5,7 +5,7 @@

if sys.version_info >= (3, 4):
ABC = abc.ABC
else:
else: # pragma: no cover
ABC = abc.ABCMeta('ABC', (), {})

from ..adversarial import Adversarial
2 changes: 1 addition & 1 deletion foolbox/criteria.py
@@ -49,7 +49,7 @@

if sys.version_info >= (3, 4):
ABC = abc.ABC
else:
else: # pragma: no cover
ABC = abc.ABCMeta('ABC', (), {})

import numpy as np
2 changes: 1 addition & 1 deletion foolbox/distances.py
@@ -37,7 +37,7 @@

if sys.version_info >= (3, 4):
ABC = abc.ABC
else:
else: # pragma: no cover
ABC = abc.ABCMeta('ABC', (), {})

import functools
1 change: 1 addition & 0 deletions foolbox/models/__init__.py
@@ -9,6 +9,7 @@

from .wrappers import ModelWrapper # noqa: F401
from .wrappers import GradientLess # noqa: F401
from .wrappers import CompositeModel # noqa: F401

from .tensorflow import TensorFlowModel # noqa: F401
from .pytorch import PyTorchModel # noqa: F401
2 changes: 1 addition & 1 deletion foolbox/models/base.py
@@ -5,7 +5,7 @@

if sys.version_info >= (3, 4):
ABC = abc.ABC
else:
else: # pragma: no cover
ABC = abc.ABCMeta('ABC', (), {})


61 changes: 59 additions & 2 deletions foolbox/models/wrappers.py
@@ -1,4 +1,5 @@
from .base import Model
from .base import DifferentiableModel


class ModelWrapper(Model):
@@ -31,12 +32,68 @@ def __exit__(self, exc_type, exc_value, traceback):


class GradientLess(ModelWrapper):
"""
Turns a model into a model without gradients.
"""Turns a model into a model without gradients.
"""

def batch_predictions(self, images):
return self.wrapped_model.batch_predictions(images)

def num_classes(self):
return self.wrapped_model.num_classes()


class CompositeModel(DifferentiableModel):
"""Combines predictions of a (black-box) model with the gradient of a
(substitute) model.

Parameters
----------
forward_model : :class:`Model`
The model that should be fooled and will be used for predictions.
backward_model : :class:`Model`
The model that provides the gradients.
"""

def __init__(self, forward_model, backward_model):
bounds = forward_model.bounds()
assert bounds == backward_model.bounds()

channel_axis = forward_model.channel_axis()
assert channel_axis == backward_model.channel_axis()

num_classes = forward_model.num_classes()
assert num_classes == backward_model.num_classes()

super(CompositeModel, self).__init__(
bounds=bounds,
channel_axis=channel_axis)

self.forward_model = forward_model
self.backward_model = backward_model
self._num_classes = num_classes

def num_classes(self):
return self._num_classes

def batch_predictions(self, images):
return self.forward_model.batch_predictions(images)

def predictions_and_gradient(self, image, label):
return self.backward_model.predictions_and_gradient(image, label)

def gradient(self, image, label):
return self.backward_model.gradient(image, label)

def __enter__(self):
assert self.forward_model.__enter__() == self.forward_model
assert self.backward_model.__enter__() == self.backward_model
return self

def __exit__(self, exc_type, exc_value, traceback):
r1 = self.forward_model.__exit__(exc_type, exc_value, traceback)
r2 = self.backward_model.__exit__(exc_type, exc_value, traceback)
if r1 is None and r2 is None:
return None
return (r1, r2) # pragma: no cover
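
For context, a minimal usage sketch (not part of this diff) of how CompositeModel is intended to be used for a black-box transfer attack. It assumes two already-constructed foolbox models, forward_model (the black-box model to fool) and backward_model (a differentiable substitute), plus an image and label in the models' input format; the FGSM call follows the usual foolbox attack interface and stands in for any gradient-based attack.

import foolbox
from foolbox.models import CompositeModel

# forward_model: black-box model whose predictions should be fooled
# backward_model: differentiable substitute model that supplies gradients
composite = CompositeModel(
    forward_model=forward_model,
    backward_model=backward_model)

# A gradient-based attack now queries the black box for predictions
# while following the substitute model's gradients (transfer attack).
attack = foolbox.attacks.FGSM(composite)
adversarial = attack(image, label)

Because batch_predictions is forwarded to forward_model while gradients come from backward_model, the black-box model never has to expose gradients; the assertions in the constructor ensure both models agree on bounds, channel axis, and number of classes before the composite is used.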
19 changes: 19 additions & 0 deletions foolbox/tests/test_model_wrappers.py
@@ -1,6 +1,7 @@
import numpy as np

from foolbox.models import ModelWrapper
from foolbox.models import CompositeModel


def test_context_manager(gl_bn_model):
@@ -16,3 +17,21 @@ def test_wrapping(gl_bn_model, bn_image):
assert np.all(
gl_bn_model.predictions(bn_image) ==
gl_bn_model.batch_predictions(bn_image[np.newaxis])[0])


def test_composite_model(gl_bn_model, bn_model, bn_image, bn_label):
model = CompositeModel(gl_bn_model, bn_model)
with model:
assert gl_bn_model.num_classes() == model.num_classes()
assert np.all(
gl_bn_model.predictions(bn_image) ==
model.predictions(bn_image))
assert np.all(
bn_model.gradient(bn_image, bn_label) ==
model.gradient(bn_image, bn_label))
assert np.all(
bn_model.predictions_and_gradient(bn_image, bn_label)[0] ==
model.predictions_and_gradient(bn_image, bn_label)[0])
assert np.all(
bn_model.predictions_and_gradient(bn_image, bn_label)[1] ==
model.predictions_and_gradient(bn_image, bn_label)[1])
