Skip to content
This repository has been archived by the owner on Apr 19, 2023. It is now read-only.

Commit

Permalink
Merge branch 'master' into super-dev
Browse files Browse the repository at this point in the history
  • Loading branch information
constantinpape committed Sep 28, 2018
2 parents e0720b8 + cd86689 commit 22737a6
Show file tree
Hide file tree
Showing 3 changed files with 12 additions and 112 deletions.
13 changes: 4 additions & 9 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -8,18 +8,13 @@ Inferno
.. image:: https://img.shields.io/pypi/v/inferno.svg
:target: https://pypi.python.org/pypi/inferno-pytorch

.. image:: https://img.shields.io/travis/infern-pytorch/inferno.svg
.. image:: https://travis-ci.org/inferno-pytorch/inferno.svg?branch=master
:target: https://travis-ci.org/inferno-pytorch/inferno

.. image:: https://readthedocs.org/projects/inferno-pytorch/badge/?version=latest
:target: http://inferno-pytorch.readthedocs.io/en/latest/?badge=latest
:alt: Documentation Status

.. image:: https://pyup.io/repos/github/inferno-pytorch/inferno/shield.svg
:target: https://pyup.io/repos/github/inferno-pytorch/inferno/
:alt: Updates



.. image:: http://svgshare.com/i/2j7.svg

Expand All @@ -34,7 +29,7 @@ It's a work-in-progress, but the first stable release (0.2) is underway!


* Free software: Apache Software License 2.0
* Documentation: https://pytorch-inferno.readthedocs.io (Work in progress).
* Documentation: http://inferno-pytorch.readthedocs.io (Work in Progress).


Features
Expand Down Expand Up @@ -131,11 +126,11 @@ and navigate to `localhost:6007` with your browser.
Installation
------------------------

Conda packages for linux and mac (only python 3) are available via
Conda packages for linux (only python 3) are available via

.. code:: bash
$ conda install -c inferno-pytorch inferno
$ conda install -c pytorch -c conda-forge inferno
Expand Down
102 changes: 2 additions & 100 deletions inferno/io/transform/image.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,105 +83,6 @@ def image_function(self, image):
ShapeError)
return rescaled_image

class RandomScaleSegmentation(Transform):
    """Randomly rescale an (image, segmentation) pair by a common factor.

    The image is interpolated with cubic splines (order 3), the label
    image with nearest-neighbour (order 0) so label values stay intact.

    Parameters
    ----------
    scale_range : tuple of float
        (min, max) range the scale factor is drawn from uniformly.
    resize : bool
        If True, the scaled output is cropped (scale > 1) or padded
        (scale < 1) back to the original spatial shape.
    pad_const : number
        Fill value used for constant padding when scale < 1.
    """

    def __init__(self, scale_range, resize=True, pad_const=0, **super_kwargs):
        super(RandomScaleSegmentation, self).__init__(**super_kwargs)
        self.scale_range = scale_range
        self.resize = resize
        self.pad_const = pad_const

    def build_random_variables(self):
        # Re-seed so dataloader worker processes don't share RNG state.
        np.random.seed()
        self.set_random_variable('seg_scale',
                                 np.random.uniform(low=self.scale_range[0],
                                                   high=self.scale_range[1]))

    def batch_function(self, image):
        """Apply the sampled scale to `(image, segmentation)`.

        Parameters
        ----------
        image : sequence of two arrays
            `image[0]` is the raw image, `image[1]` the segmentation;
            both are (channel, spatial...) shaped.

        Returns
        -------
        tuple of ndarray
            Scaled (and, if `resize`, shape-restored) image and labels.
        """
        scale = self.get_random_variable('seg_scale')
        # Spatial shape, excluding the leading channel axis.
        image_shape = np.array(image[0].shape[1:])

        with catch_warnings():
            simplefilter('ignore')
            # order=3: smooth interpolation for the raw image;
            # order=0: nearest neighbour keeps label ids valid.
            img = np.stack([zoom(x, scale, order=3) for x in image[0]])
            seg = np.stack([zoom(x, scale, order=0) for x in image[1]])
        new_shape = np.array(img.shape[1:])

        if self.resize:
            if scale > 1.:
                # Upscaled: crop symmetrically back to the original size.
                crop_l = (new_shape - image_shape) // 2
                crop_r = new_shape - image_shape - crop_l
                # NOTE: indexing must use a *tuple* of slices — a list of
                # slices is rejected by modern numpy.
                cropping = tuple([slice(None)] +
                                 [slice(c[0] if c[0] > 0 else None,
                                        -c[1] if c[1] > 0 else None)
                                  for c in zip(crop_l, crop_r)])
                img = img[cropping]
                seg = seg[cropping]
            else:
                # Downscaled: pad symmetrically back to the original size.
                pad_l = (image_shape - new_shape) // 2
                pad_r = image_shape - new_shape - pad_l
                padding = [(0, 0)] + list(zip(pad_l, pad_r))
                # BUG FIX: 'mirror' is not a valid np.pad mode, and
                # `constant_values` is only accepted with mode='constant'
                # (the original call raised at runtime). This also matches
                # the other copy of this transform in the file.
                img = np.pad(img, padding, 'constant',
                             constant_values=self.pad_const)
                seg = np.pad(seg, padding, 'constant',
                             constant_values=self.pad_const)
        return img, seg

class RandomCrop(Transform):
"""Crop input to a given size.
Expand Down Expand Up @@ -649,6 +550,7 @@ class RandomScaleSegmentation(Transform):
scale_range : tuple of floats defining (min, max) scales
    uniform range the random scale factor is drawn from
resize : if True, image is cropped or padded to the original size
pad_const: value used for constant padding
"""
def __init__(self, scale_range, resize=True, pad_const=0, **super_kwargs):
super(RandomScaleSegmentation, self).__init__(**super_kwargs)
Expand Down Expand Up @@ -687,7 +589,7 @@ def batch_function(self, image):
pad_r = image_shape - new_shape - pad_l
padding = [(0,0)] + list(zip(pad_l, pad_r))
img = np.pad(img, padding, 'constant', constant_values=self.pad_const)

seg = np.pad(seg, padding, 'constant', constant_values=self.pad_const)

return img, seg
9 changes: 6 additions & 3 deletions inferno/trainers/basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -533,7 +533,7 @@ def build_metric(self, method, **kwargs):
elif isinstance(method, str):
assert hasattr(metrics, method), \
"Could not find the metric '{}'.".format(method)
self._metric = getattr(metrics, method)()
self._metric = getattr(metrics, method)(**kwargs)
else:
raise NotImplementedError
return self
Expand Down Expand Up @@ -1657,7 +1657,7 @@ def save_model(self, to_directory=None):
pickle_module=self.pickle_module)
return self

def load(self, from_directory=None, best=False, filename=None):
def load(self, from_directory=None, best=False, filename=None, map_location=None):
"""
Load the trainer from checkpoint.
Expand All @@ -1671,6 +1671,8 @@ def load(self, from_directory=None, best=False, filename=None):
'best_checkpoint.pytorch'.
filename : str
Overrides the default filename.
map_location : function, torch.device, string or a dict
Specify how to remap storage locations.
Returns
-------
Expand All @@ -1684,7 +1686,8 @@ def load(self, from_directory=None, best=False, filename=None):
filename = self._best_checkpoint_filename if best else self._checkpoint_filename
# Load the dictionary
config_dict = torch.load(os.path.join(from_directory, filename),
pickle_module=self.pickle_module)
pickle_module=self.pickle_module, map_location=map_location)

# This is required to prevent an infinite save loop?
self._is_iteration_with_best_validation_score = False
# Set config
Expand Down

0 comments on commit 22737a6

Please sign in to comment.