This repository has been archived by the owner on Jul 10, 2021. It is now read-only.

Merge 2aa9c9d into 1d31b5e
alexjc committed Apr 2, 2016
2 parents 1d31b5e + 2aa9c9d commit c04f482
Showing 4 changed files with 86 additions and 8 deletions.
21 changes: 16 additions & 5 deletions sknn/backend/lasagne/mlp.py
@@ -25,7 +25,7 @@
import lasagne.nonlinearities as nl

from ..base import BaseBackend
from ...nn import Layer, Convolution, ansi
from ...nn import Layer, Convolution, Native, ansi


def explin(x):
@@ -142,10 +142,17 @@ def _create_convolution_layer(self, name, layer, network):
pool_size=layer.pool_shape,
stride=layer.pool_shape)

network.name = layer.name
return network

def _create_native_layer(self, name, layer, network):
if layer.units and 'num_units' not in layer.keywords:
layer.keywords['num_units'] = layer.units
return layer.type(network, *layer.args, **layer.keywords)

def _create_layer(self, name, layer, network):
if isinstance(layer, Native):
return self._create_native_layer(name, layer, network)

dropout = layer.dropout or self.dropout_rate
if dropout is not None:
network = lasagne.layers.dropout(network, dropout)
@@ -161,8 +168,6 @@ def _create_layer(self, name, layer, network):
normalize = layer.normalize or self.normalize
if normalize == 'batch':
network = lasagne.layers.batch_norm(network)

network.name = layer.name
return network

def _create_mlp(self, X, w=None):
@@ -180,6 +185,7 @@ def _create_mlp(self, X, w=None):
self.mlp = []
for i, layer in enumerate(self.layers):
network = self._create_layer(layer.name, layer, network)
network.name = layer.name
self.mlp.append(network)

log.info(
@@ -197,11 +203,16 @@ def _create_mlp(self, X, w=None):
# NOTE: Numbers don't match up exactly for pooling; one off. The logic is convoluted!
# assert count == numpy.product(space.shape) * space.num_channels,\
# "Mismatch in the calculated number of convolution layer outputs."
elif isinstance(l, Native):
log.debug(" - Nativ: {}{: <10}{} Output: {}{: <10}{} Channels: {}{}{}".format(
ansi.BOLD, l.type.__name__, ansi.ENDC,
ansi.BOLD, repr(space[2:]), ansi.ENDC,
ansi.BOLD, space[1], ansi.ENDC))
else:
log.debug(" - Dense: {}{: <10}{} Units: {}{: <4}{}".format(
ansi.BOLD, l.type, ansi.ENDC, ansi.BOLD, l.units, ansi.ENDC))
assert count == space[1],\
"Mismatch in the calculated number of dense layer outputs."
"Mismatch in the calculated number of dense layer outputs. {} != {}".format(count, space[1])

if self.weights is not None:
l = min(len(self.weights), len(self.mlp))
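In effect, `_create_native_layer` above is a thin shim: it forwards the stored constructor, positional arguments, and keywords straight to the backend, remapping the sknn-style `units` alias to Lasagne's `num_units` keyword when the caller has not already supplied one. A minimal sketch of the equivalent direct Lasagne call (the `incoming` input layer here is illustrative, not part of this commit):

import lasagne.layers as ly
import lasagne.nonlinearities as nl

# What N(ly.DenseLayer, units=24, nonlinearity=nl.leaky_rectify) boils down to:
# `units=24` is popped by Native.__init__ and re-injected as `num_units=24`.
incoming = ly.InputLayer(shape=(None, 16))   # the previous layer in the stack
network = ly.DenseLayer(incoming, num_units=24,
                        nonlinearity=nl.leaky_rectify)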
2 changes: 1 addition & 1 deletion sknn/mlp.py
@@ -21,7 +21,7 @@
import sklearn.preprocessing
import sklearn.cross_validation

from .nn import NeuralNetwork, Layer, Convolution, ansi
from .nn import NeuralNetwork, Layer, Convolution, Native, ansi
from . import backend


39 changes: 37 additions & 2 deletions sknn/nn.py
@@ -27,6 +27,7 @@ class ansi:
ENDC = '\033[0m'



class Layer(object):
"""
Specification for a layer to be passed to the neural network during construction. This
@@ -38,7 +39,7 @@ class Layer(object):
type: str
Select which activation function this layer should use, as a string. Specifically,
options are ``Rectifier``, ``Sigmoid``, ``Tanh``, and ``ExpLin`` for non-linear layers
and ``Linear``, ``Softmax`` or ``Gaussian`` for linear layers.
and ``Linear`` or ``Softmax`` for output layers.
name: str, optional
You can optionally specify a name for this layer, and its parameters
@@ -118,6 +119,40 @@ def __repr__(self):
return "<sknn.nn.%s `%s`: %s>" % (self.__class__.__name__, self.type, params)


class Native(object):
"""Special type of layer that is handled directly to the backend (e.g. Lasagne). This
can be used to construct more advanced networks that are not yet supported by the
default interface.
Note that using this as a layer type means your code may not be compatible with future
revisions or other backends, and that serialization may be affected.
Parameters
----------
constructor: class or callable
The layer type usable directly by the backend (e.g. Lasagne). This can also
be a callable function that acts as a layer constructor.
*args: list of arguments
All positional arguments are passed directly to the constructor when the
neural network is initialized.
**kwargs: dictionary of named arguments
All named arguments are passed to the constructor directly also, with the exception
of the parameters ``name``, ``units``, ``frozen``, ``weight_decay``, ``normalize``
which take on the same role as in :class:`sknn.nn.Layer`.
"""

def __init__(self, constructor, *args, **keywords):
for attr in ['name', 'units', 'frozen', 'weight_decay', 'normalize']:
setattr(self, attr, keywords.pop(attr, None))

self.type = constructor
self.args = args
self.keywords = keywords


class Convolution(Layer):
"""
Specification for a convolution layer to be passed to the neural network in construction.
@@ -426,7 +461,7 @@ def __init__(

self.layers = []
for i, layer in enumerate(layers):
assert isinstance(layer, Layer),\
assert isinstance(layer, Layer) or isinstance(layer, Native),\
"Specify each layer as an instance of a `sknn.mlp.Layer` object."

# Layer names are optional, if not specified then generate one.
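The `Native` specification above drops into an ordinary sknn layer list alongside `Layer` and `Convolution` instances. A minimal usage sketch, mirroring the tests added below and assuming the Lasagne backend:

import lasagne.layers as ly
from sknn.mlp import Regressor, Layer, Native

# Mix a backend-native Lasagne layer in with standard sknn layers.
nn = Regressor(
    layers=[
        Layer("Rectifier", units=12),
        Native(ly.GaussianNoiseLayer),   # handed straight to Lasagne
        Layer("Linear"),
    ],
    n_iter=1)
# nn.fit(X, y) then trains the whole stack end-to-end as usual.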
32 changes: 32 additions & 0 deletions sknn/tests/test_native.py
@@ -0,0 +1,32 @@
import unittest
from nose.tools import (assert_is_not_none, assert_true, assert_raises, assert_equal)

import io
import pickle
import numpy

from sknn.mlp import Regressor as MLPR
from sknn.mlp import Native as N, Layer as L

import lasagne.layers as ly
import lasagne.nonlinearities as nl


class TestNativeLasagneLayer(unittest.TestCase):

def _run(self, nn):
a_in, a_out = numpy.ones((8,16)), numpy.ones((8,4))
nn.fit(a_in, a_out)

def test_DenseLinear(self):
nn = MLPR(layers=[N(ly.DenseLayer, num_units=4, nonlinearity=nl.linear)], n_iter=1)
self._run(nn)

def test_GaussianNoise(self):
nn = MLPR(layers=[L("Rectifier", units=12), N(ly.GaussianNoiseLayer), L("Linear")], n_iter=1)
self._run(nn)

def test_LeakyRectifier(self):
nn = MLPR(layers=[N(ly.DenseLayer, units=24, nonlinearity=nl.leaky_rectify),
L("Linear")], n_iter=1)
self._run(nn)
