Skip to content
This repository has been archived by the owner on Jul 10, 2021. It is now read-only.

Commit

Permalink
Using vector for mask weights if they are set, otherwise a scalar that becomes 1.0. Simplifies the code.
Browse files Browse the repository at this point in the history
  • Loading branch information
alexjc committed Nov 22, 2015
1 parent bb2b9ae commit bcf9b22
Show file tree
Hide file tree
Showing 2 changed files with 8 additions and 8 deletions.
10 changes: 5 additions & 5 deletions sknn/backend/lasagne/mlp.py
Expand Up @@ -138,10 +138,10 @@ def _create_layer(self, name, layer, network):
num_units=layer.units,
nonlinearity=self._get_activation(layer))

def _create_mlp(self, X):
def _create_mlp(self, X, w=None):
self.data_input = T.tensor4('X') if self.is_convolution else T.matrix('X')
self.data_output = T.matrix('y')
self.data_mask = T.vector('m')
self.data_mask = T.vector('m') if w is not None else T.scalar('m')
self.data_correct = T.matrix('yp')

lasagne.random.get_rng().seed(self.random_state)
Expand Down Expand Up @@ -187,12 +187,12 @@ def _create_mlp(self, X):
self.network_output = lasagne.layers.get_output(network, deterministic=True)
self.f = theano.function([self.data_input], self.network_output, allow_input_downcast=True)

def _initialize_impl(self, X, y=None):
def _initialize_impl(self, X, y=None, w=None):
if self.is_convolution:
X = numpy.transpose(X, (0, 3, 1, 2))

if self.mlp is None:
self._create_mlp(X)
self._create_mlp(X, w)

# Can do partial initialization when predicting, no trainer needed.
if y is None:
Expand Down Expand Up @@ -254,7 +254,7 @@ def _batch_impl(self, X, y, w, processor, mode, output, shuffle):
self._do_callback('on_batch_start', locals())

if mode == 'train':
loss += processor(Xb, yb, wb)
loss += processor(Xb, yb, wb if wb is not None else 1.0)
else:
loss += processor(Xb, yb)
count += 1
Expand Down
6 changes: 3 additions & 3 deletions sknn/mlp.py
Expand Up @@ -31,14 +31,14 @@ class MultiLayerPerceptron(NeuralNetwork, sklearn.base.BaseEstimator):
def _setup(self):
pass

def _initialize(self, X, y=None):
def _initialize(self, X, y=None, w=None):
assert not self.is_initialized,\
"This neural network has already been initialized."
self._create_specs(X, y)

backend.setup()
self._backend = backend.MultiLayerPerceptronBackend(self)
return self._backend._initialize_impl(X, y)
return self._backend._initialize_impl(X, y, w)

def _check_layer(self, layer, required, optional=[]):
required.extend(['name', 'type'])
Expand Down Expand Up @@ -203,7 +203,7 @@ def _fit(self, X, y, w=None):
X, y = self._reshape(X, y)

if not self.is_initialized:
X, y = self._initialize(X, y)
X, y = self._initialize(X, y, w)

log.info("Training on dataset of {:,} samples with {:,} total size.".format(data_shape[0], data_size))
if data_shape[1:] != X.shape[1:]:
Expand Down

0 comments on commit bcf9b22

Please sign in to comment.