
Commit
Merge e5a87f9 into 3a06089
mangwang committed Nov 20, 2015
2 parents 3a06089 + e5a87f9 commit b56bd4a
Showing 2 changed files with 9 additions and 6 deletions.
2 changes: 2 additions & 0 deletions sknn/backend/lasagne/mlp.py
@@ -276,6 +276,8 @@ def _array_to_mlp(self, array, nn):
         for layer, data in zip(nn, array):
             if data is None: continue
             weights, biases = data
+            weights = weights.astype(theano.config.floatX)
+            biases = biases.astype(theano.config.floatX)

             while not hasattr(layer, 'W') and not hasattr(layer, 'b'):
                 layer = layer.input_layer
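The cast matters because Lasagne/Theano parameters are shared variables allocated with dtype theano.config.floatX (commonly 'float32' on GPU configurations), while NumPy arrays default to float64. A minimal standalone sketch of the idea, using a hypothetical shared variable W rather than sknn's actual layer objects:

    import numpy
    import theano

    # Shared variables are allocated with dtype theano.config.floatX,
    # which is 'float32' on typical GPU configurations.
    W = theano.shared(numpy.zeros((4, 3), dtype=theano.config.floatX))

    # NumPy defaults to float64; on a float32 configuration a direct
    # set_value raises a TypeError, so the array is cast first.
    weights = numpy.random.uniform(-1.0, 1.0, (4, 3))
    W.set_value(weights.astype(theano.config.floatX))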
13 changes: 7 additions & 6 deletions sknn/tests/test_data.py
@@ -4,6 +4,7 @@
 import logging

 import numpy
+import theano
 from sknn.mlp import Regressor as MLPR
 from sknn.mlp import Layer as L, Convolution as C

@@ -60,8 +61,8 @@ def test_SetLayerParamsList(self):
         nn.set_parameters([(weights, biases)])

         p = nn.get_parameters()
-        assert_true((p[0].weights == weights).all())
-        assert_true((p[0].biases == biases).all())
+        assert_true((p[0].weights == weights.astype(theano.config.floatX)).all())
+        assert_true((p[0].biases == biases.astype(theano.config.floatX)).all())

     def test_LayerParamsSkipOneWithNone(self):
         nn = MLPR(layers=[L("Sigmoid", units=32), L("Linear", name='abcd')])
@@ -73,8 +74,8 @@ def test_LayerParamsSkipOneWithNone(self):
         nn.set_parameters([None, (weights, biases)])

         p = nn.get_parameters()
-        assert_true((p[1].weights == weights).all())
-        assert_true((p[1].biases == biases).all())
+        assert_true((p[1].weights == weights.astype(theano.config.floatX)).all())
+        assert_true((p[1].biases == biases.astype(theano.config.floatX)).all())

     def test_SetLayerParamsDict(self):
         nn = MLPR(layers=[L("Sigmoid", units=32), L("Linear", name='abcd')])
@@ -86,5 +87,5 @@ def test_SetLayerParamsDict(self):
         nn.set_parameters({'abcd': (weights, biases)})

         p = nn.get_parameters()
-        assert_true((p[1].weights == weights).all())
-        assert_true((p[1].biases == biases).all())
+        assert_true((p[1].weights == weights.astype(theano.config.floatX)).all())
+        assert_true((p[1].biases == biases.astype(theano.config.floatX)).all())
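The tests now compare against the cast copies rather than the raw inputs because the parameters are stored as floatX: when floatX is 'float32', the round-trip through the network is lossy, so the original float64 arrays need not compare equal bit-for-bit. A small sketch of the dtype round-trip (array shape is illustrative):

    import numpy
    import theano

    weights = numpy.random.uniform(-1.0, 1.0, (32, 16))  # float64 by default
    stored = weights.astype(theano.config.floatX)         # what the network keeps

    # True when floatX == 'float64', but usually False for 'float32',
    # because the cast rounds each value.
    print((stored == weights).all())
    print((stored == weights.astype(theano.config.floatX)).all())  # always True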
