Skip to content
This repository has been archived by the owner on Jul 10, 2021. It is now read-only.

Commit

Permalink
Replaced double-underscores to fit with PEP8, and testing the types r…
Browse files Browse the repository at this point in the history
…eturned by the serialisation. If they are numpy arrays, then the result is guaranteed to be cross-platform.
  • Loading branch information
alexjc committed Apr 26, 2015
1 parent 9c33047 commit 8c32d48
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 4 deletions.
8 changes: 4 additions & 4 deletions sknn/mlp.py
Expand Up @@ -313,7 +313,7 @@ def _create_mlp(self):
input_space=self.input_space)

if self.weights is not None:
self.__array_to_mlp(self.weights, self.mlp)
self._array_to_mlp(self.weights, self.mlp)
self.weights = None

inputs = self.mlp.get_input_space().make_theano_batch()
Expand Down Expand Up @@ -388,14 +388,14 @@ def __getstate__(self):
"The neural network has not been initialized."

d = self.__dict__.copy()
d['weights'] = self.__mlp_to_array()
d['weights'] = self._mlp_to_array()

for k in ['ds', 'vs', 'f', 'trainer', 'mlp']:
if k in d:
del d[k]
return d

def __mlp_to_array(self):
def _mlp_to_array(self):
return [(l.get_weights(), l.get_biases()) for l in self.mlp.layers]

def __setstate__(self, d):
Expand All @@ -404,7 +404,7 @@ def __setstate__(self, d):
setattr(self, k, None)
self._create_mlp()

def __array_to_mlp(self, array, nn):
def _array_to_mlp(self, array, nn):
for layer, (weights, biases) in zip(nn.layers, array):
assert layer.get_weights().shape == weights.shape
layer.set_weights(weights)
Expand Down
5 changes: 5 additions & 0 deletions sknn/tests/test_linear.py
Expand Up @@ -76,6 +76,11 @@ def setUp(self):
buf.seek(0)
self.nn = pickle.load(buf)

def test_TypeOfWeightsArray(self):
    """Serialized parameters must come back as numpy arrays.

    Plain ``numpy.ndarray`` weights/biases are what guarantees the pickled
    network is portable across platforms.
    """
    for weights, biases in self.nn._mlp_to_array():
        assert_equal(type(weights), numpy.ndarray)
        assert_equal(type(biases), numpy.ndarray)

def test_FitAutoInitialize(self):
# Override base class test, you currently can't re-train a network that
# was serialized and deserialized.
Expand Down

0 comments on commit 8c32d48

Please sign in to comment.