Move package namespace to lmj.nn !

commit 659a929135a926f1c9dd3baf3f4fc5f3e1b3970e (1 parent: 971bc09)
@lmjohns3 authored
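
This commit renames the package namespace from lmj.tnn to lmj.nn across the example scripts, the library modules, and setup.py. As a rough migration sketch for downstream code, assembled from the imports and classes that appear in the diffs below (the snippet itself is hypothetical, not a file in this commit):

    import lmj.nn                      # was: import lmj.tnn

    lmj.nn.enable_default_logging()    # was: lmj.tnn.enable_default_logging()

    class Main(lmj.nn.Main):           # was: lmj.tnn.Main
        def get_network(self):
            return lmj.nn.Classifier   # was: lmj.tnn.Classifier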
8 examples/mnist-autoencoder.py
@@ -7,9 +7,9 @@
import tempfile
import urllib
-import lmj.tnn
+import lmj.nn
-lmj.tnn.enable_default_logging()
+lmj.nn.enable_default_logging()
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
@@ -19,9 +19,9 @@
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
-class Main(lmj.tnn.Main):
+class Main(lmj.nn.Main):
def get_network(self):
- return lmj.tnn.Autoencoder
+ return lmj.nn.Autoencoder
def get_datasets(self):
return [x for x, _ in cPickle.load(gzip.open(DATASET))]
8 examples/mnist-classifier.py
@@ -7,9 +7,9 @@
import tempfile
import urllib
-import lmj.tnn
+import lmj.nn
-lmj.tnn.enable_default_logging()
+lmj.nn.enable_default_logging()
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
@@ -19,9 +19,9 @@
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
-class Main(lmj.tnn.Main):
+class Main(lmj.nn.Main):
def get_network(self):
- return lmj.tnn.Classifier
+ return lmj.nn.Classifier
def get_datasets(self):
return [(x, y.astype('int32')) for x, y in cPickle.load(gzip.open(DATASET))]
21 examples/recurrent-phase.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+
+import logging
+import numpy as np
+
+import lmj.nn
+
+lmj.nn.enable_default_logging()
+
+class Main(lmj.nn.Main):
+ def get_network(self):
+ return lmj.nn.recurrent.Autoencoder
+
+ def get_datasets(self):
+ t = np.linspace(0, 4 * np.pi, 512)
+ train = np.asarray([np.sin(t + i) for i in range(512)])
+ dev = np.asarray([np.cos(t + i) for i in range(64)])
+ return train, dev
+
+m = Main(layers=(1, 3, 1))
+m.train()
0  lmj/tnn/__init__.py → lmj/nn/__init__.py
File renamed without changes
0  lmj/tnn/dataset.py → lmj/nn/dataset.py
File renamed without changes
0  lmj/tnn/feedforward.py → lmj/nn/feedforward.py
File renamed without changes
0  lmj/tnn/flags.py → lmj/nn/flags.py
File renamed without changes
0  lmj/tnn/log.py → lmj/nn/log.py
File renamed without changes
0  lmj/tnn/recurrent.py → lmj/nn/recurrent.py
File renamed without changes
14 lmj/tnn/trainer.py → lmj/nn/trainer.py
@@ -21,7 +21,7 @@
'''This file contains optimization methods for neural networks.'''
import itertools
-import lmj.tnn
+import lmj.nn
import numpy as np
import numpy.random as rng
import theano
@@ -157,7 +157,7 @@ def __init__(self, network, **kwargs):
network.inputs,
network.y,
[network.J(**kwargs)] + network.monitors,
- network.hiddens[0] if isinstance(network, lmj.tnn.recurrent.Network) else None)
+ network.hiddens[0] if isinstance(network, lmj.nn.recurrent.Network) else None)
logging.info('%d parameter updates during training', len(self.params))
# fix mapping from kwargs into a dict to send to the hf optimizer
@@ -237,11 +237,11 @@ def train(self, train_set, valid_set=None):
first = lambda x: x[0] if isinstance(x, (tuple, list)) else x
bs = len(first(train_set.minibatches[0]))
p = lambda z: np.vstack(first(x) for x in z.minibatches)
- _train = lmj.tnn.Dataset(
+ _train = lmj.nn.Dataset(
'train-0', p(train_set), size=bs, batches=train_set.limit)
_valid = None
if valid_set is not None:
- _valid = lmj.tnn.Dataset(
+ _valid = lmj.nn.Dataset(
'valid-0', p(valid_set), size=bs, batches=valid_set.limit)
while i < len(self.network.biases) - 1:
@@ -253,7 +253,7 @@ def train(self, train_set, valid_set=None):
logging.info('layerwise training: layer %d with %d hidden units', i + 1, k)
# train a phantom autoencoder object on our dataset
- ae = lmj.tnn.Autoencoder([n, k, n], TT.nnet.sigmoid)
+ ae = lmj.nn.Autoencoder([n, k, n], TT.nnet.sigmoid)
t = SGD(ae, **self.kwargs)
t.train(_train, _valid)
@@ -264,10 +264,10 @@ def train(self, train_set, valid_set=None):
# map data through the network for the next layer
i += 1
p = lambda z: np.vstack(ae.forward(x[0])[0] for x in z.minibatches)
- _train = lmj.tnn.Dataset(
+ _train = lmj.nn.Dataset(
'train-%d' % i, p(_train), size=bs, batches=_train.limit)
if _valid is not None:
- _valid = lmj.tnn.Dataset(
+ _valid = lmj.nn.Dataset(
'valid-%d' % i, p(_valid), size=bs, batches=_valid.limit)
4 setup.py
@@ -2,8 +2,8 @@
import setuptools
setuptools.setup(
- name='lmj.tnn',
- version='0.3.1',
+ name='lmj.nn',
+ version='0.4.0',
namespace_packages=['lmj'],
packages=setuptools.find_packages(),
author='Leif Johnson',
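
Since setup.py renames the distribution from lmj.tnn to lmj.nn and bumps the version from 0.3.1 to 0.4.0, an existing install would be refreshed roughly like this (illustrative commands, assuming the package is published or installed from this checkout under the new name):

    pip uninstall lmj.tnn
    pip install lmj.nn         # or run: python setup.py install   from this checkout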