Skip to content
This repository has been archived by the owner on Jul 10, 2021. It is now read-only.

Commit

Permalink
Merge 8eb245c into d221c57
Browse files Browse the repository at this point in the history
  • Loading branch information
leconteur committed May 19, 2015
2 parents d221c57 + 8eb245c commit 3b5d4dd
Show file tree
Hide file tree
Showing 16 changed files with 956 additions and 3 deletions.
1 change: 1 addition & 0 deletions sknn/.idea/.name

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 4 additions & 0 deletions sknn/.idea/encodings.xml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

37 changes: 37 additions & 0 deletions sknn/.idea/misc.xml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

8 changes: 8 additions & 0 deletions sknn/.idea/modules.xml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 5 additions & 0 deletions sknn/.idea/scopes/scope_settings.xml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

8 changes: 8 additions & 0 deletions sknn/.idea/sknn.iml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 6 additions & 0 deletions sknn/.idea/vcs.xml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

465 changes: 465 additions & 0 deletions sknn/.idea/workspace.xml

Large diffs are not rendered by default.

86 changes: 86 additions & 0 deletions sknn/.ropeproject/config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
# The default ``config.py``


def set_prefs(prefs):
"""This function is called before opening the project"""

# Specify which files and folders to ignore in the project.
# Changes to ignored resources are not added to the history and
# VCSs. Also they are not returned in `Project.get_files()`.
# Note that ``?`` and ``*`` match all characters but slashes.
# '*.pyc': matches 'test.pyc' and 'pkg/test.pyc'
# 'mod*.pyc': matches 'test/mod1.pyc' but not 'mod/1.pyc'
# '.svn': matches 'pkg/.svn' and all of its children
# 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o'
# 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o'
prefs['ignored_resources'] = ['*.pyc', '*~', '.ropeproject',
'.hg', '.svn', '_svn', '.git',
'__pycache__']

# Specifies which files should be considered python files. It is
# useful when you have scripts inside your project. Only files
# ending with ``.py`` are considered to be python files by
# default.
#prefs['python_files'] = ['*.py']

# Custom source folders: By default rope searches the project
# for finding source folders (folders that should be searched
# for finding modules). You can add paths to that list. Note
# that rope guesses project source folders correctly most of the
# time; use this if you have any problems.
# The folders should be relative to project root and use '/' for
# separating folders regardless of the platform rope is running on.
# 'src/my_source_folder' for instance.
#prefs.add('source_folders', 'src')

# You can extend python path for looking up modules
#prefs.add('python_path', '~/python/')

# Should rope save object information or not.
prefs['save_objectdb'] = True
prefs['compress_objectdb'] = False

# If `True`, rope analyzes each module when it is being saved.
prefs['automatic_soa'] = True
# The depth of calls to follow in static object analysis
prefs['soa_followed_calls'] = 0

# If `False` when running modules or unit tests "dynamic object
# analysis" is turned off. This makes them much faster.
prefs['perform_doa'] = True

# Rope can check the validity of its object DB when running.
prefs['validate_objectdb'] = True

# How many undos to hold?
prefs['max_history_items'] = 32

# Shows whether to save history across sessions.
prefs['save_history'] = True
prefs['compress_history'] = False

# Set the number spaces used for indenting. According to
# :PEP:`8`, it is best to use 4 spaces. Since most of rope's
# unit-tests use 4 spaces it is more reliable, too.
prefs['indent_size'] = 4

# Builtin and c-extension modules that are allowed to be imported
# and inspected by rope.
prefs['extension_modules'] = []

# Add all standard c-extensions to extension_modules list.
prefs['import_dynload_stdmods'] = True

# If `True` modules with syntax errors are considered to be empty.
# The default value is `False`; When `False` syntax errors raise
# `rope.base.exceptions.ModuleSyntaxError` exception.
prefs['ignore_syntax_errors'] = False

# If `True`, rope ignores unresolvable imports. Otherwise, they
# appear in the importing namespace.
prefs['ignore_bad_imports'] = False


def project_opened(project):
    """Hook invoked by rope immediately after the project is opened.

    Intentionally a no-op; add any post-open customization here.
    """
Binary file added sknn/.ropeproject/globalnames
Binary file not shown.
Binary file added sknn/.ropeproject/history
Binary file not shown.
Binary file added sknn/.ropeproject/objectdb
Binary file not shown.
15 changes: 12 additions & 3 deletions sknn/mlp.py
Original file line number Diff line number Diff line change
Expand Up @@ -186,7 +186,11 @@ def _create_layer(self, name, layer, irange):
return mlp.Softmax(
layer_name=layer.name,
n_classes=layer.units,
irange=irange)
irange=irange,
init_bias_target_marginals=layer.init_bias_target_marginals)

if layer.type == "Pretrained":
return mlp.PretrainedLayer(layer_name=layer.name,layer_content=layer.content)

def _create_mlp(self):
mlp.logger.setLevel(logging.WARNING)
Expand All @@ -205,6 +209,10 @@ def _create_mlp(self):
lim *= numpy.sqrt(2)
elif layer.type == 'Sigmoid':
lim *= 4
if layer.init_bias_target_marginals is not None:
X, y = layer.init_bias_target_marginals
layer.init_bias_target_marginals = self._create_matrix_input(X=X, y=y)[0]
layer.init_bias_target_marginals.y_labels = y

mlp_layer = self._create_layer(layer.name, layer, irange=lim)
mlp_layers.append(mlp_layer)
Expand All @@ -221,7 +229,7 @@ def _create_mlp(self):

for l, p, count in zip(self.layers, self.mlp.layers, self.unit_counts[1:]):
space = p.get_output_space()
if isinstance(l, Convolution):
if isinstance(l, Convolution):
log.debug(" - Convl: {}{: <10}{} Output: {}{: <10}{} Channels: {}{}{}".format(
ansi.BOLD, l.type, ansi.ENDC,
ansi.BOLD, repr(space.shape), ansi.ENDC,
Expand Down Expand Up @@ -254,7 +262,7 @@ def _create_specs(self, X, y=None):

# Then compute the number of units in each layer for initialization.
self.unit_counts = [numpy.product(X.shape[1:]) if self.is_convolution else X.shape[1]]
res = X.shape[1:3] if self.is_convolution else None
res = X.shape[1:3] if self.is_convolution else None
for l in self.layers:
if isinstance(l, Convolution):
if l.border_mode == 'valid':
Expand Down Expand Up @@ -296,6 +304,7 @@ def _initialize(self, X, y):
self.trainer = self._create_mlp_trainer(self.vs)
self.trainer.setup(self.mlp, self.ds)


@property
def is_initialized(self):
"""Check if the neural network was setup already.
Expand Down
4 changes: 4 additions & 0 deletions sknn/nn.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,7 @@ def __init__(
units=None,
pieces=None,
weight_decay=None,
init_bias_target_marginals=None,
dropout=None):

assert warning is None,\
Expand All @@ -100,6 +101,9 @@ def __init__(
self.pieces = pieces
self.weight_decay = weight_decay
self.dropout = dropout
if init_bias_target_marginals is not None:
assert type == 'Softmax', 'The init_bias_target_marginals is only defined for softmax layer.'
self.init_bias_target_marginals = init_bias_target_marginals

def set_params(self, **params):
"""Setter for internal variables that's compatible with ``scikit-learn``.
Expand Down
Loading

0 comments on commit 3b5d4dd

Please sign in to comment.