Skip to content

Commit

Permalink
Learning Python3 linting, doctests, napoleon, sphinx, and google docstring format
Browse files Browse the repository at this point in the history
  • Loading branch information
dsblank committed Aug 8, 2017
1 parent c788f5f commit b255fa0
Show file tree
Hide file tree
Showing 2 changed files with 149 additions and 30 deletions.
46 changes: 33 additions & 13 deletions conx/layers.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,17 +58,16 @@

#------------------------------------------------------------------------

class BaseLayer():
class _BaseLayer():
"""
The base class for all conx layers.
See :any:`Layer` for more details.
"""
ACTIVATION_FUNCTIONS = ('relu', 'sigmoid', 'linear', 'softmax', 'tanh')
CLASS = None

def __init__(self, name, *args, **params):
"""
All conx layers require the name as first argument.
"""
if not (isinstance(name, str) and len(name) > 0):
raise Exception('bad layer name: %s' % (name,))
self.name = name
Expand Down Expand Up @@ -287,16 +286,37 @@ def tooltip(self):
retval += "\n %s = %s" % (key, self.params[key])
return retval

class Layer(BaseLayer):
class Layer(_BaseLayer):
"""
For Dense and Input type layers.
The default layer type. Will create either an InputLayer, or DenseLayer,
depending on its context after :any:`Network.connect`.
Arguments:
name: The name of the layer. Must be unique in this network.
Examples:
>>> layer = Layer("input", 10)
>>> layer.name
'input'
>>> from conx import Network
>>> net = Network("XOR2")
>>> net.add(Layer("input", 2))
>>> net.add(Layer("hidden", 5))
>>> net.add(Layer("output", 2))
>>> net.connect()
>>> net["input"].kind()
'input'
>>> net["output"].kind()
'output'
Note:
See also: :any:`Network`, :any:`Network.add`, and :any:`Network.connect`
for more information. See https://keras.io/ for more information on
Keras layers.
"""
CLASS = keras.layers.Dense
def __init__(self, name, shape, **params):
"""
This class represents either an InputLayer or a DenseLayer
depending on the kind of layer (input vs. hidden/output).
"""
def __init__(self, name: str, shape, **params):
super().__init__(name, **params)
if not valid_shape(shape):
raise Exception('bad shape: %s' % (shape,))
Expand Down Expand Up @@ -365,7 +385,7 @@ def process_class_docstring(docstring):
return docstring

## Dynamically load all of the keras layers, making a conx layer:
## All of these will have BaseLayer as their superclass:
## All of these will have _BaseLayer as their superclass:
keras_module = sys.modules["keras.layers"]
for (name, obj) in inspect.getmembers(keras_module):
if type(obj) == type and issubclass(obj, (keras.engine.Layer, )):
Expand All @@ -378,7 +398,7 @@ def process_class_docstring(docstring):
docstring = pypandoc.convert(process_class_docstring(docstring_md), "rst", "markdown_github")
except:
pass
locals()[new_name] = type(new_name, (BaseLayer,),
locals()[new_name] = type(new_name, (_BaseLayer,),
{"CLASS": obj,
"__doc__": docstring})

Expand Down
133 changes: 116 additions & 17 deletions conx/network.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,8 @@
from .utils import *
from .layers import Layer

from typing import Any

try:
from IPython import get_ipython
except:
Expand All @@ -46,15 +48,73 @@
class Network():
"""
The main class for the conx neural network package.
Arguments:
name: Required. The name of the network.
sizes: Optional numbers. Defines the sizes of layers of a sequential
network. These will be created, added, and connected automatically.
config: Configuration overrides for the network.
Note:
To have a complete, operating network, you must do the following items:
1. create a network
2. add layers
3. connect the layers
4. compile the network
See also :any:`Layer`, :any:`Network.add`, :any:`Network.connect`,
and :any:`Network.compile`.
Examples:
>>> net = Network("XOR1", 2, 5, 2)
>>> len(net.layers)
3
>>> net = Network("XOR2")
>>> net.add(Layer("input", 2))
>>> net.add(Layer("hidden", 5))
>>> net.add(Layer("output", 2))
>>> net.connect()
>>> len(net.layers)
3
>>> net = Network("XOR3")
>>> net.add(Layer("input", 2))
>>> net.add(Layer("hidden", 5))
>>> net.add(Layer("output", 2))
>>> net.connect("input", "hidden")
>>> net.connect("hidden", "output")
>>> len(net.layers)
3
>>> net = Network("NMIST")
>>> net.name
'NMIST'
>>> len(net.layers)
0
>>> net = Network("NMIST", 10, 5, 1)
>>> len(net.layers)
3
>>> net = Network("NMIST", 10, 5, 5, 1, activation="sigmoid")
>>> net.config["activation"]
'sigmoid'
>>> net["output"].activation == "sigmoid"
True
>>> net["hidden1"].activation == "sigmoid"
True
>>> net["hidden2"].activation == "sigmoid"
True
>>> net["input"].activation is None
True
>>> net.layers[0].name == "input"
True
"""
OPTIMIZERS = ("sgd", "rmsprop", "adagrad", "adadelta", "adam",
"adamax", "nadam", "tfoptimizer")
def __init__(self, name, *sizes, **config):
"""
Create a neural network.
if sizes is given, create a full network.
Optional keyword: activation
"""
def __init__(self, name: str, *sizes: int, **config: Any):
if not isinstance(name, str):
raise Exception("first argument should be a name for the network")
self.config = {
Expand Down Expand Up @@ -141,21 +201,58 @@ def _repr_svg_(self):
def __repr__(self):
return "<Network name='%s'>" % self.name

def add(self, layer):
def add(self, layer: Layer):
"""
Add a layer to the network layer connections. Order is not
important, unless using the default net.connect() form.
important, unless calling :any:`Network.connect` without any
arguments.
Arguments:
layer: A layer instance.
Examples:
>>> net = Network("XOR2")
>>> net.add(Layer("input", 2))
>>> len(net.layers)
1
>>> net = Network("XOR3")
>>> net.add(Layer("input", 2))
>>> net.add(Layer("hidden", 5))
>>> net.add(Layer("output", 2))
>>> len(net.layers)
3
Note:
See :any:`Network` for more information.
"""
if layer.name in self.layer_dict:
raise Exception("duplicate layer name '%s'" % layer.name)
self.layers.append(layer)
self.layer_dict[layer.name] = layer

def connect(self, from_layer_name=None, to_layer_name=None):
def connect(self, from_layer_name:str=None, to_layer_name:str=None):
"""
Connect two layers together if called with arguments. If
called with no arguments, then it will make a sequential
run through the layers in order added.
Arguments:
from_layer_name: Name of layer where connect begins.
to_layer_name: Name of layer where connection ends.
If both from_layer_name and to_layer_name are None, then
all of the layers are connected sequentially in the order
added.
Examples:
>>> net = Network("XOR2")
>>> net.add(Layer("input", 2))
>>> net.add(Layer("hidden", 5))
>>> net.add(Layer("output", 2))
>>> net.connect()
>>> [layer.name for layer in net["input"].outgoing_connections]
['hidden']
"""
if from_layer_name is None and to_layer_name is None:
for i in range(len(self.layers) - 1):
Expand Down Expand Up @@ -669,7 +766,7 @@ def train(self, epochs=1, accuracy=None, batch_size=None,
validation_inputs = self.test_inputs
validation_targets = self.test_targets
if verbose: print("Training...")
with InterruptHandler() as handler:
with _InterruptHandler() as handler:
if accuracy is None: # train them all using fit
result = self.model.fit(self.train_inputs, self.train_targets,
batch_size=batch_size,
Expand Down Expand Up @@ -955,6 +1052,8 @@ def propagate_to_image(self, layer_name, input, batch_size=32):
def compile(self, **kwargs):
"""
Check and compile the network.
See https://keras.io/ `Model.compile()` method for more details.
"""
## Error checking:
if len(self.layers) == 0:
Expand Down Expand Up @@ -1622,7 +1721,7 @@ def pf(self, vector, **opts):
"""
Pretty-format a vector. Returns string.
Args:
Parameters:
vector (list): The first parameter.
pp_max_length (int): Number of decimal places to show for each
value in vector.
Expand All @@ -1648,7 +1747,7 @@ def pf(self, vector, **opts):
truncated = len(vector) > max_length
return "[" + ", ".join([("%." + str(precision) + "f") % v for v in vector[:max_length]]) + ("..." if truncated else "") + "]"

def to_array(self):
def to_array(self) -> list:
"""
Get the weights of a network as a flat, one-dimensional list.
Expand All @@ -1661,20 +1760,20 @@ def to_array(self):
103
Returns:
list: All of weights in a single, flat list.
All of weights and biases of the network in a single, flat list.
"""
array = []
for layer in self.model.layers:
for weight in layer.get_weights():
array.extend(weight.flatten())
return array

def from_array(self, array):
def from_array(self, array: list):
"""
Load the weights from a list.
Args:
array (list) - a sequence (e.g., list, np.array) of numbers
Arguments:
array: a sequence (e.g., list, np.array) of numbers
Example:
>>> from conx import Network
Expand All @@ -1697,7 +1796,7 @@ def from_array(self, array):
position += size
layer.set_weights(new_weights)

class InterruptHandler():
class _InterruptHandler():
"""
Class for handling interrupts so that state is not left
in an inconsistent situation.
Expand Down

0 comments on commit b255fa0

Please sign in to comment.