Commit 0372174: Initial commit
mouradmourafiq authored and mmourafiq committed May 6, 2017
Showing 50 changed files with 8,752 additions and 0 deletions.
115 changes: 115 additions & 0 deletions .gitignore
@@ -0,0 +1,115 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# temp files
*~

# C extensions
*.so

# Distribution / packaging
.Python
docker/environment/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover
.hypothesis/

# Translations
*.mo

# Mr Developer
.mr.developer.cfg
.pydevproject
.project
.settings/
.idea/
.DS_Store

# fab files
fabsettings.py
fabfile.py
fab_templates/

# graphviz files
*_graphviz.png
*_graphviz.dot

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# IPython Notebook
.ipynb_checkpoints

# pyenv
.python-version

# locals
local.py

# celery beat schedule file
celerybeat-schedule
celeryev.pid
celeryd*pid
celeryd*log

# dotenv
.env

# virtualenv
venv/
ENV/

# data
big_data/
data/

# project
setup.log
static/
reports/
logs/
media/

# npm modules and transpiled typescript files
client/dist/
client/node_modules/
client/css/
client/polyaxon/**/*.js
client/polyaxon/**/*.js.map
20 changes: 20 additions & 0 deletions LICENCE
@@ -0,0 +1,20 @@
The MIT License (MIT)

Copyright (c) 2016 Mourad Mourafiq.

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
3 changes: 3 additions & 0 deletions README.md
@@ -0,0 +1,3 @@
# Polyaxon

Deep Learning library for TensorFlow.
17 changes: 17 additions & 0 deletions polyaxon/__init__.py
@@ -0,0 +1,17 @@
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function


from . import experiments
from . import layers
from . import processing
from .libs import *
from . import activations
from . import initializations
from . import losses
from . import metrics
from . import optimizers
from . import regularizations
from . import variables

134 changes: 134 additions & 0 deletions polyaxon/activations.py
@@ -0,0 +1,134 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function

import tensorflow as tf

from polyaxon.libs import getters
from polyaxon.libs.utils import get_name_scope, get_shape, track
from polyaxon.variables import variable


def built_activation(fct, name, collect):
    """ Builds the activation function.

    Args:
        fct: the activation function to build.
        name: operation name.
        collect: whether to collect this activation under the activations collection.
    """
    def activation(x):
        x = fct(x, name=name)
        if collect:
            track(x, tf.GraphKeys.ACTIVATIONS)
        return x
    return activation


def linear(name='Linear', collect=False):
    """Computes linear/identity function."""

    def _linear(x, name):
        with get_name_scope(name=name):
            return x

    return built_activation(_linear, name, collect)


def tanh(name=None, collect=False):
    """Computes hyperbolic tangent of x element-wise."""
    return built_activation(tf.tanh, name, collect)


def sigmoid(name=None, collect=False):
    """Computes sigmoid of `x` element-wise: `y = 1 / (1 + exp(-x))`."""
    return built_activation(tf.nn.sigmoid, name, collect)


def softmax(name=None, collect=False):
    """Computes softmax activations.
    For each batch `i` and class `j` we have
    softmax[i, j] = exp(logits[i, j]) / sum(exp(logits[i]))
    """
    return built_activation(tf.nn.softmax, name, collect)


def softplus(name=None, collect=False):
    """Computes softplus: `log(exp(features) + 1)`."""
    return built_activation(tf.nn.softplus, name, collect)


def softsign(name=None, collect=False):
    """Computes softsign: `features / (abs(features) + 1)`."""
    return built_activation(tf.nn.softsign, name, collect)


def relu(name=None, collect=False):
    """Computes ReLU, rectified linear: `max(features, 0)`."""
    return built_activation(tf.nn.relu, name, collect)


def relu6(name=None, collect=False):
    """Computes Rectified Linear 6: `min(max(features, 0), 6)`."""
    return built_activation(tf.nn.relu6, name, collect)


def leaky_relu(alpha=0.1, name="LeakyReLU", collect=False):
    """Modified version of ReLU, introducing a nonzero gradient for negative input."""

    def _leak_relu(x, name):
        with get_name_scope(name):
            x = tf.nn.relu(features=x)
            m_x = tf.nn.relu(features=-x)
            x -= alpha * m_x
            return x

    return built_activation(_leak_relu, name, collect)


def prelu(channel_shared=False, weights_init='zeros', restore=True, name="PReLU", scope=None,
          collect=False):
    """Parametric Rectified Linear Unit."""

    def _prelu(x, name):
        with get_name_scope(name):
            if channel_shared:
                w_shape = (1,)
            else:
                w_shape = get_shape(x)[-1:]

            W_init = getters.get_initializer(weights_init)
            alphas = variable(shape=w_shape, initializer=W_init, restore=restore, name="alphas")

            x = tf.nn.relu(features=x) + tf.multiply(x=alphas, y=(x - tf.abs(x))) * 0.5
            x.alphas = alphas
            return x

    return built_activation(_prelu, name, collect)


def elu(name=None, collect=False):
    """Computes Exponential Linear Unit."""
    return built_activation(tf.nn.elu, name, collect)


def crelu(name='CRelu', collect=False):
    """Computes Concatenated ReLU."""
    return built_activation(tf.nn.crelu, name, collect)


ACTIVATIONS = {
    'linear': linear,
    'tanh': tanh,
    'sigmoid': sigmoid,
    'softmax': softmax,
    'softplus': softplus,
    'softsign': softsign,
    'relu': relu,
    'relu6': relu6,
    'leaky_relu': leaky_relu,
    'elu': elu,
    'crelu': crelu
}
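Usage note (not part of the commit): each factory above returns a callable that applies the underlying op and, when `collect=True`, tracks the output in the `tf.GraphKeys.ACTIVATIONS` collection via `track`. A minimal sketch, assuming TensorFlow 1.x and an importable `polyaxon` package; the input tensor is illustrative.

```python
# Minimal sketch (assumes TensorFlow 1.x); the input tensor is illustrative.
import tensorflow as tf
from polyaxon import activations

x = tf.constant([[-2.0, 0.0, 3.0]])

# Build an activation callable, then apply it to a tensor.
act = activations.leaky_relu(alpha=0.2, collect=True)
y = act(x)  # computes relu(x) - 0.2 * relu(-x), tracked under ACTIVATIONS

# Factories can also be looked up by name through the ACTIVATIONS mapping.
relu_fn = activations.ACTIVATIONS['relu']()
z = relu_fn(x)

with tf.Session() as sess:
    print(sess.run([y, z]))
```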
25 changes: 25 additions & 0 deletions polyaxon/decorators.py
@@ -0,0 +1,25 @@
import tensorflow as tf


def tf_template(name_):
    """This decorator wraps a method with `tf.make_template`.

    Examples:
    ```python
    >>> @tf_template('my_method')
    ... def my_method():
    ...     # Creates variables
    ```
    """

    def template_decorator(func):
        """Inner decorator function"""

        def func_wrapper(*args, **kwargs):
            """Inner wrapper function"""
            templated_func = tf.make_template(name_, func)
            return templated_func(*args, **kwargs)

        return func_wrapper

    return template_decorator
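Usage note (not part of the commit): a brief sketch of how the decorator is applied, assuming TensorFlow 1.x; the names `dense` and `w` are illustrative.

```python
# Hypothetical usage sketch (assumes TensorFlow 1.x).
import tensorflow as tf
from polyaxon.decorators import tf_template


@tf_template('dense')
def dense(x):
    # Variable creation happens inside the template's variable scope.
    w = tf.get_variable('w', shape=[3, 4], initializer=tf.zeros_initializer())
    return tf.matmul(x, w)


x = tf.placeholder(tf.float32, shape=[None, 3])
y = dense(x)  # each call wraps `dense` with tf.make_template under the name 'dense'
```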
Empty file added polyaxon/examples/__init__.py
68 changes: 68 additions & 0 deletions polyaxon/examples/alexnet.py
@@ -0,0 +1,68 @@
import tensorflow as tf
import polyaxon as plx

from polyaxon.examples.mnist_data import load_mnist


def create_experiment_json_fn(output_dir):
    X_train, Y_train, X_test, Y_test = load_mnist()

    config = {
        'name': 'real_mnist',
        'output_dir': output_dir,
        'eval_every_n_steps': 5,
        'run_config': {'save_checkpoints_steps': 100},
        'train_input_data_config': {
            'input_type': plx.configs.InputDataConfig.NUMPY,
            'pipeline_config': {'name': 'train', 'batch_size': 64, 'num_epochs': 5,
                                'shuffle': True},
            'x': X_train,
            'y': Y_train
        },
        'eval_input_data_config': {
            'input_type': plx.configs.InputDataConfig.NUMPY,
            'pipeline_config': {'name': 'eval', 'batch_size': 32, 'num_epochs': 1,
                                'shuffle': False},
            'x': X_test,
            'y': Y_test
        },
        'estimator_config': {'output_dir': output_dir},
        'model_config': {
            'model_type': 'classifier',
            'loss_config': {'name': 'sigmoid_cross_entropy'},
            'eval_metrics_config': [{'name': 'streaming_accuracy'}],
            'optimizer_config': {'name': 'Adam', 'learning_rate': 0.01},
            'graph_config': {
                'name': 'mnist',
                'definition': [
                    (plx.layers.Conv2d,
                     {'num_filter': 32, 'filter_size': 3, 'strides': 1, 'activation': 'elu',
                      'regularizer': 'l2_regularizer'}),
                    (plx.layers.MaxPool2d, {'kernel_size': 2}),
                    (plx.layers.LocalResponseNormalization, {}),
                    (plx.layers.Conv2d, {'num_filter': 64, 'filter_size': 3, 'activation': 'relu',
                                         'regularizer': 'l2_regularizer'}),
                    (plx.layers.MaxPool2d, {'kernel_size': 2}),
                    (plx.layers.LocalResponseNormalization, {}),
                    (plx.layers.FullyConnected, {'n_units': 128, 'activation': 'tanh'}),
                    (plx.layers.Dropout, {'keep_prob': 0.8}),
                    (plx.layers.FullyConnected, {'n_units': 256, 'activation': 'tanh'}),
                    (plx.layers.Dropout, {'keep_prob': 0.8}),
                    (plx.layers.FullyConnected, {'n_units': 10}),
                ]
            }
        }
    }
    experiment_config = plx.experiments.ExperimentConfig.read_configs(config)
    return plx.experiments.create_experiment(experiment_config)


def main(*args):
    plx.experiments.run_experiment(experiment_fn=create_experiment_json_fn,
                                   output_dir="/tmp/polyaxon_logs/alexnet",
                                   schedule='continuous_train_and_eval')


if __name__ == "__main__":
    tf.logging.set_verbosity(tf.logging.INFO)
    tf.app.run()
