Starting on v0.2 (python3 only)
Niru Maheswaranathan committed Sep 13, 2016
1 parent 42648db commit 9f27e4b
Showing 5 changed files with 45 additions and 25 deletions.
7 changes: 2 additions & 5 deletions Makefile
@@ -4,11 +4,8 @@ all:
 develop:
     python setup.py develop

-test2:
-    python2 /usr/local/bin/nosetests --logging-level=INFO
-
-test3:
-    nosetests -v --with-coverage --cover-package=descent --logging-level=INFO
+test:
+    pytest --cov=descent --cov-report=html tests/

 clean:
     rm -rf htmlcov/
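The two interpreter-specific targets collapse into a single pytest run: `--cov=descent` measures coverage for the package, and `--cov-report=html` writes the `htmlcov/` directory that the `clean` target removes. For context, a minimal sketch of the kind of test module pytest would collect from `tests/` (hypothetical, not a file in this commit):

```python
# tests/test_sanity.py -- hypothetical example, not part of this commit
def test_rosenbrock_minimum():
    # the banana function used in tests/test_rosenbrock.py vanishes at (1, 1)
    x, y = 1.0, 1.0
    assert (1 - x)**2 + 100 * (y - x**2)**2 == 0.0
```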
17 changes: 7 additions & 10 deletions descent/__init__.py
@@ -3,21 +3,18 @@
 =======
 A Python package for performing first-order optimization

 For more information, see the accompanying README.md
 """

-__all__ = [
-    'algorithms',
-    'proxops',
-    'utils',
-    'main',
-]
+__version__ = '0.2.0'

 from .algorithms import *
 from .proxops import *
 from .utils import *
 from .main import *

-__version__ = '0.1.5'
+__all__ = [
+    *algorithms.__all__,
+    *proxops.__all__,
+    *utils.__all__,
+    *main.__all__,
+]
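The package `__all__` is now assembled from the submodules' own `__all__` exports instead of duplicating names by hand. A minimal sketch of the pattern, assuming a hypothetical package `pkg` with one submodule:

```python
# pkg/ops.py -- hypothetical submodule
__all__ = ['double']

def double(x):
    return 2 * x

# pkg/__init__.py -- aggregate the public API from the submodule
from .ops import *           # also binds the submodule name `ops` on the package
__all__ = [*ops.__all__]     # so `from pkg import *` tracks ops.__all__ automatically
```

Because importing a submodule binds its name in the parent package's namespace, `ops` (here) and `algorithms` etc. (in the diff) are available as globals when `__all__` is built; keeping the star-imports before the `__all__` assignment is what makes the unpacking work.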
40 changes: 33 additions & 7 deletions descent/algorithms.py
@@ -8,7 +8,8 @@
 from future.utils import with_metaclass
 import numpy as np
 from collections import deque
-from abc import ABCMeta, abstractmethod
+from abc import abstractmethod
+from custom_inherit import DocInheritMeta

__all__ = ['sgd', 'StochasticGradientDescent',
'nag', 'NesterovAcceleratedGradient',
@@ -17,24 +18,49 @@
'adam', 'ADAM']


-class Algorithm(object, with_metaclass(ABCMeta)):
+class Algorithm(object, with_metaclass(DocInheritMeta(style="numpy", abstract_base_class=True))):

     def __init__(self, xinit):
-        self.k = 0.
+        """Initializes an optimizer.
+
+        Parameters
+        ----------
+        xinit : array_like
+        """
+        self.k = 0
         self.xk = xinit.copy()

     def __next__(self):
         """Called to update every iteration"""
-        self.k += 1.0
+        self.k += 1

     @abstractmethod
     def __call__(self, gradient):
+        """Applies the gradient.
+
+        Parameters
+        ----------
+        gradient : array_like
+
+        Returns
+        -------
+        parameters : array_like
+        """
         raise NotImplementedError


 class StochasticGradientDescent(Algorithm):

     def __init__(self, xinit, lr=1e-3, momentum=0., decay=0.):
+        """Stochastic gradient descent.
+
+        Optional Parameters
+        -------------------
+        lr : float
+            The learning rate
+        momentum : float
+        decay : float
+        """
+
         super().__init__(xinit)
         self.vk = np.zeros_like(xinit)
@@ -87,7 +113,7 @@ def __call__(self, gradient):

class RMSProp(Algorithm):

-    def __init__(self, xinit, lr=1e-3, damping=0.1, decay=0.9):
+    def __init__(self, xinit, lr=1e-3, damping=1e-12, decay=0.9):
         """
         RMSProp
@@ -99,7 +125,7 @@ def __init__(self, xinit, lr=1e-3, damping=0.1, decay=0.9):
             Learning rate (Default: 1e-3)
         damping : float, optional
-            Damping term (Default: 0)
+            Damping term (Default: 1e-12)
         decay : float, optional
             Decay of the learning rate (Default: 0)
@@ -169,7 +195,7 @@ def __init__(self, xinit, nterms=10, lr=1e-3, epsilon=1e-8):
         self.mem = np.ones_like(self.xk)
         self.g = np.zeros_like(self.xk)
         self.g2 = np.zeros_like(self.xk)
-        self.epsilon = 1e-8
+        self.epsilon = epsilon

     def __call__(self, gradient):

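Swapping `ABCMeta` for `custom_inherit.DocInheritMeta(style="numpy", abstract_base_class=True)` is designed to keep the abstract-method enforcement while letting subclasses inherit the numpy-style docstring sections added above. The optimizers follow an iterator-style interface: construct with an initial parameter array, then feed gradients through `__call__`. A minimal usage sketch on a quadratic objective, assuming `__call__` applies one update to `self.xk` and `next()` only advances the iteration counter, as the base class suggests:

```python
import numpy as np
from descent.algorithms import StochasticGradientDescent

# minimize f(x) = ||x - 1||^2, whose gradient is 2 * (x - 1)
opt = StochasticGradientDescent(np.zeros(5), lr=0.1, momentum=0.9)

for _ in range(200):
    grad = 2 * (opt.xk - 1)   # gradient at the current iterate
    opt(grad)                 # apply one optimizer update
    next(opt)                 # bump the iteration counter k

print(opt.xk)  # should be close to the all-ones optimum
```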
2 changes: 2 additions & 0 deletions descent/main.py
@@ -14,6 +14,8 @@
 except ImportError:
     from time import time as perf_counter

+__all__ = ['GradientDescent', 'Consensus']
+

 class Optimizer(object):
     def __init__(self, theta_init, display=sys.stdout):
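Defining `__all__` here is what lets the new `descent/__init__.py` unpack `main.__all__`; it also keeps internal helpers like `Optimizer` out of star-imports. A small illustration of the effect, assuming the package is installed:

```python
import descent.main as main

print(main.__all__)                  # ['GradientDescent', 'Consensus']
print('Optimizer' in main.__all__)   # False: the base class stays private to star-imports
```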
4 changes: 1 addition & 3 deletions tests/test_rosenbrock.py
@@ -11,10 +11,8 @@
 def rosenbrock(theta):
     """Objective and gradient for the rosenbrock function"""

-    x = theta[0]
-    y = theta[1]

-    # Rosenbrock's banana function
+    x, y = theta
     obj = (1 - x)**2 + 100 * (y - x**2)**2

     # gradient for the Rosenbrock function
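The gradient lines are folded out of the hunk above. For reference, a sketch of what the full helper plausibly looks like after this change, using the standard analytic gradient of the banana function:

```python
import numpy as np

def rosenbrock(theta):
    """Objective and gradient for the rosenbrock function"""
    x, y = theta
    obj = (1 - x)**2 + 100 * (y - x**2)**2

    # analytic gradient of the banana function
    grad = np.array([
        -2 * (1 - x) - 400 * x * (y - x**2),   # d(obj)/dx
        200 * (y - x**2),                      # d(obj)/dy
    ])
    return obj, grad
```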
