Skip to content

Commit

Permalink
Added verbose and a test case
Browse files Browse the repository at this point in the history
  • Loading branch information
IssamLaradji committed Jun 30, 2014
1 parent 2b8de9c commit 2be2941
Show file tree
Hide file tree
Showing 2 changed files with 47 additions and 20 deletions.
27 changes: 22 additions & 5 deletions sklearn/neural_network/extreme_learning_machines.py
Expand Up @@ -65,9 +65,10 @@ class BaseELM(six.with_metaclass(ABCMeta, BaseEstimator)):
}

@abstractmethod
def __init__(self, n_hidden, activation, verbose, random_state):
    """Store the ELM hyper-parameters on the instance.

    Values are saved verbatim; any validation is deferred to
    ``_validate_params``.
    """
    # Attribute names must mirror the constructor arguments exactly so
    # scikit-learn's get_params/set_params machinery keeps working.
    for attr, value in (("n_hidden", n_hidden),
                        ("activation", activation),
                        ("verbose", verbose),
                        ("random_state", random_state)):
        setattr(self, attr, value)

def _validate_params(self):
Expand Down Expand Up @@ -143,6 +144,12 @@ def fit(self, X, y):
H = self._get_hidden_activations(X)
self.coef_output_ = safe_sparse_dot(pinv2(H), y)

if self.verbose:
# compute training square error
cost = np.sum(
(y - self.decision_function(X)) ** 2) / (2 * n_samples)
print("Training square error = %f" % (cost))

return self

def decision_function(self, X):
Expand Down Expand Up @@ -188,6 +195,9 @@ class ELMClassifier(BaseELM, ClassifierMixin):
- 'tanh' for the hyperbolic tangent.
verbose : bool, optional, default False
Whether to print training score to stdout.
random_state : int or RandomState, optional, default None
State of or seed for random number generator.
Expand All @@ -202,9 +212,11 @@ class ELMClassifier(BaseELM, ClassifierMixin):
"""

def __init__(self, n_hidden=100, activation='tanh', verbose=False,
             random_state=None):
    """Build an ELM classifier.

    The shared hyper-parameters are handed to ``BaseELM``; the label
    binarizer used to map class labels onto a {-1, 1} indicator matrix
    is created here.
    """
    parent = super(ELMClassifier, self)
    parent.__init__(n_hidden, activation, verbose, random_state)

    # Encode targets as -1/+1 columns, one column per class.
    self._lbin = LabelBinarizer(-1, 1)
    # Populated during fit, once the class labels are known.
    self.classes_ = None
Expand Down Expand Up @@ -310,6 +322,9 @@ class ELMRegressor(BaseELM, RegressorMixin):
- 'tanh' for the hyperbolic tangent.
verbose : bool, optional, default False
Whether to print training score to stdout.
random_state : int or RandomState, optional, default None
State of or seed for random number generator.
Expand All @@ -324,9 +339,11 @@ class ELMRegressor(BaseELM, RegressorMixin):
"""

def __init__(self, n_hidden=100, activation='tanh', verbose=False,
             random_state=None):
    """Build an ELM regressor.

    All hyper-parameters are forwarded unchanged to ``BaseELM``.
    """
    parent = super(ELMRegressor, self)
    parent.__init__(n_hidden, activation, verbose, random_state)

    # Regressors have no classes; kept for a uniform attribute surface.
    self.classes_ = None

Expand Down
40 changes: 25 additions & 15 deletions sklearn/neural_network/tests/test_elm.py
Expand Up @@ -91,21 +91,6 @@ def test_regression():
assert_greater(elm.score(X, y), 0.95)


def test_multilabel_classification():
    """Check that multi-label classification reaches a high train score."""
    # Shared fixture: one multi-label problem, reused for every activation.
    X, y = make_multilabel_classification(
        n_samples=50, random_state=random_state)

    def fitted(act):
        # Fresh estimator per activation so runs are independent.
        clf = ELMClassifier(n_hidden=50, activation=act,
                            random_state=random_state)
        return clf.fit(X, y)

    for act in ACTIVATION_TYPES:
        assert_greater(fitted(act).score(X, y), 0.95)


def test_multioutput_regression():
"""
Tests whether multi-output regression works as expected
Expand Down Expand Up @@ -193,3 +178,28 @@ def test_sparse_matrices():
pred1 = elm.predict(X)
pred2 = elm.predict(X_sparse)
assert_array_equal(pred1, pred2)


def test_verbose():
    """Check that verbose mode prints the training square error.

    Fits an ELMRegressor with ``verbose=True``, captures stdout,
    recomputes the cost independently, and asserts the printed output
    contains it.
    """
    X = Xboston
    y = yboston[:, np.newaxis]

    elm = ELMRegressor(verbose=True)

    old_stdout = sys.stdout
    sys.stdout = output = StringIO()
    try:
        elm.fit(X, y)
    finally:
        # Always restore stdout, even if fit() raises, so a failure here
        # does not swallow the output of every later test.
        sys.stdout = old_stdout

    # Recompute the training square error exactly as fit() does.
    cost = np.sum(
        (y - elm.decision_function(X)) ** 2) / (2 * X.shape[0])

    # Format with the same "%f" conversion used by the verbose print so
    # the substring comparison is exact for any magnitude of cost.
    # (Plain str() can yield scientific notation, e.g. "1e-05", which
    # would never appear in the "%f"-formatted output, and str(cost)[:6]
    # did not actually trim to 4 decimal places as previously claimed.)
    cost_str = '%f' % cost

    assert cost_str in output.getvalue()

0 comments on commit 2be2941

Please sign in to comment.