⚠️ Deprecate PolynomialModel and Optimizer
yoshoku committed May 16, 2020
1 parent 2e36efb commit be0c0e1
Showing 9 changed files with 34 additions and 0 deletions.
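
Every file follows the same two-part pattern: a YARD @deprecated tag is added to the class (and, where relevant, module) doc comment, and a Kernel#warn call is prepended to initialize. A minimal sketch of what a caller sees after this commit (class names, keywords, and warning text taken from the diffs below):

require 'rumale'

# Constructing a deprecated optimizer now prints a one-line notice on $stderr:
optimizer = Rumale::Optimizer::SGD.new(learning_rate: 0.01)
# stderr: warning: SGD is deprecated. This class will be deleted in version 0.20.0.

# The deprecated estimators behave the same way and, until 0.20.0,
# still accept an optimizer through the keyword shown in their diffs:
estimator = Rumale::PolynomialModel::FactorizationMachineClassifier.new(optimizer: optimizer)
# stderr: warning: FactorizationMachineClassifier is deprecated. This class will be deleted in version 0.20.0.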
3 changes: 3 additions & 0 deletions lib/rumale/optimizer/ada_grad.rb
@@ -7,6 +7,8 @@ module Rumale
module Optimizer
# AdaGrad is a class that implements AdaGrad optimizer.
#
# @deprecated AdaGrad will be deleted in version 0.20.0.
#
# *Reference*
# - Duchi, J., Hazan, E., and Singer, Y., "Adaptive Subgradient Methods for Online Learning and Stochastic Optimization," J. Machine Learning Research, vol. 12, pp. 2121--2159, 2011.
class AdaGrad
@@ -17,6 +19,7 @@ class AdaGrad
#
# @param learning_rate [Float] The initial value of learning rate.
def initialize(learning_rate: 0.01)
warn 'warning: AdaGrad is deprecated. This class will be deleted in version 0.20.0.'
check_params_numeric(learning_rate: learning_rate)
check_params_positive(learning_rate: learning_rate)
@params = {}
3 changes: 3 additions & 0 deletions lib/rumale/optimizer/adam.rb
@@ -7,6 +7,8 @@ module Rumale
module Optimizer
# Adam is a class that implements Adam optimizer.
#
# @deprecated Adam will be deleted in version 0.20.0.
#
# *Reference*
# - Kingma, D P., and Ba, J., "Adam: A Method for Stochastic Optimization," Proc. ICLR'15, 2015.
class Adam
@@ -19,6 +21,7 @@ class Adam
# @param decay1 [Float] The smoothing parameter for the first moment.
# @param decay2 [Float] The smoothing parameter for the second moment.
def initialize(learning_rate: 0.001, decay1: 0.9, decay2: 0.999)
warn 'warning: Adam is deprecated. This class will be deleted in version 0.20.0.'
check_params_numeric(learning_rate: learning_rate, decay1: decay1, decay2: decay2)
check_params_positive(learning_rate: learning_rate, decay1: decay1, decay2: decay2)
@params = {}
5 changes: 5 additions & 0 deletions lib/rumale/optimizer/nadam.rb
@@ -5,9 +5,13 @@

module Rumale
# This module consists of the classes that implement optimizers adaptively tuning hyperparameters.
#
# @deprecated Optimizer module will be deleted in version 0.20.0.
module Optimizer
# Nadam is a class that implements Nadam optimizer.
#
# @deprecated Nadam will be deleted in version 0.20.0.
#
# *Reference*
# - Dozat, T., "Incorporating Nesterov Momentum into Adam," Tech. Repo. Stanford University, 2015.
class Nadam
@@ -20,6 +24,7 @@ class Nadam
# @param decay1 [Float] The smoothing parameter for the first moment.
# @param decay2 [Float] The smoothing parameter for the second moment.
def initialize(learning_rate: 0.01, decay1: 0.9, decay2: 0.999)
warn 'warning: Nadam is deprecated. This class will be deleted in version 0.20.0.'
check_params_numeric(learning_rate: learning_rate, decay1: decay1, decay2: decay2)
check_params_positive(learning_rate: learning_rate, decay1: decay1, decay2: decay2)
@params = {}
3 changes: 3 additions & 0 deletions lib/rumale/optimizer/rmsprop.rb
@@ -7,6 +7,8 @@ module Rumale
module Optimizer
# RMSProp is a class that implements RMSProp optimizer.
#
# @deprecated RMSProp will be deleted in version 0.20.0.
#
# *Reference*
# - Sutskever, I., Martens, J., Dahl, G., and Hinton, G., "On the importance of initialization and momentum in deep learning," Proc. ICML'13, pp. 1139--1147, 2013.
# - Hinton, G., Srivastava, N., and Swersky, K., "Lecture 6e rmsprop," Neural Networks for Machine Learning, 2012.
@@ -20,6 +22,7 @@ class RMSProp
# @param momentum [Float] The initial value of momentum.
# @param decay [Float] The smoothing parameter.
def initialize(learning_rate: 0.01, momentum: 0.9, decay: 0.9)
warn 'warning: RMSProp is deprecated. This class will be deleted in version 0.20.0.'
check_params_numeric(learning_rate: learning_rate, momentum: momentum, decay: decay)
check_params_positive(learning_rate: learning_rate, momentum: momentum, decay: decay)
@params = {}
3 changes: 3 additions & 0 deletions lib/rumale/optimizer/sgd.rb
@@ -6,6 +6,8 @@
module Rumale
module Optimizer
# SGD is a class that implements SGD optimizer.
#
# @deprecated SGD will be deleted in version 0.20.0.
class SGD
include Base::BaseEstimator
include Validation
@@ -16,6 +18,7 @@ class SGD
# @param momentum [Float] The initial value of momentum.
# @param decay [Float] The smoothing parameter.
def initialize(learning_rate: 0.01, momentum: 0.0, decay: 0.0)
warn 'warning: SGD is deprecated. This class will be deleted in version 0.20.0.'
check_params_numeric(learning_rate: learning_rate, momentum: momentum, decay: decay)
check_params_positive(learning_rate: learning_rate, momentum: momentum, decay: decay)
@params = {}
3 changes: 3 additions & 0 deletions lib/rumale/optimizer/yellow_fin.rb
@@ -7,6 +7,8 @@ module Rumale
module Optimizer
# YellowFin is a class that implements YellowFin optimizer.
#
# @deprecated YellowFin will be deleted in version 0.20.0.
#
# *Reference*
# - Zhang, J., and Mitliagkas, I., "YellowFin and the Art of Momentum Tuning," CoRR abs/1706.03471, 2017.
class YellowFin
@@ -20,6 +22,7 @@ class YellowFin
# @param decay [Float] The smoothing parameter.
# @param window_width [Integer] The sliding window width for searching curvature range.
def initialize(learning_rate: 0.01, momentum: 0.9, decay: 0.999, window_width: 20)
warn 'warning: YellowFin is deprecated. This class will be deleted in version 0.20.0.'
check_params_numeric(learning_rate: learning_rate, momentum: momentum, decay: decay, window_width: window_width)
check_params_positive(learning_rate: learning_rate, momentum: momentum, decay: decay, window_width: window_width)
@params = {}
4 changes: 4 additions & 0 deletions lib/rumale/polynomial_model/base_factorization_machine.rb
@@ -5,9 +5,13 @@

module Rumale
# This module consists of the classes that implement polynomial models.
#
# @deprecated PolynomialModel module will be deleted in version 0.20.0.
module PolynomialModel
# BaseFactorizationMachine is an abstract class for implementation of Factorization Machine-based estimators.
# This class is used internally.
#
# @deprecated BaseFactorizationMachine will be deleted in version 0.20.0.
class BaseFactorizationMachine
include Base::BaseEstimator

5 changes: 5 additions & 0 deletions lib/rumale/polynomial_model/factorization_machine_classifier.rb
@@ -10,6 +10,10 @@ module PolynomialModel
# with stochastic gradient descent (SGD) optimization.
# For multiclass classification problem, it uses one-vs-the-rest strategy.
#
# @deprecated
# FactorizationMachineClassifier will be deleted in version 0.20.0.
# The Rumale author recommends using the xlearn gem instead.
#
# @example
# estimator =
# Rumale::PolynomialModel::FactorizationMachineClassifier.new(
@@ -65,6 +69,7 @@ class FactorizationMachineClassifier < BaseFactorizationMachine
def initialize(n_factors: 2, loss: 'hinge', reg_param_linear: 1.0, reg_param_factor: 1.0,
max_iter: 200, batch_size: 50, tol: 1e-4,
optimizer: nil, n_jobs: nil, verbose: false, random_seed: nil)
warn 'warning: FactorizationMachineClassifier is deprecated. This class will be deleted in version 0.20.0.'
check_params_numeric(reg_param_linear: reg_param_linear, reg_param_factor: reg_param_factor,
n_factors: n_factors, max_iter: max_iter, batch_size: batch_size, tol: tol)
check_params_string(loss: loss)
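
The doc comments above name the xlearn gem as the migration target for both factorization machine estimators. The dependency swap below is only a hypothetical starting point; nothing in this commit shows the xlearn API itself:

# Gemfile
gem 'xlearn'  # replacement suggested in the @deprecated notes above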
5 changes: 5 additions & 0 deletions lib/rumale/polynomial_model/factorization_machine_regressor.rb
@@ -8,6 +8,10 @@ module PolynomialModel
# FactorizationMachineRegressor is a class that implements Factorization Machine
# with stochastic gradient descent (SGD) optimization.
#
# @deprecated
# FactorizationMachineRegressor will be deleted in version 0.20.0.
# The Rumale author recommends using the xlearn gem instead.
#
# @example
# estimator =
# Rumale::PolynomialModel::FactorizationMachineRegressor.new(
@@ -58,6 +62,7 @@ class FactorizationMachineRegressor < BaseFactorizationMachine
def initialize(n_factors: 2, reg_param_linear: 1.0, reg_param_factor: 1.0,
max_iter: 200, batch_size: 50, tol: 1e-4,
optimizer: nil, n_jobs: nil, verbose: false, random_seed: nil)
warn 'warning: FactorizationMachineRegressor is deprecated. This class will be deleted in version 0.20.0.'
check_params_numeric(reg_param_linear: reg_param_linear, reg_param_factor: reg_param_factor,
n_factors: n_factors, max_iter: max_iter, batch_size: batch_size, tol: tol)
check_params_boolean(verbose: verbose)
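
Because the notices go through Kernel#warn, plain Ruby warning controls apply; until the classes are removed in 0.20.0, code that cannot migrate yet can silence them (standard Ruby behavior, nothing Rumale-specific):

# Kernel#warn is a no-op when warnings are disabled, so either of these
# suppresses the deprecation messages without touching application code:
#   $ ruby -W0 app.rb
$VERBOSE = nil
Rumale::Optimizer::SGD.new  # constructs silently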
