Skip to content

Commit

Permalink
Updated losses for linear SVMs
Browse files Browse the repository at this point in the history
  • Loading branch information
dmeoli committed May 22, 2020
1 parent 6265d90 commit 3f18bc7
Show file tree
Hide file tree
Showing 13 changed files with 331 additions and 239 deletions.
34 changes: 17 additions & 17 deletions notebooks/ml/MachineLearningReport.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@
"source": [
"import numpy as np\n",
"\n",
"from yase.ml.svm import SVC\n",
"from optiml.ml.svm import SVC\n",
"\n",
"from sklearn.datasets import fetch_openml\n",
"from sklearn.preprocessing import OneHotEncoder\n",
Expand Down Expand Up @@ -241,8 +241,8 @@
"source": [
"import numpy as np\n",
"\n",
"from yase.ml.neural_network import NeuralNetworkClassifier\n",
"from yase.ml.neural_network.layers import FullyConnected\n",
"from optiml.ml.neural_network import NeuralNetworkClassifier\n",
"from optiml.ml.neural_network.layers import FullyConnected\n",
"\n",
"from sklearn.datasets import fetch_openml\n",
"from sklearn.preprocessing import OneHotEncoder\n",
Expand Down Expand Up @@ -309,10 +309,10 @@
}
],
"source": [
"from yase.ml.neural_network.activations import sigmoid\n",
"from yase.ml.neural_network.losses import mean_squared_error\n",
"from optiml.ml.neural_network.activations import sigmoid\n",
"from optiml.ml.neural_network.losses import mean_squared_error\n",
"\n",
"from yase.optimization.unconstrained.line_search import SteepestGradientDescent\n",
"from optiml.optimization.unconstrained.line_search import SteepestGradientDescent\n",
"\n",
"X, y = fetch_openml('monks-problems-1', return_X_y=True)\n",
"y = y.astype(np.float32)\n",
Expand Down Expand Up @@ -395,10 +395,10 @@
}
],
"source": [
"from yase.ml.neural_network.activations import sigmoid, softmax\n",
"from yase.ml.neural_network.losses import sparse_categorical_cross_entropy\n",
"from optiml.ml.neural_network.activations import sigmoid, softmax\n",
"from optiml.ml.neural_network.losses import sparse_categorical_cross_entropy\n",
"\n",
"from yase.optimization.unconstrained.line_search import SteepestGradientDescent\n",
"from optiml.optimization.unconstrained.line_search import SteepestGradientDescent\n",
"\n",
"X, y = fetch_openml('monks-problems-2', return_X_y=True)\n",
"y = y.astype(np.float32)\n",
Expand Down Expand Up @@ -474,13 +474,13 @@
}
],
"source": [
"from yase.ml.neural_network.activations import sigmoid, softmax\n",
"from yase.ml.neural_network.losses import categorical_cross_entropy\n",
"from yase.ml.neural_network.regularizers import L2\n",
"from optiml.ml.neural_network.activations import sigmoid, softmax\n",
"from optiml.ml.neural_network.losses import categorical_cross_entropy\n",
"from optiml.ml.neural_network.regularizers import L2\n",
"\n",
"from yase.optimization.unconstrained.line_search import BFGS\n",
"from optiml.optimization.unconstrained.line_search import BFGS\n",
"\n",
"from yase.optimization.unconstrained.stochastic import Adam\n",
"from optiml.optimization.unconstrained.stochastic import Adam\n",
"\n",
"X, y = fetch_openml('monks-problems-3', return_X_y=True)\n",
"y = y.astype(np.float32)\n",
Expand Down Expand Up @@ -567,8 +567,8 @@
}
],
"source": [
"from yase.ml.utils import plot_validation_curve, plot_learning_curve\n",
"from yase.ml.svm import SVR\n",
"from optiml.ml.utils import plot_validation_curve, plot_learning_curve\n",
"from optiml.ml.svm import SVR\n",
"\n",
"from sklearn.metrics import make_scorer\n",
"from sklearn.model_selection import GridSearchCV\n",
Expand Down Expand Up @@ -689,4 +689,4 @@
},
"nbformat": 4,
"nbformat_minor": 4
}
}
30 changes: 15 additions & 15 deletions notebooks/ml/SupportVectorMachines.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -248,8 +248,8 @@
}
],
"source": [
"from yase.ml.svm import SVC\n",
"from yase.ml.utils import generate_linearly_separable_data, plot_svm_hyperplane\n",
"from optiml.ml.svm import SVC\n",
"from optiml.ml.utils import generate_linearly_separable_data, plot_svm_hyperplane\n",
"\n",
"from sklearn.svm import SVC as SKLSVC\n",
"from sklearn.model_selection import train_test_split\n",
Expand Down Expand Up @@ -403,8 +403,8 @@
}
],
"source": [
"from yase.ml.svm import SVC\n",
"from yase.ml.utils import generate_linearly_separable_overlap_data, plot_svm_hyperplane\n",
"from optiml.ml.svm import SVC\n",
"from optiml.ml.utils import generate_linearly_separable_overlap_data, plot_svm_hyperplane\n",
"\n",
"from sklearn.svm import SVC as SKLSVC\n",
"from sklearn.model_selection import train_test_split\n",
Expand Down Expand Up @@ -509,8 +509,8 @@
}
],
"source": [
"from yase.ml.svm import SVC\n",
"from yase.ml.utils import generate_non_linearly_separable_data, plot_svm_hyperplane\n",
"from optiml.ml.svm import SVC\n",
"from optiml.ml.utils import generate_non_linearly_separable_data, plot_svm_hyperplane\n",
"\n",
"from sklearn.svm import SVC as SKLSVC\n",
"from sklearn.model_selection import train_test_split\n",
Expand Down Expand Up @@ -593,8 +593,8 @@
}
],
"source": [
"from yase.ml.svm import SVC\n",
"from yase.ml.utils import generate_non_linearly_separable_data, plot_svm_hyperplane\n",
"from optiml.ml.svm import SVC\n",
"from optiml.ml.utils import generate_non_linearly_separable_data, plot_svm_hyperplane\n",
"\n",
"from sklearn.svm import SVC as SKLSVC\n",
"from sklearn.model_selection import train_test_split\n",
Expand Down Expand Up @@ -677,8 +677,8 @@
}
],
"source": [
"from yase.ml.svm import SVC\n",
"from yase.ml.utils import generate_non_linearly_separable_data, plot_svm_hyperplane\n",
"from optiml.ml.svm import SVC\n",
"from optiml.ml.utils import generate_non_linearly_separable_data, plot_svm_hyperplane\n",
"\n",
"from sklearn.svm import SVC as SKLSVC\n",
"from sklearn.model_selection import train_test_split\n",
Expand Down Expand Up @@ -767,8 +767,8 @@
}
],
"source": [
"from yase.ml.svm import SVC\n",
"from yase.ml.utils import generate_non_linearly_separable_data, plot_svm_hyperplane\n",
"from optiml.ml.svm import SVC\n",
"from optiml.ml.utils import generate_non_linearly_separable_data, plot_svm_hyperplane\n",
"\n",
"from sklearn.svm import SVC as SKLSVC\n",
"from sklearn.model_selection import train_test_split\n",
Expand Down Expand Up @@ -958,8 +958,8 @@
}
],
"source": [
"from yase.ml.svm import SVR\n",
"from yase.ml.utils import generate_non_linearly_regression_data, plot_svm_hyperplane\n",
"from optiml.ml.svm import SVR\n",
"from optiml.ml.utils import generate_non_linearly_regression_data, plot_svm_hyperplane\n",
"\n",
"from sklearn.svm import SVR as SKLSVR\n",
"from sklearn.model_selection import train_test_split\n",
Expand Down Expand Up @@ -1002,4 +1002,4 @@
},
"nbformat": 4,
"nbformat_minor": 4
}
}
8 changes: 4 additions & 4 deletions notebooks/optimization/LineSearchMethods.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@
"metadata": {},
"outputs": [],
"source": [
"from yase.optimization.unconstrained import Quadratic, Rosenbrock, SixHumpCamel\n",
"from optiml.optimization.unconstrained import Quadratic, Rosenbrock, SixHumpCamel\n",
"\n",
"quad = Quadratic(Q=[[5, -3], \n",
" [-3, 5]], q=[10, 5])\n",
Expand All @@ -52,8 +52,8 @@
"metadata": {},
"outputs": [],
"source": [
"from yase.optimization.unconstrained.line_search import *\n",
"from yase.optimization.utils import plot_trajectory_optimization"
"from optiml.optimization.unconstrained.line_search import *\n",
"from optiml.optimization.utils import plot_trajectory_optimization"
]
},
{
Expand Down Expand Up @@ -1407,4 +1407,4 @@
},
"nbformat": 4,
"nbformat_minor": 4
}
}
18 changes: 9 additions & 9 deletions notebooks/optimization/StochasticMethods.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -43,15 +43,15 @@
"source": [
"import numpy as np\n",
"\n",
"from yase.ml.utils import generate_centred_and_normalized_regression_data\n",
"from yase.ml.neural_network import NeuralNetworkRegressor\n",
"from yase.ml.neural_network.layers import FullyConnected\n",
"from yase.ml.neural_network.activations import linear\n",
"from yase.ml.neural_network.losses import mean_squared_error\n",
"from yase.ml.neural_network.regularizers import L2\n",
"from optiml.ml.utils import generate_centred_and_normalized_regression_data\n",
"from optiml.ml.neural_network import NeuralNetworkRegressor\n",
"from optiml.ml.neural_network.layers import FullyConnected\n",
"from optiml.ml.neural_network.activations import linear\n",
"from optiml.ml.neural_network.losses import mean_squared_error\n",
"from optiml.ml.neural_network.regularizers import L2\n",
"\n",
"from yase.optimization.utils import plot_trajectory_optimization\n",
"from yase.optimization.unconstrained.stochastic import *\n",
"from optiml.optimization.utils import plot_trajectory_optimization\n",
"from optiml.optimization.unconstrained.stochastic import *\n",
"\n",
"X, y = generate_centred_and_normalized_regression_data(random_state=1)"
]
Expand Down Expand Up @@ -812,4 +812,4 @@
},
"nbformat": 4,
"nbformat_minor": 4
}
}
2 changes: 1 addition & 1 deletion optiml/ml/neural_network/_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,7 @@ def _store_train_val_info(self, opt, X_batch, y_batch, X_val, y_val):
print('\tloss: {:1.4e}'.format(self._avg_epoch_loss), end='')
self._avg_epoch_loss = 0.
if self.validation_split:
val_loss = self.loss(opt.x, X_val, y_val)
val_loss = self.loss.function(opt.x, X_val, y_val)
self.val_loss_history.append(val_loss)
if self.verbose and not opt.epoch % self.verbose:
print(' - val_loss: {:1.4e}'.format(val_loss), end='')
Expand Down
4 changes: 2 additions & 2 deletions optiml/ml/svm/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
__all__ = ['SVM', 'PrimalSVC', 'DualSVC', 'PrimalSVR', 'DualSVR']
__all__ = ['SVM', 'LinearSVC', 'SVC', 'LinearSVR', 'SVR']

from ._base import SVM, PrimalSVC, DualSVC, PrimalSVR, DualSVR
from ._base import SVM, LinearSVC, SVC, LinearSVR, SVR
Loading

0 comments on commit 3f18bc7

Please sign in to comment.