r"""Geodesic Regression.
Lead author: Nicolas Guigui.
The generative model of the data is:
:math:`Z = Exp_{\beta_0}(\beta_1.X)` and :math:`Y = Exp_Z(\epsilon)`
where:
- :math:`Exp` denotes the Riemannian exponential,
- :math:`\beta_0` is called the intercept,
and is a point on the manifold,
- :math:`\beta_1` is called the coefficient,
and is a tangent vector to the manifold at :math:`\beta_0`,
- :math:`\epsilon \sim N(0, 1)` is a standard Gaussian noise,
- :math:`X` is the input, :math:`Y` is the target.
The geodesic regression method:
- estimates :math:`\beta_0, \beta_1`,
- predicts :math:`\hat{y}` from input :math:`X`.
"""
import logging
import math

from scipy.optimize import minimize
from sklearn.base import BaseEstimator

import geomstats.backend as gs
import geomstats.errors
from geomstats.learning.frechet_mean import FrechetMean

class GeodesicRegression(BaseEstimator):
r"""Geodesic Regression.
The generative model of the data is:
:math:`Z = Exp_{\beta_0}(\beta_1.X)` and :math:`Y = Exp_Z(\epsilon)`
where:
- :math:`Exp` denotes the Riemannian exponential,
- :math:`\beta_0` is called the intercept,
and is a point on the manifold,
- :math:`\beta_1` is called the coefficient,
and is a tangent vector to the manifold at :math:`\beta_0`,
- :math:`\epsilon \sim N(0, 1)` is a standard Gaussian noise,
- :math:`X` is the input, :math:`Y` is the target.
The geodesic regression method:
- estimates :math:`\beta_0, \beta_1`,
- predicts :math:`\hat{y}` from input :math:`X`.
Parameters
----------
space : Manifold
Manifold.
metric : RiemannianMetric
Riemannian metric.
center_X : bool
Subtract mean to X as a preprocessing.
method : str, {\'extrinsic\', \'riemannian\'}
Gradient descent method.
Optional, default: extrinsic.
max_iter : int
Maximum number of iterations for gradient descent.
Optional, default: 100.
init_step_size : float
Initial learning rate for gradient descent.
Optional, default: 0.1
tol : float
Tolerance for loss minimization.
Optional, default: 1e-5
verbose : bool
Verbose option.
Optional, default: False.
initialization : str or array-like,
{'random', 'data', 'frechet', warm_start'}
Initial values of the parameters for the optimization,
or initialization method.
Optional, default: 'random'
regularization : float
Weight on the constraint for the intercept to lie on the manifold in
the extrinsic optimization scheme. An L^2 constraint is applied.
Optional, default: 1.
"""
def __init__(
self,
space,
metric=None,
center_X=True,
method="extrinsic",
max_iter=100,
init_step_size=0.1,
tol=1e-5,
verbose=False,
initialization="random",
regularization=1.0,
):
if metric is None:
metric = space.metric
self.metric = metric
self.space = space
self.intercept_ = None
self.coef_ = None
self.center_X = center_X
self.mean_ = None
self.training_score_ = None
geomstats.errors.check_parameter_accepted_values(
method, "method", ["extrinsic", "riemannian"]
)
self.method = method
self.max_iter = max_iter
self.verbose = verbose
self.init_step_size = init_step_size
self.tol = tol
self.initialization = initialization
self.regularization = regularization

    def _model(self, X, coef, intercept):
        """Compute the generative model of the geodesic regression.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape=[...,]
            Training input samples.
        coef : array-like, shape=[..., {dim, [n,n]}]
            Coefficient of the geodesic regression.
        intercept : array-like, shape=[..., {dim, [n,n]}]
            Intercept of the geodesic regression.

        Returns
        -------
        _ : array-like, shape=[..., {dim, [n,n]}]
            Value on the manifold output by the generative model.
        """
X_copy = (
X[:, None]
if self.metric.default_point_type == "vector"
else X[:, None, None]
)
return self.metric.exp(X_copy * coef[None], intercept)

    def _loss(self, X, y, param, shape, weights=None):
        """Compute the loss associated to the geodesic regression.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape=[...,]
            Training input samples.
        y : array-like, shape=[..., {dim, [n,n]}]
            Training target values.
        param : array-like, shape=[2, {dim, [n,n]}]
            Parameters intercept and coef of the geodesic regression,
            vertically stacked.
        shape : tuple
            Shape of a single parameter (intercept or coef), used to
            reshape the stacked parameters.
        weights : array-like, shape=[...,]
            Weights associated to the points.
            Optional, default: None.

        Returns
        -------
        _ : float
            Loss.
        """
intercept, coef = gs.split(param, 2)
intercept = gs.reshape(intercept, shape)
coef = gs.reshape(coef, shape)
intercept = gs.cast(intercept, dtype=y.dtype)
coef = gs.cast(coef, dtype=y.dtype)
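        # Extrinsic scheme: the intercept lives in the ambient space, and a
        # penalty pulls it towards its projection on the manifold. Riemannian
        # scheme: the intercept is already on the manifold, no penalty.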
if self.method == "extrinsic":
base_point = self.space.projection(intercept)
penalty = self.regularization * gs.sum((base_point - intercept) ** 2)
else:
base_point = intercept
penalty = 0
tangent_vec = self.space.to_tangent(coef, base_point)
distances = self.metric.squared_dist(self._model(X, tangent_vec, base_point), y)
if weights is None:
weights = 1.0
return 1.0 / 2.0 * gs.sum(weights * distances) + penalty

    def fit(self, X, y, weights=None, compute_training_score=False):
        """Estimate the parameters of the geodesic regression.

        Estimate the intercept and the coefficient defining the
        geodesic regression model.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape=[...,]
            Training input samples.
        y : array-like, shape=[..., {dim, [n,n]}]
            Training target values.
        weights : array-like, shape=[...,]
            Weights associated to the points.
            Optional, default: None.
        compute_training_score : bool
            Whether to compute R^2.
            Optional, default: False.

        Returns
        -------
        self : object
            Returns self.
        """
times = gs.copy(X)
if self.center_X:
self.mean_ = gs.mean(X)
times -= self.mean_
if self.method == "extrinsic":
return self._fit_extrinsic(times, y, weights, compute_training_score)
if self.method == "riemannian":
return self._fit_riemannian(times, y, weights, compute_training_score)

    def _fit_extrinsic(self, X, y, weights=None, compute_training_score=False):
        """Estimate the parameters using the extrinsic gradient descent.

        Estimate the intercept and the coefficient defining the
        geodesic regression model, using the extrinsic gradient.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape=[...,]
            Training input samples.
        y : array-like, shape=[..., {dim, [n,n]}]
            Training target values.
        weights : array-like, shape=[...,]
            Weights associated to the points.
            Optional, default: None.
        compute_training_score : bool
            Whether to compute R^2.
            Optional, default: False.

        Returns
        -------
        self : object
            Returns self.
        """
shape = (
y.shape[-1:] if self.space.default_point_type == "vector" else y.shape[-2:]
)
intercept_init, coef_init = self.initialize_parameters(y)
intercept_hat = self.space.projection(intercept_init)
coef_hat = self.space.to_tangent(coef_init, intercept_hat)
initial_guess = gs.vstack([gs.flatten(intercept_hat), gs.flatten(coef_hat)])
objective_with_grad = gs.autodiff.value_and_grad(
lambda param: self._loss(X, y, param, shape, weights), to_numpy=True
)
res = minimize(
objective_with_grad,
initial_guess,
method="CG",
jac=True,
options={"disp": self.verbose, "maxiter": self.max_iter},
tol=self.tol,
)
intercept_hat, coef_hat = gs.split(gs.array(res.x), 2)
intercept_hat = gs.reshape(intercept_hat, shape)
intercept_hat = gs.cast(intercept_hat, dtype=y.dtype)
coef_hat = gs.reshape(coef_hat, shape)
coef_hat = gs.cast(coef_hat, dtype=y.dtype)
self.intercept_ = self.space.projection(intercept_hat)
self.coef_ = self.space.to_tangent(coef_hat, self.intercept_)
if compute_training_score:
variance = gs.sum(self.metric.squared_dist(y, self.intercept_))
self.training_score_ = 1 - 2 * res.fun / variance
return self

    def initialize_parameters(self, y):
        """Set initial values for the parameters of the model.

        Set initial parameters for the optimization, depending on the value
        of the attribute `initialization`. The options are:

        - `random` : pick random numbers from a normal distribution,
          then project them to the manifold and the tangent space.
        - `frechet` : compute the Frechet mean of the target points.
        - `data` : pick a random sample from the target points and a
          tangent vector with random coefficients.
        - `warm_start` : pick previous values of the parameters if the
          model was fitted before, otherwise behaves as `random`.

        Parameters
        ----------
        y : array-like, shape=[n_samples, {dim, [n,n]}]
            The target data, used for the options `data` and `frechet`.

        Returns
        -------
        intercept : array-like, shape=[{dim, [n,n]}]
            Initial value for the intercept.
        coef : array-like, shape=[{dim, [n,n]}]
            Initial value for the coefficient.
        """
init = self.initialization
shape = (
y.shape[-1:] if self.space.default_point_type == "vector" else y.shape[-2:]
)
if isinstance(init, str):
if init == "random":
return gs.random.normal(size=(2,) + shape)
if init == "frechet":
mean = FrechetMean(self.metric, verbose=self.verbose).fit(y).estimate_
return mean, gs.zeros(shape)
if init == "data":
return gs.random.choice(y, 1)[0], gs.random.normal(size=shape)
if init == "warm_start":
if self.intercept_ is not None:
return self.intercept_, self.coef_
return gs.random.normal(size=(2,) + shape)
raise ValueError(
"The initialization string must be one of "
"random, frechet, data or warm_start"
)
return init

    def _fit_riemannian(self, X, y, weights=None, compute_training_score=False):
        """Estimate the parameters using a Riemannian gradient descent.

        Estimate the intercept and the coefficient defining the
        geodesic regression model, using the Riemannian gradient.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape=[...,]
            Training input samples.
        y : array-like, shape=[..., {dim, [n,n]}]
            Training target values.
        weights : array-like, shape=[...,]
            Weights associated to the points.
            Optional, default: None.
        compute_training_score : bool
            Whether to compute R^2.
            Optional, default: False.

        Returns
        -------
        self : object
            Returns self.
        """
shape = (
y.shape[-1:] if self.space.default_point_type == "vector" else y.shape[-2:]
)
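        # Move the coefficient along with the intercept update: use parallel
        # transport when the metric provides it, otherwise fall back to
        # projecting the vector onto the tangent space at the new point.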
if hasattr(self.metric, "parallel_transport"):
def vector_transport(tan_a, tan_b, base_point, _):
return self.metric.parallel_transport(tan_a, base_point, tan_b)
else:
def vector_transport(tan_a, _, __, point):
return self.space.to_tangent(tan_a, point)
objective_with_grad = gs.autodiff.value_and_grad(
lambda params: self._loss(X, y, params, shape, weights)
)
lr = self.init_step_size
intercept_init, coef_init = self.initialize_parameters(y)
intercept_hat = intercept_hat_new = self.space.projection(intercept_init)
coef_hat = coef_hat_new = self.space.to_tangent(coef_init, intercept_hat)
param = gs.vstack([gs.flatten(intercept_hat), gs.flatten(coef_hat)])
current_loss = [math.inf]
current_grad = gs.zeros_like(param)
current_iter = i = 0
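        # Adaptive step size: halve it when the gradient is NaN or the loss
        # increases, double it every 5 accepted iterations.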
for i in range(self.max_iter):
loss, grad = objective_with_grad(param)
if gs.any(gs.isnan(grad)):
logging.warning(f"NaN encountered in gradient at iter {current_iter}")
lr /= 2
grad = current_grad
elif loss >= current_loss[-1] and i > 0:
lr /= 2
else:
if not current_iter % 5:
lr *= 2
coef_hat = coef_hat_new
intercept_hat = intercept_hat_new
current_iter += 1
if abs(loss - current_loss[-1]) < self.tol:
if self.verbose:
logging.info(f"Tolerance threshold reached at iter {current_iter}")
break
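            # Turn the ambient gradient into Riemannian gradients in the
            # tangent space at the current intercept.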
grad_intercept, grad_coef = gs.split(grad, 2)
riem_grad_intercept = self.space.to_tangent(
gs.reshape(grad_intercept, shape), intercept_hat
)
riem_grad_coef = self.space.to_tangent(
gs.reshape(grad_coef, shape), intercept_hat
)
intercept_hat_new = self.metric.exp(
-lr * riem_grad_intercept, intercept_hat
)
coef_hat_new = vector_transport(
coef_hat - lr * riem_grad_coef,
-lr * riem_grad_intercept,
intercept_hat,
intercept_hat_new,
)
param = gs.vstack([gs.flatten(intercept_hat_new), gs.flatten(coef_hat_new)])
current_loss.append(loss)
current_grad = grad
self.intercept_ = self.space.projection(intercept_hat)
self.coef_ = self.space.to_tangent(coef_hat, self.intercept_)
        if self.verbose:
            logging.info(
                f"Number of gradient evaluations: {i}, "
                f"number of gradient iterations: {current_iter}, "
                f"loss at termination: {current_loss[-1]}"
            )
if compute_training_score:
variance = gs.sum(self.metric.squared_dist(y, self.intercept_))
self.training_score_ = 1 - 2 * current_loss[-1] / variance
return self

    def predict(self, X, y=None):
        """Predict the manifold value for each input.

        Parameters
        ----------
        X : array-like, shape=[...,]
            Input data.
        y : Ignored
            Present for API consistency.

        Returns
        -------
        y_pred : array-like, shape=[..., {dim, [n,n]}]
            Predicted points on the manifold, one for each input.
        """
        if self.coef_ is None:
            raise RuntimeError("Fit method must be called before predict.")

        times = gs.copy(X)
        if self.center_X:
            times = times - self.mean_
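        # Apply the fitted model: Exp at the intercept of X * coef.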
return self._model(times, self.coef_, self.intercept_)

    def score(self, X, y, weights=None):
        """Compute training score.

        Compute the training score defined as R^2.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape=[...,]
            Training input samples.
        y : array-like, shape=[..., {dim, [n,n]}]
            Training target values.
        weights : array-like, shape=[...,]
            Weights associated to the points.
            Optional, default: None.

        Returns
        -------
        _ : float
            Training score.
        """
y_pred = self.predict(X)
if weights is None:
weights = 1.0
mean = FrechetMean(self.metric, verbose=self.verbose).fit(y).estimate_
numerator = gs.sum(weights * self.metric.squared_dist(y, y_pred))
denominator = gs.sum(weights * self.metric.squared_dist(y, mean))
return 1 - numerator / denominator if denominator != 0 else 0.0