-
-
Notifications
You must be signed in to change notification settings - Fork 4
/
DotProduct.ts
271 lines (213 loc) · 8.23 KB
/
DotProduct.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
/* eslint-disable */
/* NOTE: This file is auto-generated. Do not edit it directly. */
import crypto from 'node:crypto'
import { PythonBridge, NDArray, ArrayLike, SparseMatrix } from '@/sklearn/types'
/**
  Dot-Product kernel.

  The DotProduct kernel is non-stationary and can be obtained from linear regression by putting \\(N(0, 1)\\) priors on the coefficients of \\(x\_d (d = 1, . . . , D)\\) and a prior of \\(N(0, \\sigma\_0^2)\\) on the bias. The DotProduct kernel is invariant to a rotation of the coordinates about the origin, but not translations. It is parameterized by a parameter sigma\_0 \\(\\sigma\\) which controls the inhomogeneity of the kernel. For \\(\\sigma\_0^2 =0\\), the kernel is called the homogeneous linear kernel, otherwise it is inhomogeneous. The kernel is given by

  [Python Reference](https://scikit-learn.org/stable/modules/generated/sklearn.gaussian_process.kernels.DotProduct.html)

  Lifecycle: construct → `init(py)` (creates the Python-side kernel) → call methods →
  `dispose()` (frees the Python-side object). Every method rejects after disposal and
  (except `init`) before initialization.
 */
export class DotProduct {
  // Unique key for this instance inside the Python-side `bridgeDotProduct` dict,
  // so many JS instances can share one Python interpreter without collisions.
  id: string

  // Raw constructor options; forwarded to the Python `DotProduct(...)` ctor in init().
  opts: any

  // Bridge used to execute Python statements (set by init() or the `py` setter).
  // NOTE(review): `ex` appears to run Python source and `_py\`…\`` to evaluate and
  // marshal a Python expression back to JS — confirm against PythonBridge docs.
  _py: PythonBridge

  // True once init() has created the Python-side kernel object.
  _isInitialized: boolean = false

  // True once dispose() has deleted the Python-side kernel object; terminal state.
  _isDisposed: boolean = false

  constructor(opts?: {
    /**
      Parameter controlling the inhomogeneity of the kernel. If sigma\_0=0, the kernel is homogeneous.

      @defaultValue `1`
     */
    sigma_0?: any

    /**
      The lower and upper bound on 'sigma\_0'. If set to "fixed", 'sigma\_0' cannot be changed during hyperparameter tuning.
     */
    sigma_0_bounds?: 'fixed'
  }) {
    // Short random suffix (first UUID segment) keeps ids unique in the shared bridge dict.
    this.id = `DotProduct${crypto.randomUUID().split('-')[0]}`
    this.opts = opts || {}
  }

  get py(): PythonBridge {
    return this._py
  }

  set py(pythonBridge: PythonBridge) {
    this._py = pythonBridge
  }

  /**
    Initializes the underlying Python resources.

    This instance is not usable until the `Promise` returned by `init()` resolves.

    Idempotent: a second call after success is a no-op. Throws if the instance was
    disposed or if no bridge is supplied.
   */
  async init(py: PythonBridge): Promise<void> {
    if (this._isDisposed) {
      throw new Error('This DotProduct instance has already been disposed')
    }

    if (this._isInitialized) {
      return
    }

    if (!py) {
      throw new Error('DotProduct.init requires a PythonBridge instance')
    }

    this._py = py

    // Ensure imports and the shared registry dict exist in the Python interpreter.
    // The try/except makes this safe to run repeatedly across instances.
    await this._py.ex`
import numpy as np
from sklearn.gaussian_process.kernels import DotProduct
try: bridgeDotProduct
except NameError: bridgeDotProduct = {}
`

    // set up constructor params: build a Python dict from opts, then drop None
    // entries so sklearn's own defaults apply for anything the caller omitted.
    await this._py.ex`ctor_DotProduct = {'sigma_0': ${
      this.opts['sigma_0'] ?? undefined
    }, 'sigma_0_bounds': ${this.opts['sigma_0_bounds'] ?? undefined}}
ctor_DotProduct = {k: v for k, v in ctor_DotProduct.items() if v is not None}`

    // Create the Python-side kernel and register it under this instance's id.
    await this._py
      .ex`bridgeDotProduct[${this.id}] = DotProduct(**ctor_DotProduct)`

    this._isInitialized = true
  }

  /**
    Disposes of the underlying Python resources.

    Once `dispose()` is called, the instance is no longer usable.

    Safe to call multiple times; a no-op when never initialized.
   */
  async dispose() {
    if (this._isDisposed) {
      return
    }

    if (!this._isInitialized) {
      return
    }

    // Remove this instance's kernel from the Python-side registry.
    await this._py.ex`del bridgeDotProduct[${this.id}]`

    this._isDisposed = true
  }

  /**
    Return the kernel k(X, Y) and optionally its gradient.

    Arguments are marshalled into a Python dict (None-filtered, like the ctor
    params) and the result is converted back via `.tolist()` when available.
   */
  async __call__(opts: {
    /**
      Left argument of the returned kernel k(X, Y)
     */
    X?: NDArray[]

    /**
      Right argument of the returned kernel k(X, Y). If `undefined`, k(X, X) is evaluated instead.
     */
    Y?: NDArray[]

    /**
      Determines whether the gradient with respect to the log of the kernel hyperparameter is computed. Only supported when Y is `undefined`.

      @defaultValue `false`
     */
    eval_gradient?: boolean
  }): Promise<NDArray[]> {
    if (this._isDisposed) {
      throw new Error('This DotProduct instance has already been disposed')
    }

    if (!this._isInitialized) {
      throw new Error('DotProduct must call init() before __call__()')
    }

    // set up method params: wrap array args in np.array only when provided,
    // then strip None entries so Python sees only explicit arguments.
    await this._py.ex`pms_DotProduct___call__ = {'X': np.array(${
      opts['X'] ?? undefined
    }) if ${opts['X'] !== undefined} else None, 'Y': np.array(${
      opts['Y'] ?? undefined
    }) if ${opts['Y'] !== undefined} else None, 'eval_gradient': ${
      opts['eval_gradient'] ?? undefined
    }}
pms_DotProduct___call__ = {k: v for k, v in pms_DotProduct___call__.items() if v is not None}`

    // invoke method
    await this._py
      .ex`res_DotProduct___call__ = bridgeDotProduct[${this.id}].__call__(**pms_DotProduct___call__)`

    // convert the result from python to node.js
    return this
      ._py`res_DotProduct___call__.tolist() if hasattr(res_DotProduct___call__, 'tolist') else res_DotProduct___call__`
  }

  /**
    Returns a clone of self with given hyperparameters theta.

    NOTE(review): the clone is returned as marshalled data, not as a new wrapped
    DotProduct instance — callers cannot invoke kernel methods on the result.
   */
  async clone_with_theta(opts: {
    /**
      The hyperparameters
     */
    theta?: NDArray
  }): Promise<any> {
    if (this._isDisposed) {
      throw new Error('This DotProduct instance has already been disposed')
    }

    if (!this._isInitialized) {
      throw new Error('DotProduct must call init() before clone_with_theta()')
    }

    // set up method params
    await this._py.ex`pms_DotProduct_clone_with_theta = {'theta': np.array(${
      opts['theta'] ?? undefined
    }) if ${opts['theta'] !== undefined} else None}
pms_DotProduct_clone_with_theta = {k: v for k, v in pms_DotProduct_clone_with_theta.items() if v is not None}`

    // invoke method
    await this._py
      .ex`res_DotProduct_clone_with_theta = bridgeDotProduct[${this.id}].clone_with_theta(**pms_DotProduct_clone_with_theta)`

    // convert the result from python to node.js
    return this
      ._py`res_DotProduct_clone_with_theta.tolist() if hasattr(res_DotProduct_clone_with_theta, 'tolist') else res_DotProduct_clone_with_theta`
  }

  /**
    Returns the diagonal of the kernel k(X, X).

    The result of this method is identical to np.diag(self(X)); however, it can be evaluated more efficiently since only the diagonal is evaluated.
   */
  async diag(opts: {
    /**
      Left argument of the returned kernel k(X, Y).
     */
    X?: NDArray[]
  }): Promise<NDArray> {
    if (this._isDisposed) {
      throw new Error('This DotProduct instance has already been disposed')
    }

    if (!this._isInitialized) {
      throw new Error('DotProduct must call init() before diag()')
    }

    // set up method params
    await this._py.ex`pms_DotProduct_diag = {'X': np.array(${
      opts['X'] ?? undefined
    }) if ${opts['X'] !== undefined} else None}
pms_DotProduct_diag = {k: v for k, v in pms_DotProduct_diag.items() if v is not None}`

    // invoke method
    await this._py
      .ex`res_DotProduct_diag = bridgeDotProduct[${this.id}].diag(**pms_DotProduct_diag)`

    // convert the result from python to node.js
    return this
      ._py`res_DotProduct_diag.tolist() if hasattr(res_DotProduct_diag, 'tolist') else res_DotProduct_diag`
  }

  /**
    Returns whether the kernel is stationary.

    (The DotProduct kernel is documented as non-stationary, so sklearn returns
    false here; the generated bridge still forwards the call.)
   */
  async is_stationary(opts: {}): Promise<any> {
    if (this._isDisposed) {
      throw new Error('This DotProduct instance has already been disposed')
    }

    if (!this._isInitialized) {
      throw new Error('DotProduct must call init() before is_stationary()')
    }

    // set up method params (none for this method; kept for template uniformity)
    await this._py.ex`pms_DotProduct_is_stationary = {}
pms_DotProduct_is_stationary = {k: v for k, v in pms_DotProduct_is_stationary.items() if v is not None}`

    // invoke method
    await this._py
      .ex`res_DotProduct_is_stationary = bridgeDotProduct[${this.id}].is_stationary(**pms_DotProduct_is_stationary)`

    // convert the result from python to node.js
    return this
      ._py`res_DotProduct_is_stationary.tolist() if hasattr(res_DotProduct_is_stationary, 'tolist') else res_DotProduct_is_stationary`
  }

  /**
    Hyperparameter descriptor for sigma_0, fetched lazily from the Python object.

    Implemented as a synchronous getter returning a `Promise` (via an async IIFE)
    so property access stays attribute-like while the bridge round-trip is awaited.
   */
  get hyperparameter_sigma_0(): Promise<any> {
    if (this._isDisposed) {
      throw new Error('This DotProduct instance has already been disposed')
    }

    if (!this._isInitialized) {
      throw new Error(
        'DotProduct must call init() before accessing hyperparameter_sigma_0'
      )
    }

    return (async () => {
      // invoke accessor
      await this._py
        .ex`attr_DotProduct_hyperparameter_sigma_0 = bridgeDotProduct[${this.id}].hyperparameter_sigma_0`

      // convert the result from python to node.js
      return this
        ._py`attr_DotProduct_hyperparameter_sigma_0.tolist() if hasattr(attr_DotProduct_hyperparameter_sigma_0, 'tolist') else attr_DotProduct_hyperparameter_sigma_0`
    })()
  }
}