
Commit 0ab35f7

Add GPLVM
1 parent dfb091b commit 0ab35f7

4 files changed: +258 -1 lines changed

README.md

Lines changed: 1 addition & 1 deletion
@@ -31,7 +31,7 @@ Javascript AI package and online demo.
  | regression | Least squares, Ridge, Lasso, Elastic net, RLS, Bayesian linear, Poisson, Least absolute deviations, Least trimmed squares, Least median squares, Lp norm linear, Segmented, LOWESS, spline, Gaussian process, Principal components, Partial least squares, Projection pursuit, Quantile regression, k nearest neighbor, IDW, Nadaraya Watson, Priestley Chao, Gasser Muller, RBF Network, RVM, Decision tree, Random forest, GBDT, XGBoost, SVR, MLP, GMR, Isotonic, Ramer Douglas Peucker |
  | interpolation | Nearest neighbor, IDW, Linear, Brahmagupta, Logarithmic, Cosine, (Inverse) Smoothstep, Cubic, (Centripetal) Catmull-Rom, Hermit, Polynomial, Lagrange, Trigonometric, Spline, RBF Network, Akima |
  | anomaly detection | Percentile, MAD, Tukey's fences, Grubbs's test, Thompson test, Tietjen Moore test, Generalized ESD, Hotelling, MT, MCD, k nearest neighbor, LOF, PCA, OCSVM, KDE, GMM, Isolation forest, Autoencoder, GAN |
- | dimensionality reduction | Random projection, (Dual/Kernel) PCA, Incremental PCA, Probabilistic PCA, LSA, MDS, Linear discriminant analysis, NCA, ICA, Principal curve, Sammon, FastMap, Sliced inverse regression, LLE, Laplacian eigenmaps, Isomap, SNE, t-SNE, SOM, GTM, NMF, Autoencoder, VAE |
+ | dimensionality reduction | Random projection, (Dual/Kernel) PCA, Incremental PCA, Probabilistic PCA, GPLVM, LSA, MDS, Linear discriminant analysis, NCA, ICA, Principal curve, Sammon, FastMap, Sliced inverse regression, LLE, Laplacian eigenmaps, Isomap, SNE, t-SNE, SOM, GTM, NMF, Autoencoder, VAE |
  | feature selection | Mutual information, Ridge, Lasso, Elastic net, Decision tree, NCA |
  | transformation | Box-Cox, Yeo-Johnson |
  | density estimation | Histogram, Average shifted histogram, Polynomial histogram, Maximum likelihood, Kernel density estimation, k nearest neighbor, GMM, HMM |

js/model_selector.js

Lines changed: 1 addition & 0 deletions
@@ -249,6 +249,7 @@ const AIMethods = [
    { value: 'pca', title: 'PCA' },
    { value: 'incremental_pca', title: 'Incremental PCA' },
    { value: 'probabilistic_pca', title: 'Probabilistic PCA' },
+   { value: 'gplvm', title: 'GPLVM' },
    { value: 'lsa', title: 'LSA' },
    { value: 'mds', title: 'MDS' },
    { value: 'lda', title: 'Linear Discriminant Analysis' },

js/view/gplvm.js

Lines changed: 94 additions & 0 deletions
import GPLVM from '../../lib/model/gplvm.js'

var dispGPLVM = function (elm, platform) {
    let model = null
    const fitModel = () => {
        platform.fit((tx, ty, pred_cb) => {
            if (!model) {
                const dim = platform.dimension
                const alpha = +elm.select('[name=alpha]').property('value')
                const sigma = +elm.select('[name=sigma]').property('value')
                const ez = +elm.select('[name=ez]').property('value')
                const ea = +elm.select('[name=ea]').property('value')
                const ep = +elm.select('[name=ep]').property('value')
                model = new GPLVM(dim, alpha, ez, ea, ep, 'gaussian', [1.0, sigma])
                model.init(tx)
            }
            model.fit()
            console.log(model._kernel._b)
            const y = model.predict(tx)
            pred_cb(y)
        })
    }

    const kernelElm = elm.append('span')
    kernelElm
        .append('select')
        .attr('name', 'kernel')
        .selectAll('option')
        .data(['gaussian'])
        .enter()
        .append('option')
        .attr('value', d => d)
        .text(d => d)
    const gauss_sigma = kernelElm.append('span')
    gauss_sigma
        .append('span')
        .text(' sigma = ')
        .append('input')
        .attr('type', 'number')
        .attr('name', 'sigma')
        .attr('value', 1)
        .attr('min', 0)
        .attr('max', 10)
        .attr('step', 0.1)
    elm.append('span')
        .text(' alpha = ')
        .append('input')
        .attr('type', 'number')
        .attr('name', 'alpha')
        .attr('value', 0.05)
        .attr('min', 0)
        .attr('max', 10)
        .attr('step', 0.01)
    elm.append('span')
        .text(' ez = ')
        .append('input')
        .attr('type', 'number')
        .attr('name', 'ez')
        .attr('value', 0.01)
        .attr('min', 0)
        .attr('max', 10)
        .attr('step', 0.001)
    elm.append('span')
        .text(' ea = ')
        .append('input')
        .attr('type', 'number')
        .attr('name', 'ea')
        .attr('value', 0.00005)
        .attr('min', 0)
        .attr('max', 10)
        .attr('step', 0.001)
    elm.append('span')
        .text(' ep = ')
        .append('input')
        .attr('type', 'number')
        .attr('name', 'ep')
        .attr('value', 0.0002)
        .attr('min', 0)
        .attr('max', 10)
        .attr('step', 0.001)
    platform.setting.ml.controller
        .stepLoopButtons()
        .init(() => {
            model = null
            platform.init()
        })
        .step(fitModel)
        .epoch()
}

export default function (platform) {
    platform.setting.ml.usage = 'Click and add data point. Next, click "Fit" button.'
    dispGPLVM(platform.setting.ml.configElement, platform)
}

lib/model/gplvm.js

Lines changed: 162 additions & 0 deletions
import { Matrix } from '../util/math.js'

class GaussianKernel {
    constructor(a = 1.0, b = 1.0, e = 0.1) {
        this._a = a
        this._b = b
        this._e = e
    }

    _calc(x0, x1) {
        const s = x0.copySub(x1).reduce((acc, v) => acc + v * v, 0)
        return this._a * Math.exp(-s / (2 * this._b))
    }

    _grad(x0, x1, k) {
        const g = x0.copySub(x1)
        g.mult((-this._a * k) / this._b)
        return g
    }

    calc(x, y) {
        if (!y) {
            y = x
        }
        const n = x.rows
        const m = y.rows
        const K = new Matrix(n, m)
        for (let i = 0; i < n; i++) {
            const xi = x.row(i)
            for (let j = 0; j < m; j++) {
                const v = this._calc(xi, y.row(j))
                K.set(i, j, v)
            }
        }
        return K
    }

    grad(x, k) {
        const n = x.rows
        const d = new Matrix(n, n)
        for (let i = 0; i < n; i++) {
            const xi = x.row(i)
            for (let j = 0; j < n; j++) {
                const v = this._grad(xi, x.row(j), k.at(i, j))
                d.set(i, j, v.value)
            }
        }
        return d
    }

    update(x, k, G) {
        // Adapt the kernel width b by a gradient step in log space (skipped when the rate e is 0)
        if (this._e === 0) {
            return
        }
        const n = x.rows
        const d = new Matrix(n, n)
        for (let i = 0; i < n; i++) {
            const xi = x.row(i)
            for (let j = 0; j < n; j++) {
                const di = xi.copySub(x.row(j))
                di.map(v => v ** 2)
                const s = -(this._a * di.sum()) / (2 * this._b ** 2)
                d.set(i, j, s * k.at(i, j))
            }
        }
        const dsig = G.copyMult(d.t).sum()
        this._b = Math.exp(Math.log(this._b) + this._e / n * dsig)
        if (this._b === 0) {
            this._b = 1.0e-8
        }
    }
}

/**
 * Gaussian Process Latent Variable Model
 */
export default class GPLVM {
    // https://qiita.com/student-i/items/328030426fa42b6010f9
    // https://cmbnur.com/?p=1621
    // https://ayatoashihara.github.io/my_blog/post/post9/
    /**
     * @param {number} rd Dimension of the latent (reduced) space
     * @param {number} alpha Noise term added to the kernel matrix diagonal
     * @param {number} ez Learning rate for the latent variables
     * @param {number} ea Learning rate for alpha
     * @param {number} ep Learning rate for the kernel parameter
     * @param {'gaussian'} [kernel='gaussian']
     * @param {*[]} [kernelArgs]
     */
    constructor(rd, alpha, ez = 0.01, ea = 0.00005, ep = 0.002, kernel = 'gaussian', kernelArgs = []) {
        this._rd = rd
        this._alpha = alpha
        if (kernel === 'gaussian') {
            this._kernel = new GaussianKernel(...kernelArgs, ep)
        }
        this._ez = ez
        this._ea = ea
    }

    /**
     * Initialize model.
     * @param {Array<Array<number>>} x
     */
    init(x) {
        this._x = Matrix.fromArray(x)
        // Latent variables start near the origin with small random values
        this._z = Matrix.randn(x.length, this._rd, 0, 0.01)

        this._s = this._x.dot(this._x.t)
    }

    /**
     * Fit model.
     */
    fit() {
        const n = this._x.rows

        // G is the derivative of the GP marginal log likelihood with respect to K:
        // G = (K^-1 S K^-1 - D K^-1) / 2, where S = X X^T and D is the data dimension
        const ker = this._kernel.calc(this._z)
        const K = ker.copyAdd(Matrix.eye(n, n, this._alpha))
        const Kinv = K.inv()
        const G = Kinv.dot(this._s).dot(Kinv)
        G.sub(Kinv.copyMult(this._x.cols))
        G.div(2)

        // Chain rule: accumulate G_ij * dK_ij/dz_i over j to get the update direction for each z_i
        const dK = this._kernel.grad(this._z, K)
        const dz = new Matrix(n, this._rd)
        for (let i = 0; i < n; i++) {
            const dzi = Matrix.zeros(1, this._rd)
            for (let j = 0; j < n; j++) {
                const v = new Matrix(1, this._rd, dK.at(i, j))
                v.mult(G.at(i, j))
                dzi.add(v)
            }
            dz.set(i, 0, dzi)
        }
        dz.mult(this._ez / n)

        this._kernel.update(this._z, ker, G)

        // Gradient ascent step on the latent variables and on alpha (in log space)
        this._z.add(dz)
        this._alpha = Math.exp(Math.log(this._alpha) + this._ea / n * G.trace() * this._alpha)
    }

    /**
     * Returns the reduced (latent) data.
     */
    predict() {
        return this._z.toArray()
    }

    /**
     * Returns the reconstructed data.
     * @param {Array<Array<number>>} z
     */
    reconstruct(z) {
        z = Matrix.fromArray(z)
        const n = this._z.rows
        const K = this._kernel.calc(this._z)
        const Ka = K.copyAdd(Matrix.eye(n, n, this._alpha))
        const ks = this._kernel.calc(z, this._z)
        return ks.dot(Ka.solve(this._x)).toArray()
    }
}
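
For reference, a minimal usage sketch of the new model outside the demo view. The constructor and method signatures are the ones defined in lib/model/gplvm.js above; the data values, iteration count, and import path here are illustrative only.

import GPLVM from './lib/model/gplvm.js' // adjust the path to wherever the library lives

// Illustrative 3-dimensional data (any Array<Array<number>> works)
const x = [
    [0.1, 0.2, 0.3],
    [0.9, 0.8, 0.7],
    [0.4, 0.5, 0.6],
    [0.2, 0.1, 0.0],
]

// Reduce to 2 latent dimensions; alpha = 0.05, ez = 0.01, ea = 0.00005, ep = 0.0002
// and a gaussian kernel with a = 1.0, sigma = 1.0, matching the demo defaults above
const model = new GPLVM(2, 0.05, 0.01, 0.00005, 0.0002, 'gaussian', [1.0, 1.0])
model.init(x)
for (let i = 0; i < 100; i++) {
    model.fit() // one gradient step per call
}
const z = model.predict() // latent coordinates, one row per sample
const rec = model.reconstruct(z) // mapped back to the original space

Note that predict() ignores any argument and simply returns the latent coordinates updated by fit(), which is why the demo view can call model.predict(tx) and still receive the reduced data.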
