Skip to content

Commit dfb091b

Browse files
committed
Add Probabilistic PCA
1 parent f103287 commit dfb091b

File tree

4 files changed

+161
-1
lines changed

4 files changed

+161
-1
lines changed

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@ Javascript AI package and online demo.
3131
| regression | Least squares, Ridge, Lasso, Elastic net, RLS, Bayesian linear, Poisson, Least absolute deviations, Least trimmed squares, Least median squares, Lp norm linear, Segmented, LOWESS, spline, Gaussian process, Principal components, Partial least squares, Projection pursuit, Quantile regression, k nearest neighbor, IDW, Nadaraya Watson, Priestley Chao, Gasser Muller, RBF Network, RVM, Decision tree, Random forest, GBDT, XGBoost, SVR, MLP, GMR, Isotonic, Ramer Douglas Peucker |
3232
| interpolation | Nearest neighbor, IDW, Linear, Brahmagupta, Logarithmic, Cosine, (Inverse) Smoothstep, Cubic, (Centripetal) Catmull-Rom, Hermite, Polynomial, Lagrange, Trigonometric, Spline, RBF Network, Akima |
3333
| anomaly detection | Percentile, MAD, Tukey's fences, Grubbs's test, Thompson test, Tietjen Moore test, Generalized ESD, Hotelling, MT, MCD, k nearest neighbor, LOF, PCA, OCSVM, KDE, GMM, Isolation forest, Autoencoder, GAN |
34-
| dimensionality reduction | Random projection, (Dual/Kernel) PCA, Incremental PCA, LSA, MDS, Linear discriminant analysis, NCA, ICA, Principal curve, Sammon, FastMap, Sliced inverse regression, LLE, Laplacian eigenmaps, Isomap, SNE, t-SNE, SOM, GTM, NMF, Autoencoder, VAE |
34+
| dimensionality reduction | Random projection, (Dual/Kernel) PCA, Incremental PCA, Probabilistic PCA, LSA, MDS, Linear discriminant analysis, NCA, ICA, Principal curve, Sammon, FastMap, Sliced inverse regression, LLE, Laplacian eigenmaps, Isomap, SNE, t-SNE, SOM, GTM, NMF, Autoencoder, VAE |
3535
| feature selection | Mutual information, Ridge, Lasso, Elastic net, Decision tree, NCA |
3636
| transformation | Box-Cox, Yeo-Johnson |
3737
| density estimation | Histogram, Average shifted histogram, Polynomial histogram, Maximum likelihood, Kernel density estimation, k nearest neighbor, GMM, HMM |

js/model_selector.js

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -248,6 +248,7 @@ const AIMethods = [
248248
{ value: 'random_projection', title: 'Random projection' },
249249
{ value: 'pca', title: 'PCA' },
250250
{ value: 'incremental_pca', title: 'Incremental PCA' },
251+
{ value: 'probabilistic_pca', title: 'Probabilistic PCA' },
251252
{ value: 'lsa', title: 'LSA' },
252253
{ value: 'mds', title: 'MDS' },
253254
{ value: 'lda', title: 'Linear Discriminant Analysis' },

js/view/probabilistic_pca.js

Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
import { ProbabilisticPCA } from '../../lib/model/probabilistic_pca.js'
2+
3+
/**
 * Set up the Probabilistic PCA demo controls and fitting loop.
 * @param {*} elm configuration element the method selector is appended to
 * @param {*} platform platform object supplying data, dimension and rendering callbacks
 */
const dispPPCA = (elm, platform) => {
	let model = null
	const fitModel = () => {
		platform.fit((tx, ty, pred_cb) => {
			// Lazily construct the model so the currently selected method
			// is read at the time of the first fit, not at setup time.
			if (!model) {
				const dim = platform.dimension
				const method = elm.select('[name=method]').property('value')
				model = new ProbabilisticPCA(method, dim)
			}
			model.fit(tx)
			const y = model.predict(tx)
			pred_cb(y)
		})
	}

	// Solver selector: closed-form 'analysis', iterative 'em', or 'bayes'.
	elm.append('select')
		.attr('name', 'method')
		.selectAll('option')
		.data(['analysis', 'em', 'bayes'])
		.enter()
		.append('option')
		.attr('value', d => d)
		.text(d => d)
	platform.setting.ml.controller
		.stepLoopButtons()
		.init(() => {
			// Reset: discard the fitted model and reinitialize the platform.
			model = null
			platform.init()
		})
		.step(fitModel)
		.epoch()
}
35+
36+
/**
 * Entry point: register the usage text and build the Probabilistic PCA view.
 * @param {*} platform platform object for this demo page
 */
export default function (platform) {
	const ml = platform.setting.ml
	ml.usage = 'Click and add data point. Next, click "Fit" button.'
	dispPPCA(ml.configElement, platform)
}

lib/model/probabilistic_pca.js

Lines changed: 120 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,120 @@
1+
import { Matrix } from '../util/math.js'
2+
3+
/**
 * Probabilistic Principal component analysis
 *
 * Models data as x = W z + m + noise with isotropic Gaussian noise of
 * variance sigma^2 and a q-dimensional latent z (q = rd).
 */
export class ProbabilisticPCA {
	// https://qiita.com/amber_kshz/items/e47fa606863aa97c7bd7
	// https://qiita.com/ctgk/items/89c11192affe7f236852
	// http://www.cs.columbia.edu/~blei/seminar/2020-representation/readings/TippingBishop1999.pdf
	/**
	 * @param {'analysis' | 'em' | 'bayes'} method Solver: closed-form eigendecomposition ('analysis'),
	 *     EM iterations ('em'), or EM with an ARD-style prior on the columns of W ('bayes')
	 * @param {number} rd Number of latent (reduced) dimensions
	 */
	constructor(method = 'analysis', rd) {
		this._method = method
		this._rd = rd
	}

	/**
	 * Fit model. For 'em' and 'bayes' each call performs one EM iteration,
	 * so repeated calls refine the estimate; 'analysis' solves in one shot.
	 * @param {Array<Array<number>>} x
	 */
	fit(x) {
		x = Matrix.fromArray(x)
		if (this._method === 'analysis') {
			this._analysis(x)
		} else if (this._method === 'em') {
			this._em(x)
		} else if (this._method === 'bayes') {
			this._bayes(x)
		}
		// NOTE(review): an unrecognized method name silently does nothing — confirm intended.
	}

	// Closed-form maximum-likelihood solution (Tipping & Bishop 1999):
	// sigma^2 = mean of the discarded eigenvalues, W = U_q (L_q - sigma^2 I)^(1/2).
	_analysis(x) {
		this._m = x.mean(0)

		const s = x.cov()
		const [eigvalues, eigvectors] = s.eigen()

		// Noise variance: average of the eigenvalues outside the retained subspace.
		// (If rd equals the data dimension the loop is empty and sigma stays 0.)
		this._sigma = 0
		for (let i = this._rd; i < eigvalues.length; i++) {
			this._sigma += eigvalues[i] / (eigvalues.length - this._rd)
		}

		// Per-component scale sqrt(lambda_i - sigma^2) for the retained eigenvectors.
		const l = eigvalues.slice(0, this._rd).map(v => Math.sqrt(v - this._sigma))

		// W = (first rd eigenvectors) * diag(l)
		// (the three-arg slice presumably selects the first rd columns — axis 1; verify against Matrix)
		this._w = eigvectors.slice(0, this._rd, 1).dot(Matrix.diag(l))
	}

	// One EM iteration for ML estimation of W and sigma^2.
	_em(x) {
		if (!this._w) {
			// First call: initialize W, sigma^2 and center the data at its mean.
			this._w = Matrix.eye(x.cols, this._rd)
			this._sigma = 0
			this._m = x.mean(0)
		}
		x = x.copySub(this._m)

		// M = W^T W + sigma^2 I  (rd x rd)
		const m = this._w.tDot(this._w)
		m.add(Matrix.eye(this._rd, this._rd, this._sigma))
		const minv = m.inv()

		// E-step: rows of ez are E[z_n] = M^-1 W^T x_n;
		// ezz accumulates sum_n E[z_n z_n^T] = N sigma^2 M^-1 + Ez^T Ez.
		const ez = x.dot(this._w).dot(minv.t)
		const ezz = minv.copyMult(this._sigma * x.rows)
		ezz.add(ez.tDot(ez))

		// M-step: W_new = (sum_n x_n E[z_n]^T)(sum_n E[z_n z_n^T])^-1
		this._w = x.tDot(ez).dot(ezz.inv())
		// sigma^2_new = (1/(N d)) * (sum ||x_n||^2 - 2 sum E[z_n]^T W_new^T x_n
		//                             + tr(sum E[z_n z_n^T] W_new^T W_new))
		// (element-wise product + sum is used as a trace; new W enters both terms as in the paper)
		this._sigma =
			(x.copyMult(x).sum() -
				2 * ez.copyMult(x.dot(this._w)).sum() +
				ezz.copyMult(this._w.tDot(this._w).t).sum()) /
			(x.rows * x.cols)
	}

	// One EM iteration of Bayesian PPCA: like _em but each column of W gets
	// an ARD precision alpha_i that shrinks unneeded latent directions.
	_bayes(x) {
		if (!this._w) {
			// First call: initialize W, sigma^2, the mean, and the ARD precisions alpha.
			this._w = Matrix.eye(x.cols, this._rd)
			this._sigma = 0
			this._m = x.mean(0)
			this._alpha = Matrix.ones(1, this._rd).value
		}
		x = x.copySub(this._m)

		// M = W^T W + sigma^2 I  (rd x rd)
		const m = this._w.tDot(this._w)
		m.add(Matrix.eye(this._rd, this._rd, this._sigma))
		const minv = m.inv()

		// E-step: identical to _em.
		const ez = x.dot(this._w).dot(minv.t)
		const ezz = minv.copyMult(this._sigma * x.rows)
		ezz.add(ez.tDot(ez))

		// Regularization term sigma^2 * diag(alpha) added inside the inverse below.
		const a = Matrix.diag(this._alpha)
		a.mult(this._sigma)

		// M-step with ARD prior: W_new = (sum x_n E[z_n]^T)(sum E[z_n z_n^T] + sigma^2 A)^-1
		this._w = x.tDot(ez).dot(ezz.copyAdd(a).inv())
		this._sigma =
			(x.copyMult(x).sum() -
				2 * ez.copyMult(x.dot(this._w)).sum() +
				ezz.copyMult(this._w.tDot(this._w).t).sum()) /
			(x.rows * x.cols)
		// Update precisions: alpha_i = d / ||w_i||^2 (column-wise squared norm of W).
		this._alpha = this._w
			.copyMult(this._w)
			.sum(0)
			.value.map(v => x.cols / v)
	}

	/**
	 * Returns reduced data.
	 *
	 * Uses the posterior mean of the latent variable:
	 * E[z|x] = M^-1 W^T (x - m) with M = W^T W + sigma^2 I.
	 * @param {Array<Array<number>>} x
	 * @returns {Array<Array<number>>}
	 */
	predict(x) {
		x = Matrix.fromArray(x)
		x.sub(this._m)

		const m = this._w.tDot(this._w)
		const d = this._w.cols
		m.add(Matrix.eye(d, d, this._sigma))
		// Row-vector form of M^-1 W^T (x - m); M is symmetric so no transpose is needed.
		return x.dot(this._w).dot(m.inv()).toArray()
	}
}

0 commit comments

Comments
 (0)