8 #ifndef MACHINE_LEARNING_PCA_HPP 9 #define MACHINE_LEARNING_PCA_HPP 11 #include "../include/matrix/Matrix.hpp" 21 MatrixD
X, eigenvalues, eigenvectors, percentages, cumPercentages;
28 explicit PCA(MatrixD data) {
36 MatrixD XMinusMean = X.minusMean();
37 MatrixD covariances = XMinusMean.cov();
41 for (
size_t i = 0; i < covariances.nRows(); i++) {
42 sumVar += covariances(i, i);
45 pair<MatrixD, MatrixD> eig = covariances.eigen();
46 eigenvalues = eig.first;
47 eigenvectors = eig.second;
50 percentages = MatrixD(eigenvalues.nRows(), eigenvalues.nCols());
51 cumPercentages = MatrixD(eigenvalues.nRows(), eigenvalues.nCols());
52 for (
int i = 0; i < eigenvalues.nRows(); i++) {
53 percentages(i, 0) = eigenvalues(i, 0) / sumVar;
54 cumPercentages(i, 0) = i == 0 ? percentages(i, 0) : percentages(i, 0) + cumPercentages(i - 1, 0);
61 MatrixD finalData = eigenvectors.transpose() * X.minusMean().transpose();
62 return finalData.transpose();
69 MatrixI filter = MatrixI::zeros(eigenvalues.nRows(), 1);
71 for (
int i = 0; i < numComponents; i++) {
75 MatrixD finalData = eigenvectors.getColumns(filter).transpose() * X.minusMean().transpose();
76 return finalData.transpose();
92 return cumPercentages;
96 #endif //MACHINE_LEARNING_PCA_HPP k-nearest neighbors algorithm, able to do regression and classification
Principal component analysis.
PCA(MatrixD data)
Principal component analysis algorithm.
const MatrixD & getCumPercentages() const
const MatrixD & getEigenvectors() const
MatrixD transform(int numComponents)
Rotates the data set, using the eigenvectors of the covariance matrix with the largest eigenvalues as the new base.
const MatrixD & getPercentages() const
MatrixD transform()
Rotates the data set, using the eigenvectors of the covariance matrix as the new base.
const MatrixD & getEigenvalues() const
void fit()
Finds the principal components of a Matrix.