Commit 1699213d0bc480bdc29b32e1a751aaaf602fbf2f

Authored by Josh Klontz
1 parent 845605d8

minor API improvements to PCA/LDA

Showing 1 changed file with 35 additions and 18 deletions
openbr/plugins/eigen3.cpp
@@ -40,8 +40,12 @@ protected:
     Q_PROPERTY(int drop READ get_drop WRITE set_drop RESET reset_drop STORED false)
     Q_PROPERTY(bool whiten READ get_whiten WRITE set_whiten RESET reset_whiten STORED false)
 
-    // If keep < 1 then it is assumed to be the energy to retain
-    // else it is the number of leading eigenvectors to keep.
+    /*!
+     * keep < 0: All eigenvalues are retained.
+     * keep = 0: No PCA performed, eigenvectors form an identity matrix.
+     * 0 < keep < 1: Fraction of the variance to retain.
+     * keep >= 1: Number of leading eigenvectors to retain.
+     */
     BR_PROPERTY(float, keep, 0.95)
     BR_PROPERTY(int, drop, 0)
     BR_PROPERTY(bool, whiten, false)
@@ -127,23 +131,33 @@ protected:
         int instances = data.cols();
         const bool dominantEigenEstimation = (dimsIn > instances);
 
-        // Compute and remove mean
-        mean = Eigen::VectorXf(dimsIn);
-        for (int i=0; i<dimsIn; i++) mean(i) = data.row(i).sum() / (float)instances;
-        for (int i=0; i<dimsIn; i++) data.row(i).array() -= mean(i);
-
-        // Calculate covariance matrix
-        Eigen::MatrixXd cov;
-        if (dominantEigenEstimation) cov = data.transpose() * data / (instances-1.0);
-        else cov = data * data.transpose() / (instances-1.0);
-
-        // Compute eigendecomposition. Returns eigenvectors/eigenvalues in increasing order by eigenvalue.
-        Eigen::SelfAdjointEigenSolver<Eigen::MatrixXd> eSolver(cov);
-        Eigen::MatrixXd allEVals = eSolver.eigenvalues();
-        Eigen::MatrixXd allEVecs = eSolver.eigenvectors();
-        if (dominantEigenEstimation) allEVecs = data * allEVecs;
+        Eigen::MatrixXd allEVals, allEVecs;
+        if (keep != 0) {
+            // Compute and remove mean
+            mean = Eigen::VectorXf(dimsIn);
+            for (int i=0; i<dimsIn; i++) mean(i) = data.row(i).sum() / (float)instances;
+            for (int i=0; i<dimsIn; i++) data.row(i).array() -= mean(i);
+
+            // Calculate covariance matrix
+            Eigen::MatrixXd cov;
+            if (dominantEigenEstimation) cov = data.transpose() * data / (instances-1.0);
+            else cov = data * data.transpose() / (instances-1.0);
+
+            // Compute eigendecomposition. Returns eigenvectors/eigenvalues in increasing order by eigenvalue.
+            Eigen::SelfAdjointEigenSolver<Eigen::MatrixXd> eSolver(cov);
+            allEVals = eSolver.eigenvalues();
+            allEVecs = eSolver.eigenvectors();
+            if (dominantEigenEstimation) allEVecs = data * allEVecs;
+        } else {
+            // Null case
+            mean = Eigen::VectorXf::Zero(dimsIn);
+            allEVecs = Eigen::MatrixXd::Identity(dimsIn, dimsIn);
+            allEVals = Eigen::VectorXd::Ones(dimsIn);
+        }
 
-        if (keep < 1) {
+        if (keep <= 0) {
+            keep = dimsIn - drop;
+        } else if (keep < 1) {
             // Keep eigenvectors that retain a certain energy percentage.
             const double totalEnergy = allEVals.sum();
             if (totalEnergy == 0) {
@@ -301,9 +315,11 @@ class LDATransform : public Transform
 {
     Q_OBJECT
     Q_PROPERTY(float pcaKeep READ get_pcaKeep WRITE set_pcaKeep RESET reset_pcaKeep STORED false)
+    Q_PROPERTY(bool pcaWhiten READ get_pcaWhiten WRITE set_pcaWhiten RESET reset_pcaWhiten STORED false)
     Q_PROPERTY(int directLDA READ get_directLDA WRITE set_directLDA RESET reset_directLDA STORED false)
     Q_PROPERTY(float directDrop READ get_directDrop WRITE set_directDrop RESET reset_directDrop STORED false)
     BR_PROPERTY(float, pcaKeep, 0.98)
+    BR_PROPERTY(bool, pcaWhiten, false)
     BR_PROPERTY(int, directLDA, 0)
     BR_PROPERTY(float, directDrop, 0.1)
 
@@ -319,6 +335,7 @@ class LDATransform : public Transform
         // Perform PCA dimensionality reduction
         PCATransform pca;
         pca.keep = pcaKeep;
+        pca.whiten = pcaWhiten;
         pca.train(trainingSet);
         mean = pca.mean;
 