@inproceedings{8c096b89bf3549e3ae5adba0b379213d,
  author    = {Hasan, Mohammed A.},
  title     = {Diagonally weighted and shifted criteria for minor and principal component extraction},
  booktitle = {Proceedings of the International Joint Conference on Neural Networks, IJCNN 2005},
  series    = {Proceedings of the International Joint Conference on Neural Networks},
  pages     = {1251--1256},
  year      = {2005},
  doi       = {10.1109/IJCNN.2005.1556033},
  isbn      = {0780390482},
  language  = {English (US)},
  abstract  = {A framework for a class of minor and principal component learning rules is presented. These rules compute multiple eigenvectors and not only a basis for a multi-dimensional eigenspace. Several MCA/PCA cost functions which are weighted or shifted by a diagonal matrix are optimized subject to orthogonal or symmetric constraints. A number of minor and principal component learning rules for symmetric matrices and matrix pencils, many of which are new, are obtained by exploiting symmetry of constrained criteria. These algorithms may be seen as the counterparts or generalization of Oja's and Xu's systems for computing multiple principal component analyzers. Procedures for converting minor component flows into principal component flows are also discussed.},
  keywords  = {Adaptive learning algorithm, Diagonally shifted cost function, Extreme eigenvalues, Generalized MCA, Generalized PCA, Minor component analysis, Neural networks, Power method, Principal component analysis, Symmetric constraints},
  note      = {International Joint Conference on Neural Networks, IJCNN 2005 ; Conference date: 31-07-2005 Through 04-08-2005},
}