@inproceedings{1df8c356a6544b5591779d12657be597,
  author    = {Peng, Jing and Aved, Alex J.},
  title     = {Information Preserving Discriminant Projections},
  abstract  = {In classification, a large number of features often make the design of a classifier difficult and degrade its performance. This is particularly pronounced when the number of examples is small relative to the number of features, which is due to the curse of dimensionality. There are many dimensionality reduction techniques in the literature. However, most these techniques are either informative (or minimum information loss), as in principal component analysis (PCA), or discriminant, as in linear discriminant analysis (LDA). Each type of technique has its strengths and weaknesses. Motivated by Gaussian Processes Latent Variable Models, we propose a simple linear projection technique that explores the characteristics of both PCA and LDA in latent representations. The proposed technique optimizes a regularized information preserving objective, where the regularizer is a LDA based criterion. And as such, it prefers a latent space that is both informative and discriminant, thereby providing better generalization performance. Experimental results based on a variety of data sets are provided to validate the proposed technique.},
  keywords  = {Classification, Dimensionality Reduction, Feature Selection},
  booktitle = {ICAART 2020 - Proceedings of the 12th International Conference on Agents and Artificial Intelligence},
  editor    = {Rocha, Ana and Steels, Luc and van den Herik, Jaap},
  series    = {ICAART 2020 - Proceedings of the 12th International Conference on Agents and Artificial Intelligence},
  publisher = {SciTePress},
  pages     = {162--171},
  year      = {2020},
  language  = {English},
  note      = {Publisher Copyright: {\textcopyright} 2020 by SCITEPRESS - Science and Technology Publications, Lda. All rights reserved; 12th International Conference on Agents and Artificial Intelligence, ICAART 2020 ; Conference date: 22-02-2020 Through 24-02-2020},
}