@inproceedings{e1f9890c1c9645878f58eaa004a75224,
  title     = {Learning Latent Variable Models with Discriminant Regularization},
  abstract  = {In many machine learning applications, data are often described by a large number of features or attributes. However, too many features can result in overfitting. This is often the case when the number of examples is smaller than the number of features. The problem can be mitigated by learning latent variable models where the data can be described by a fewer number of latent dimensions. There are many techniques for learning latent variable models in the literature. Most of these techniques can be grouped into two classes: techniques that are informative, represented by principal component analysis (PCA), and techniques that are discriminant, represented by linear discriminant analysis (LDA). Each class of the techniques has its advantages. In this work, we introduce a technique for learning latent variable models with discriminant regularization that combines the characteristics of both classes. Empirical evaluation using a variety of data sets is presented to verify the performance of the proposed technique.},
  keywords  = {Classification, Dimensionality reduction, Latent variable models},
  author    = {Peng, Jing and Aved, {Alex J.}},
  note      = {Publisher Copyright: {\textcopyright} 2021, This is a U.S. government work and not under copyright protection in the United States; foreign copyright protection may apply. Conference date: 22-02-2020 Through 24-02-2020},
  year      = {2021},
  doi       = {10.1007/978-3-030-71158-0_18},
  language  = {English},
  isbn      = {9783030711573},
  series    = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)},
  publisher = {Springer Science and Business Media Deutschland GmbH},
  pages     = {378--398},
  editor    = {Rocha, {Ana Paula} and Steels, Luc and {van den Herik}, Jaap},
  booktitle = {Agents and Artificial Intelligence - 12th International Conference, ICAART 2020, Revised Selected Papers},
}