We introduce a Bayesian model for inferring mixtures of subspaces of different dimensions. The model allows flexible and efficient learning of a density supported in an ambient space which in fact can concentrate around some lower-dimensional space. The key challenge in such a mixture model is specification of prior distributions over subspaces of different dimensions. We address this challenge by embedding subspaces or Grassmann manifolds into a sphere of relatively low dimension and specifying priors on the sphere. We provide an efficient sampling algorithm for the posterior distribution of the model parameters. We illustrate that a simple extension of our mixture of subspaces model can be applied to topic modeling. The utility of our approach is demonstrated with applications to real and simulated data.
%0 Journal Article
%1 thomas2022learning
%A St. Thomas, Brian
%A You, Kisung
%A Lin, Lizhen
%A Lim, Lek-Heng
%A Mukherjee, Sayan
%D 2022
%I Taylor & Francis
%J Journal of Computational and Graphical Statistics
%K imported
%N 2
%P 337--350
%R 10.1080/10618600.2021.2000420
%T Learning Subspaces of Different Dimensions
%U https://doi.org/10.1080/10618600.2021.2000420
%V 31
%X We introduce a Bayesian model for inferring mixtures of subspaces of different dimensions. The model allows flexible and efficient learning of a density supported in an ambient space which in fact can concentrate around some lower-dimensional space. The key challenge in such a mixture model is specification of prior distributions over subspaces of different dimensions. We address this challenge by embedding subspaces or Grassmann manifolds into a sphere of relatively low dimension and specifying priors on the sphere. We provide an efficient sampling algorithm for the posterior distribution of the model parameters. We illustrate that a simple extension of our mixture of subspaces model can be applied to topic modeling. The utility of our approach is demonstrated with applications to real and simulated data.
@article{thomas2022learning,
  abstract  = {We introduce a Bayesian model for inferring mixtures of subspaces of different dimensions. The model allows flexible and efficient learning of a density supported in an ambient space which in fact can concentrate around some lower-dimensional space. The key challenge in such a mixture model is specification of prior distributions over subspaces of different dimensions. We address this challenge by embedding subspaces or Grassmann manifolds into a sphere of relatively low dimension and specifying priors on the sphere. We provide an efficient sampling algorithm for the posterior distribution of the model parameters. We illustrate that a simple extension of our mixture of subspaces model can be applied to topic modeling. The utility of our approach is demonstrated with applications to real and simulated data.},
  added-at  = {2024-10-02T13:52:45.000+0200},
  author    = {{St. Thomas}, Brian and You, Kisung and Lin, Lizhen and Lim, Lek-Heng and Mukherjee, Sayan},
  biburl    = {https://puma.scadsai.uni-leipzig.de/bibtex/2b7dd7195374d3861087639161c389c32/scadsfct},
  doi       = {10.1080/10618600.2021.2000420},
  interhash = {3fd7818525e07b4c10c8ae96cc414318},
  intrahash = {b7dd7195374d3861087639161c389c32},
  journal   = {Journal of Computational and Graphical Statistics},
  keywords  = {imported},
  number    = {2},
  pages     = {337--350},
  publisher = {Taylor \& Francis},
  timestamp = {2024-10-02T13:52:45.000+0200},
  title     = {Learning Subspaces of Different Dimensions},
  volume    = {31},
  year      = {2022}
}