@inproceedings{06141d3b86b84716a5c48db924376937,
title = "Simplifying Mixture Models through Function Approximation",
abstract = "Finite mixture model is a powerful tool in many statistical learning problems. In this paper, we propose a general, structure-preserving approach to reduce its model complexity, which can bring significant computational benefits in many applications. The basic idea is to group the original mixture components into compact clusters, and then minimize an upper bound on the approximation error between the original and simplified models. By adopting the L2 norm as the distance measure between mixture models, we can derive closed-form solutions that are more robust and reliable than using the KL-based distance measure. Moreover, the complexity of our algorithm is only linear in the sample size and dimensionality. Experiments on density estimation and clustering-based image segmentation demonstrate its outstanding performance in terms of both speed and accuracy.",
author = "Kai Zhang and Kwok, \{James T.\}",
note = "Publisher Copyright: {\textcopyright} NIPS 2006.All rights reserved; 19th International Conference on Neural Information Processing Systems, NIPS 2006 ; Conference date: 04-12-2006 Through 07-12-2006",
year = "2006",
language = "英语",
series = "NIPS 2006: Proceedings of the 19th International Conference on Neural Information Processing Systems",
publisher = "MIT Press Journals",
pages = "1577--1584",
editor = "Bernhard Scholkopf and Platt, \{John C.\} and Thomas Hofmann",
booktitle = "NIPS 2006",
address = "美国",
}