@inproceedings{5f2c6dfb2b9f4a60b873972d4e498c67,
title = "K-means clustering via principal component analysis",
abstract = "Principal component analysis (PCA) is a widely used statistical technique for unsupervised dimension reduction. K-means clustering is a commonly used data clustering method for performing unsupervised learning tasks. Here we prove that principal components are the continuous solutions to the discrete cluster membership indicators for K-means clustering. New lower bounds for the K-means objective function are derived, given by the total variance minus the eigenvalues of the data covariance matrix. These results indicate that unsupervised dimension reduction is closely related to unsupervised learning. Several implications are discussed. On dimension reduction, the result provides new insights into the observed effectiveness of PCA-based data reductions, beyond the conventional noise-reduction explanation that PCA, via singular value decomposition, provides the best low-dimensional linear approximation of the data. On learning, the result suggests effective techniques for K-means data clustering. DNA gene expression and Internet newsgroup data are analyzed to illustrate our results. Experiments indicate that the new bounds are within 0.5-1.5\% of the optimal values.",
author = "Chris Ding and Xiaofeng He",
year = "2004",
language = "English",
isbn = "1581138385",
series = "Proceedings, Twenty-First International Conference on Machine Learning, ICML 2004",
pages = "225--232",
editor = "R. Greiner and D. Schuurmans",
booktitle = "Proceedings, Twenty-First International Conference on Machine Learning, ICML 2004",
note = "Proceedings, Twenty-First International Conference on Machine Learning, ICML 2004 ; Conference date: 04-07-2004 Through 08-07-2004",
}