@inproceedings{Supelec488,
  author    = {Geist, Matthieu and Pietquin, Olivier and Fricout, Gabriel},
  title     = {Kernelizing Vector Quantization Algorithms},
  booktitle = {Proceedings of the 17th European Symposium on Artificial Neural Networks ({ESANN} 09)},
  year      = {2009},
  month     = apr,
  pages     = {541--546},
  editor    = {Verleysen, Michel},
  address   = {Bruges (Belgium)},
  url       = {http://www.dice.ucl.ac.be/Proceedings/esann/esannpdf/es2009-49.pdf},
  abstract  = {The kernel trick is a well known approach allowing to implicitly cast a linear method into a nonlinear one by replacing any dot product by a kernel function. However few vector quantization algorithms have been kernelized. Indeed, they usually imply to compute linear transformations (e.g. moving prototypes), what is not easily kernelizable. This paper introduces the Kernel-based Vector Quantization (KVQ) method which allows working in an approximation of the feature space, and thus kernelizing any Vector Quantization (VQ) algorithm.},
}