author = {Lucie Daubigney and Matthieu Geist and Olivier Pietquin},
title = {Random Projections: A Remedy for Overfitting Issues in Time Series Prediction with {Echo State Networks}},
year = {2013},
booktitle = {Proceedings of the 38th {IEEE} International Conference on Acoustics, Speech and Signal Processing ({ICASSP} 2013)},
note = {to appear},
address = {Vancouver, Canada},
url = {http://www.metz.supelec.fr/metz/personnel/geist_mat/pdfs/supelec828.pdf},
abstract = {Modelling time series is quite a difficult task. The last recent years, reservoir computing approaches have been proven very efficient for such problems. Indeed, thanks to recurrence in the connections between neurons, this approach is a powerful tool to catch and model time dependencies between samples. Yet, the prediction quality often depends on the trade-off between the number of neurons in the reservoir and the amount of training data. Supposedly, the larger the number of neurons, the richer the reservoir of dynamics. However, the risk of overfitting problem appears. Conversely, the lower the number of neurons is, the lower the risk of overfitting problem is but also the poorer the reservoir of dynamics is. We consider here the combination of an echo state network with a projection method to benefit from the advantages of the reservoir computing approach without needing to pay attention to overfitting problems due to a lack of training data.}