by Andreas Andreakis, Nicolai von Hoyningen-Huene and Michael Beetz
Abstract:
We propose Merge Growing Neural Gas (MGNG) as a novel unsupervised growing neural network for time series analysis. MGNG combines the state-of-the-art recursive temporal context of Merge Neural Gas (MNG) with the incremental Growing Neural Gas (GNG) and enables thereby the analysis of unbounded and possibly infinite time series in an online manner. There is no need to define the number of neurons a priori and only constant parameters are used. In order to focus on frequent sequence patterns an entropy maximization strategy is utilized which controls the creation of new neurons. Experimental results demonstrate reduced time complexity compared to MNG while retaining similar accuracy in time series representation.
Reference:
Andreas Andreakis, Nicolai von Hoyningen-Huene and Michael Beetz, "Incremental Unsupervised Time Series Analysis Using Merge Growing Neural Gas", In WSOM, Springer, vol. 5629, pp. 10-18, 2009.
Bibtex Entry:
@inproceedings{andreakis09wsom,
  author           = {Andreakis, Andreas and
                      von Hoyningen-Huene, Nicolai and
                      Beetz, Michael},
  title            = {Incremental Unsupervised Time Series Analysis Using
                      {Merge Growing Neural Gas}},
  booktitle        = {WSOM},
  year             = {2009},
  pages            = {10--18},
  editor           = {Pr{\'\i}ncipe, Jos{\'e} Carlos and
                      Miikkulainen, Risto},
  publisher        = {Springer},
  series           = {Lecture Notes in Computer Science},
  volume           = {5629},
  isbn             = {978-3-642-02396-5},
  bib2html_pubtype = {Conference Paper},
  bib2html_rescat  = {Time Series, Machine Learning, SOM},
  bib2html_groups  = {IAS},
  bib2html_funding = {Aspogamo},
  abstract         = {We propose Merge Growing Neural Gas (MGNG) as a novel unsupervised growing neural network for time series analysis. MGNG combines the state-of-the-art recursive temporal context of Merge Neural Gas (MNG) with the incremental Growing Neural Gas (GNG) and enables thereby the analysis of unbounded and possibly infinite time series in an online manner.
There is no need to define the number of neurons a priori and only constant parameters are used. In order to focus on frequent sequence patterns an entropy maximization strategy is utilized which controls the creation of new neurons.
Experimental results demonstrate reduced time complexity compared to MNG while retaining similar accuracy in time series representation.
}
}