@inproceedings{a4499e5540114554bdbafb6bf078cbb3,
  title     = {Constructive Learning of Recurrent Neural Networks},
  abstract  = {Recurrent neural networks are a natural model for learning and predicting temporal signals. In addition, simple recurrent networks have been shown to be both theoretically and experimentally capable of learning finite state automata [Cleeremans 89, Giles 92a, Minsky 67, Pollack 91, Siegelmann 92]. However, it is difficult to determine what is the minimal neural network structure for a particular automaton. Using a large recurrent network, which would be versatile in theory, in practice proves to be very difficult to train. Constructive or destructive recurrent methods might offer a solution to this problem. We prove that one current method, Recurrent Cascade Correlation, has fundamental limitations in representation and thus in its learning capabilities. We give a preliminary approach on how to get around these limitations by devising a ``simple'' constructive training method that adds neurons during training while still preserving the powerful fully recurrent structure. Through simulations we show that such a method can learn many types of regular grammars that the Recurrent Cascade Correlation method is unable to learn.},
  author    = {Chen, D. and Giles, C. L. and Sun, G. Z. and Chen, H. H. and Lee, Y. C. and Goudreau, M. W.},
  note      = {Publisher Copyright: {\textcopyright} 1993 IEEE.; IEEE International Conference on Neural Networks, ICNN 1993 ; Conference date: 28-03-1993 Through 01-04-1993},
  year      = {1993},
  doi       = {10.1109/ICNN.1993.298727},
  language  = {English (US)},
  series    = {IEEE International Conference on Neural Networks - Conference Proceedings},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  pages     = {1196--1201},
  booktitle = {1993 IEEE International Conference on Neural Networks, ICNN 1993},
  address   = {United States},
}