@inproceedings{30b94f00c6674a819cebc149873ad2a9,
  author    = {Giles, C. L. and Omlin, C. W.},
  title     = {Inserting Rules into Recurrent Neural Networks},
  booktitle = {Neural Networks for Signal Processing {II} - Proceedings of the 1992 {IEEE} Workshop},
  series    = {Neural Networks for Signal Processing - Proceedings of the {IEEE} Workshop},
  editor    = {Kamm, C. A. and Kung, S. Y. and Sorenson, J. Aa. and Fallside, F.},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  address   = {United States},
  year      = {1992},
  pages     = {13--22},
  doi       = {10.1109/NNSP.1992.253712},
  language  = {English (US)},
  abstract  = {We present a method that incorporates a priori knowledge in the training of recurrent neural networks. This a priori knowledge can be interpreted as hints about the problem to be learned and these hints are encoded as rules which are then inserted into the neural network. We demonstrate the approach by training recurrent neural networks with inserted rules to learn to recognize regular languages from grammatical string examples. Because the recurrent networks have second-order connections, rule-insertion is a straightforward mapping of rules into weights and neurons. Simulations show that training recurrent networks with different amounts of partial knowledge to recognize simple grammars improves the training time by orders of magnitude, even when only a small fraction of all transitions are inserted as rules. In addition there appears to be no loss in generalization performance.},
  note      = {1992 IEEE Workshop on Neural Networks for Signal Processing II; Conference date: 31-08-1992 through 02-09-1992},
}