@inproceedings{f095ed823acb4839b5d76917c8587ed0,
  author    = {Giles, C. L. and Chen, D. and Miller, C. B. and Chen, H. H. and Sun, G. Z. and Lee, Y. C.},
  title     = {Second-Order Recurrent Neural Networks for Grammatical Inference},
  abstract  = {It is shown that a recurrent, second-order neural network using a real-time, feed-forward training algorithm readily learns to infer regular grammars from positive and negative string training samples. Numerous simulations which show the effect of initial conditions, training set size and order, and neuron architecture are presented. All simulations were performed with random initial weight strengths and usually converge after approximately a hundred epochs of training. The authors discuss a quantization algorithm for dynamically extracting finite-state automata during and after training. For a well-trained neural net, the extracted automata constitute an equivalence class of state machines that are reducible to the minimal machine of the inferred grammar. It is then shown through simulations that many of the neural net state machines are dynamically stable and correctly classify long unseen strings.},
  booktitle = {Proceedings. {IJCNN} - International Joint Conference on Neural Networks},
  publisher = {IEEE},
  pages     = {273--281},
  year      = {1992},
  isbn      = {0780301641},
  language  = {English (US)},
  note      = {International Joint Conference on Neural Networks - IJCNN-91-Seattle ; Conference date: 08-07-1991 Through 12-07-1991},
}