@incollection{a2372f8fb7884e42b80e8f0dd9d70a94,
  author        = {Ventresca, Mario and Tizhoosh, Hamid R.},
  editor        = {Tizhoosh, Hamid R. and Ventresca, Mario},
  title         = {Two Frameworks for Improving Gradient-Based Learning Algorithms},
  booktitle     = {Oppositional Concepts in Computational Intelligence},
  series        = {Studies in Computational Intelligence},
  volume        = {155},
  pages         = {255--284},
  publisher     = {Springer},
  address       = {Berlin, Heidelberg},
  year          = {2008},
  doi           = {10.1007/978-3-540-70829-2_12},
  isbn          = {9783540708261},
  language      = {English (US)},
  abstract      = {Backpropagation is the most popular algorithm for training neural networks. However, this gradient-based training method is known to have a tendency towards very long training times and convergence to local optima. Various methods have been proposed to alleviate these issues including, but not limited to, different training algorithms, automatic architecture design and different transfer functions. In this chapter we continue the exploration into improving gradient-based learning algorithms through dynamic transfer function modification. We propose opposite transfer functions as a means to improve the numerical conditioning of neural networks and extrapolate two backpropagation-based learning algorithms. Our experimental results show an improvement in accuracy and generalization ability on common benchmark functions. The experiments involve examining the sensitivity of the approach to learning parameters, type of transfer function and number of neurons in the network.},
  note          = {Funding Information: Research was partially supported by a National Science Foundation Graduate Research Fellowship and ONR grants N00014-02-1-0826 and N00014-04-1-0534. This article presents research results of the Belgian Network Dynamical Systems, Control, and Optimization (DYSCO), funded by the Interuniversity Attraction Poles Program, initiated by the Belgian State, Science Policy Office. The scientific responsibility rests with its authors.},
  internal-note = {NOTE(review): funding text (NSF fellowship / ONR grants / Belgian DYSCO) looks like mismatched auto-export boilerplate for this Springer SCI chapter -- verify against the published chapter. Series volume 155 added from the SCI series listing -- confirm.},
}