Perfect learning in neural networks: on sample complexity and scaling issues of gradient descent trained multilayer perceptrons and a novel object-oriented simulation framework for scalable information processing modules
Please log in to take part in the discussion (add your own reviews or comments).
Cite this publication
More citation styles
- please select -
%0 Thesis
%1 lange1995perfect
%A Lange, Rupert
%D 1995
%K
%T Perfect learning in neural networks: on sample complexity and scaling issues of gradient descent trained multilayer perceptrons and a novel object-oriented simulation framework for scalable information processing modules
PhD thesis, Universität Heidelberg, 1995 (BibSonomy export; dnbtitleid is the
German National Library record ID). Title entered in Title Case with classic
BibTeX accent escape for the school so sorting/labeling stay correct.
@phdthesis{lange1995perfect,
  added-at   = {2023-12-14T15:19:19.000+0100},
  author     = {Lange, Rupert},
  biburl     = {https://www.bibsonomy.org/bibtex/2657f8fc45ae7bf11c7f0d56777c38843/admin},
  dnbtitleid = {945298056},
  interhash  = {e9ba506b38da98c66057cdc8fa67c7e5},
  intrahash  = {657f8fc45ae7bf11c7f0d56777c38843},
  keywords   = {},
  school     = {Universit{\"a}t Heidelberg},
  timestamp  = {2023-12-14T15:19:19.000+0100},
  title      = {Perfect Learning in Neural Networks: On Sample Complexity and Scaling Issues of Gradient Descent Trained Multilayer Perceptrons and a Novel Object-Oriented Simulation Framework for Scalable Information Processing Modules},
  year       = {1995},
}