@inproceedings{bd24773720ac424aa7af5598b6ba3509,
  title     = {The Statistical Physics of Learning Revisited: Typical Learning Curves in Model Scenarios},
  author    = {Biehl, Michael},
  editor    = {Amunts, Katrin and Grandinetti, Lucio and Lippert, Thomas and Petkov, Nicolai},
  booktitle = {Brain-Inspired Computing},
  series    = {Lecture Notes in Computer Science},
  publisher = {Springer},
  year      = {2021},
  month     = jul,
  pages     = {128--142},
  doi       = {10.1007/978-3-030-82427-3_10},
  isbn      = {978-3-030-82426-6},
  language  = {English},
  abstract  = {The exchange of ideas between computer science and statistical physics has advanced the understanding of machine learning and inference significantly. This interdisciplinary approach is currently regaining momentum due to the revived interest in neural networks and deep learning. Methods borrowed from statistical mechanics complement other approaches to the theory of computational and statistical learning. In this brief review, we outline and illustrate some of the basic concepts. We exemplify the role of the statistical physics approach in terms of a particularly important contribution: the computation of typical learning curves in student teacher scenarios of supervised learning. Two, by now classical examples from the literature illustrate the approach: the learning of a linearly separable rule by a perceptron with continuous and with discrete weights, respectively. We address these prototypical problems in terms of the simplifying limit of stochastic training at high formal temperature and obtain the corresponding learning curves.},
  note      = {Publisher Copyright: {\textcopyright} 2021, The Author(s).; 4th International Workshop on Brain-Inspired Computing, BrainComp 2019 ; Conference date: 15-07-2019 Through 19-07-2019},
}