{ "id": "1810.00122", "version": "v1", "published": "2018-09-29T00:50:21.000Z", "updated": "2018-09-29T00:50:21.000Z", "title": "On the Convergence and Robustness of Batch Normalization", "authors": [ "Yongqiang Cai", "Qianxiao Li", "Zuowei Shen" ], "categories": [ "cs.LG", "stat.ML" ], "abstract": "Despite its empirical success, the theoretical underpinnings of the stability, convergence and acceleration properties of batch normalization (BN) remain elusive. In this paper, we attack this problem from a modeling approach, where we perform a thorough theoretical analysis on BN applied to a simplified model: ordinary least squares (OLS). We discover that gradient descent on OLS with BN has interesting properties, including a scaling law, convergence for arbitrary learning rates for the weights, asymptotic acceleration effects, as well as insensitivity to the choice of learning rates. We then demonstrate numerically that these findings are not specific to the OLS problem and hold qualitatively for more complex supervised learning problems. This points to a new direction towards uncovering the mathematical principles that underlies batch normalization.", "revisions": [ { "version": "v1", "updated": "2018-09-29T00:50:21.000Z" } ], "analyses": { "keywords": [ "convergence", "robustness", "asymptotic acceleration effects", "underlies batch normalization", "arbitrary learning rates" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }