{ "id": "1506.06840", "version": "v1", "published": "2015-06-23T01:57:19.000Z", "updated": "2015-06-23T01:57:19.000Z", "title": "On Variance Reduction in Stochastic Gradient Descent and its Asynchronous Variants", "authors": [ "Sashank J. Reddi", "Ahmed Hefny", "Suvrit Sra", "Barnabás Póczos", "Alex Smola" ], "categories": [ "cs.LG", "stat.ML" ], "abstract": "We study optimization algorithms based on variance reduction for stochastic gradient descent (SGD). Remarkable recent progress has been made in this direction through development of algorithms like SAG, SVRG, SAGA. These algorithms have been shown to outperform SGD, both theoretically and empirically. However, asynchronous versions of these algorithms---a crucial requirement for modern large-scale applications---have not been studied. We bridge this gap by presenting a unifying framework for many variance reduction techniques. Subsequently, we propose an asynchronous algorithm grounded in our framework, and prove its fast convergence. An important consequence of our general approach is that it yields asynchronous versions of variance reduction algorithms such as SVRG and SAGA as a byproduct. Our method achieves near linear speedup in sparse settings common to machine learning. We demonstrate the empirical performance of our method through a concrete realization of asynchronous SVRG.", "revisions": [ { "version": "v1", "updated": "2015-06-23T01:57:19.000Z" } ], "analyses": { "keywords": [ "stochastic gradient descent", "asynchronous variants", "variance reduction algorithms", "algorithms-a crucial requirement", "variance reduction techniques" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable", "adsabs": "2015arXiv150606840R" } } }