{
  "id": "2406.14936",
  "version": "v1",
  "published": "2024-06-21T07:45:28.000Z",
  "updated": "2024-06-21T07:45:28.000Z",
  "title": "On the growth of the parameters of approximating ReLU neural networks",
  "authors": [
    "Erion Morina",
    "Martin Holler"
  ],
  "categories": [
    "cs.LG",
    "cs.NA",
    "math.NA"
  ],
  "abstract": "This work focuses on the analysis of fully connected feed forward ReLU neural networks as they approximate a given, smooth function. In contrast to conventionally studied universal approximation properties under increasing architectures, e.g., in terms of width or depth of the networks, we are concerned with the asymptotic growth of the parameters of approximating networks. Such results are of interest, e.g., for error analysis or consistency results for neural network training. The main result of our work is that, for a ReLU architecture with state of the art approximation error, the realizing parameters grow at most polynomially. The obtained rate with respect to a normalized network size is compared to existing results and is shown to be superior in most cases, in particular for high dimensional input.",
  "revisions": [
    {
      "version": "v1",
      "updated": "2024-06-21T07:45:28.000Z"
    }
  ],
  "analyses": {
    "subjects": [
      "41A25",
      "41A65"
    ],
    "keywords": [
      "approximating relu neural networks",
      "parameters",
      "connected feed forward relu",
      "universal approximation properties",
      "feed forward relu neural networks"
    ],
    "note": {
      "typesetting": "TeX",
      "pages": 0,
      "language": "en",
      "license": "arXiv",
      "status": "editable"
    }
  }
}