{ "id": "1802.08250", "version": "v1", "published": "2018-02-22T10:23:36.000Z", "updated": "2018-02-22T10:23:36.000Z", "title": "Overcoming Catastrophic Forgetting in Convolutional Neural Networks by Selective Network Augmentation", "authors": [ "Abel S. Zacarias", "Luís A. Alexandre" ], "categories": [ "cs.LG", "stat.ML" ], "abstract": "Lifelong learning aims to develop machine learning systems that can learn new tasks while preserving the performance on previous tasks. This approach can be applied, for example, to prevent accidents in autonomous vehicles by applying the knowledge learned in previous situations. In this paper we present a method to overcome catastrophic forgetting that learns new tasks and preserves the performance on old tasks without accessing the data of the original model, by selective network augmentation, using convolutional neural networks for image classification. The experimental results showed that our method, in some scenarios, outperforms the state-of-the-art Learning without Forgetting algorithm. Results also showed that in some situations it is better to use our model instead of training a neural network using isolated learning.", "revisions": [ { "version": "v1", "updated": "2018-02-22T10:23:36.000Z" } ], "analyses": { "keywords": [ "convolutional neural networks", "selective network augmentation", "overcoming catastrophic forgetting", "experiment results", "image classification" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }