{
  "id": "1901.08479",
  "version": "v1",
  "published": "2019-01-24T16:13:24.000Z",
  "updated": "2019-01-24T16:13:24.000Z",
  "title": "On the Transformation of Latent Space in Autoencoders",
  "authors": [
    "Jaehoon Cha",
    "Kyeong Soo Kim",
    "Sanghyuk Lee"
  ],
  "comment": "9 pages and 9 figures. The paper has been submitted to ICML (The International Conference on Machine Learning) 2019",
  "categories": [
    "cs.LG",
    "stat.ML"
  ],
  "abstract": "Noting the importance of the latent variables in inference and learning, we propose a novel framework for autoencoders based on the homeomorphic transformation of latent variables --- which could reduce the distance between vectors in the transformed space, while preserving the topological properties of the original space --- and investigate the effect of the transformation in both learning generative models and denoising corrupted data. The results of our experiments show that the proposed model can work as both a generative model and a denoising model with improved performance due to the transformation compared to conventional variational and denoising autoencoders.",
  "revisions": [
    {
      "version": "v1",
      "updated": "2019-01-24T16:13:24.000Z"
    }
  ],
  "analyses": {
    "keywords": [
      "latent space",
      "autoencoders",
      "latent variables",
      "original space",
      "conventional variational"
    ],
    "tags": [
      "conference paper"
    ],
    "note": {
      "typesetting": "TeX",
      "pages": 9,
      "language": "en",
      "license": "arXiv",
      "status": "editable"
    }
  }
}