{ "id": "1402.4844", "version": "v2", "published": "2014-02-19T22:57:03.000Z", "updated": "2016-05-26T14:06:50.000Z", "title": "Subspace Learning with Partial Information", "authors": [ "Alon Gonen", "Dan Rosenbaum", "Yonina Eldar", "Shai Shalev-Shwartz" ], "categories": [ "cs.LG", "stat.ML" ], "abstract": "The goal of subspace learning is to find a $k$-dimensional subspace of $\\mathbb{R}^d$, such that the expected squared distance between instance vectors and the subspace is as small as possible. In this paper we study subspace learning in a partial information setting, in which the learner can only observe $r \\le d$ attributes from each instance vector. We propose several efficient algorithms for this task, and analyze their sample complexity.", "revisions": [ { "version": "v1", "updated": "2014-02-19T22:57:03.000Z", "title": "The Sample Complexity of Subspace Learning with Partial Information", "abstract": "The goal of subspace learning is to find a $k$-dimensional subspace of $\\mathbb{R}^d$, such that the expected squared distance between instance vectors and the subspace is as small as possible. In this paper we study the sample complexity of subspace learning in a \\emph{partial information} setting, in which the learner can only observe $r \\le d$ attributes from each instance vector. We derive upper and lower bounds on the sample complexity in different scenarios. In particular, our upper bounds involve a generalization of vector sampling techniques, which are often used in bandit problems, to matrices.", "comment": null, "journal": null, "doi": null }, { "version": "v2", "updated": "2016-05-26T14:06:50.000Z" } ], "analyses": { "keywords": [ "sample complexity", "subspace learning", "partial information", "instance vector", "bandit problems" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable", "adsabs": "2014arXiv1402.4844G" } } }