{ "id": "2502.01293", "version": "v1", "published": "2025-02-03T12:12:52.000Z", "updated": "2025-02-03T12:12:52.000Z", "title": "TT-LSQR For Tensor Least Squares Problems and Application to Data Mining *", "authors": [ "Lorenzo Piccinini", "Valeria Simoncini" ], "comment": "21 pages, 10 figures, 6 tables, 1 algorithm", "categories": [ "math.NA", "cs.NA" ], "abstract": "We are interested in the numerical solution of the tensor least squares problem \\[ \\min_{\\mathcal{X}} \\| \\mathcal{F} - \\sum_{i =1}^{\\ell} \\mathcal{X} \\times_1 A_1^{(i)} \\times_2 A_2^{(i)} \\cdots \\times_d A_d^{(i)} \\|_F, \\] where $\\mathcal{X}\\in\\mathbb{R}^{m_1 \\times m_2 \\times \\cdots \\times m_d}$, $\\mathcal{F}\\in\\mathbb{R}^{n_1\\times n_2 \\times \\cdots \\times n_d}$ are tensors with $d$ dimensions, and the coefficients $A_j^{(i)}$ are tall matrices of conforming dimensions. We first describe a tensor implementation of the classical LSQR method by Paige and Saunders, using the tensor-train representation as key ingredient. We also show how to incorporate sketching to lower the computational cost of dealing with the tall matrices $A_j^{(i)}$. We then use this methodology to address a problem in information retrieval, the classification of a new query document among already categorized documents, according to given keywords.", "revisions": [ { "version": "v1", "updated": "2025-02-03T12:12:52.000Z" } ], "analyses": { "subjects": [ "65F45", "65F55", "15A23" ], "keywords": [ "squares problem", "data mining", "application", "tall matrices", "information retrieval" ], "note": { "typesetting": "TeX", "pages": 21, "language": "en", "license": "arXiv", "status": "editable" } } }