{ "id": "2403.07185", "version": "v1", "published": "2024-03-11T21:54:52.000Z", "updated": "2024-03-11T21:54:52.000Z", "title": "Uncertainty in Graph Neural Networks: A Survey", "authors": [ "Fangxin Wang", "Yuqing Liu", "Kay Liu", "Yibo Wang", "Sourav Medya", "Philip S. Yu" ], "comment": "13 main pages, 3 figures, 1 table. Under review", "categories": [ "cs.LG", "stat.ML" ], "abstract": "Graph Neural Networks (GNNs) have been extensively used in various real-world applications. However, the predictive uncertainty of GNNs stemming from diverse sources such as inherent randomness in data and model training errors can lead to unstable and erroneous predictions. Therefore, identifying, quantifying, and utilizing uncertainty are essential to enhance the performance of the model for the downstream tasks as well as the reliability of the GNN predictions. This survey aims to provide a comprehensive overview of the GNNs from the perspective of uncertainty with an emphasis on its integration in graph learning. We compare and summarize existing graph uncertainty theory and methods, alongside the corresponding downstream tasks. Thereby, we bridge the gap between theory and practice, meanwhile connecting different GNN communities. Moreover, our work provides valuable insights into promising directions in this field.", "revisions": [ { "version": "v1", "updated": "2024-03-11T21:54:52.000Z" } ], "analyses": { "keywords": [ "graph neural networks", "downstream tasks", "summarize existing graph uncertainty theory", "diverse sources", "gnn predictions" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }