{ "id": "1909.09630", "version": "v1", "published": "2019-09-20T17:42:22.000Z", "updated": "2019-09-20T17:42:22.000Z", "title": "Manipulation Attacks in Local Differential Privacy", "authors": [ "Albert Cheu", "Adam Smith", "Jonathan Ullman" ], "categories": [ "cs.DS", "cs.CR" ], "abstract": "Local differential privacy is a widely studied restriction on distributed algorithms that collect aggregates about sensitive user data, and is now deployed in several large systems. We initiate a systematic study of a fundamental limitation of locally differentially private protocols: they are highly vulnerable to adversarial manipulation. While any algorithm can be manipulated by adversaries who lie about their inputs, we show that any non-interactive locally differentially private protocol can be manipulated to a much greater extent. Namely, when the privacy level is high or the input domain is large, an attacker who controls a small fraction of the users in the protocol can completely obscure the distribution of the users' inputs. We also show that existing protocols differ greatly in their resistance to manipulation, even when they offer the same accuracy guarantee with honest execution. Our results suggest caution when deploying local differential privacy and reinforce the importance of efficient cryptographic techniques for emulating mechanisms from central differential privacy in distributed settings.", "revisions": [ { "version": "v1", "updated": "2019-09-20T17:42:22.000Z" } ], "analyses": { "keywords": [ "locally differentially private protocol", "manipulation attacks", "central differential privacy", "efficient cryptographic techniques", "deploying local differential privacy" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }