@governmentreport{Vadhan-ANPRM11,
  title    = {Comments on Advance Notice of Proposed Rulemaking: Human Subjects Research Protections: Enhancing Protections for Research Subjects and Reducing Burden, Delay, and Ambiguity for Investigators, Docket ID number {HHS-OPHS-2011-0005}},
  year     = {2011},
  month    = oct,
  abstract = {Comments by Salil Vadhan, David Abrams, Micah Altman, Cynthia Dwork, Scott Duke Kominers, Paul Kominers, Harry Lewis, Tal Moran, Guy Rothblum, and Jon Ullman (at Harvard, Microsoft Research, the University of Chicago, MIT, and the Herzliya Interdisciplinary Center). These comments address the issues of data privacy and de-identification raised in the ANPRM. Our perspective is informed by substantial advances in privacy science that have been made in the computer science literature.},
  url      = {http://www.regulations.gov/\#!documentDetail;D=HHS-OPHS-2011-0005-1101},
  author   = {Vadhan, Salil and Abrams, David and Altman, Micah and Dwork, Cynthia and Kominers, Paul and Kominers, Scott Duke and Lewis, Harry R. and Moran, Tal and Rothblum, Guy}
}
@inproceedings{UllmanVa11,
  title         = {{PCPs} and the Hardness of Generating Synthetic Data},
  booktitle     = {Proceedings of the 8th {IACR} Theory of Cryptography Conference ({TCC} '11)},
  series        = {Lecture Notes in Computer Science},
  volume        = {6597},
  year          = {2011},
  month         = mar,
  pages         = {572--587},
  publisher     = {Springer-Verlag},
  address       = {Providence, RI},
  note          = {Full version posted as \emph{ECCC} TR10-017.},
  doi           = {10.1007/978-3-642-19571-6_24},
  internal-note = {Review: conference alias changed to inproceedings; series name corrected ("in", not "on"); duplicate organization field and bogus edition field (it repeated the series string) removed; volume corrected from 5978 (that is TCC 2010) to 6597 (TCC 2011, ed. Ishai, ISBN 978-3-642-19571-6 per the DOI) -- verify page range against the corrected volume.},
  abstract      = {Assuming the existence of one-way functions, we show that there is no polynomial-time, differentially private algorithm $A$ that takes a database $D \in (\{0,1\}^d)^n$ and outputs a ``synthetic database'' $D'$ all of whose two-way marginals are approximately equal to those of $D$. (A two-way marginal is the fraction of database rows $x \in \{0,1\}^d$ with a given pair of values in a given pair of columns.) This answers a question of Barak et al. (PODS '07), who gave an algorithm running in time $\mathrm{poly}(n,2^d)$. Our proof combines a construction of hard-to-sanitize databases based on digital signatures (by Dwork et al., STOC '09) with PCP-based Levin-reductions from NP search problems to finding approximate solutions to CSPs.},
  url           = {http://link.springer.com/chapter/10.1007/978-3-642-19571-6_24},
  author        = {Ullman, Jonathan and Vadhan, Salil},
  editor        = {Ishai, Yuval}
}
@inproceedings{DBLP:conf/stoc/GuptaHRU11,
  title     = {Privately Releasing Conjunctions and the Statistical Query Barrier},
  booktitle = {Proceedings of the 43rd {ACM} Symposium on Theory of Computing, {STOC} 2011},
  year      = {2011},
  month     = jun,
  pages     = {803--812},
  publisher = {ACM},
  address   = {San Jose, CA, USA},
  abstract  = {Suppose we would like to know all answers to a set of statistical queries C on a data set up to small error, but we can only access the data itself using statistical queries. A trivial solution is to exhaustively ask all queries in C. Can we do any better? We show that the number of statistical queries necessary and sufficient for this task is---up to polynomial factors---equal to the agnostic learning complexity of C in Kearns' statistical query (SQ) model. This gives a complete answer to the question when running time is not a concern.
We then show that the problem can be solved efficiently (allowing arbitrary error on a small fraction of queries) whenever the answers to C can be described by a submodular function. This includes many natural concept classes, such as graph cuts and Boolean disjunctions and conjunctions.
While interesting from a learning theoretic point of view, our main applications are in privacy-preserving data analysis: Here, our second result leads to an algorithm that efficiently releases differentially private answers to all Boolean conjunctions with 1\% average error. This presents progress on a key open problem in privacy-preserving data analysis. Our first result on the other hand gives unconditional lower bounds on any differentially private algorithm that admits a (potentially non-privacy-preserving) implementation using only statistical queries. Not only our algorithms, but also most known private algorithms can be implemented using only statistical queries, and hence are constrained by these lower bounds. Our result therefore isolates the complexity of agnostic learning in the SQ-model as a new barrier in the design of differentially private algorithms.},
  url       = {http://dl.acm.org/citation.cfm?id=1993742},
  author    = {Gupta, Anupam and Hardt, Moritz and Roth, Aaron and Ullman, Jonathan}
}