@inproceedings{2940,
  abstract   = {A chain rule for an entropy notion $H(\cdot)$ states that the entropy $H(X)$ of a variable $X$ decreases by at most $\ell$ if conditioned on an $\ell$-bit string $A$, i.e., $H(X|A) \geq H(X) - \ell$. More generally, a notion satisfies a chain rule for conditional entropy if $H(X|Y,A) \geq H(X|Y) - \ell$. All natural information-theoretic entropy notions we are aware of (like Shannon or min-entropy) satisfy some kind of chain rule for conditional entropy. Moreover, many computational entropy notions (like Yao entropy, unpredictability entropy, and several variants of HILL entropy) satisfy the chain rule for conditional entropy, though here not only does the quantity decrease by $\ell$, but the quality of the entropy also degrades exponentially in $\ell$. However, for the standard notion of conditional HILL entropy (the computational equivalent of min-entropy) the existence of such a rule was unknown so far. In this paper, we prove that, assuming the existence of one-way permutations, no meaningful chain rule exists for conditional HILL entropy: there exist distributions $X$, $Y$, $A$, where $A$ is a distribution over a single bit, but $H(X|Y) \gg H(X|Y,A)$, even if we simultaneously allow for a massive degradation in the quality of the entropy. The idea underlying our construction is based on a surprising connection between the chain rule for HILL entropy and deniable encryption.},
  author     = {Krenn, Stephan and Pietrzak, Krzysztof Z and Wadia, Akshay},
  editor     = {Sahai, Amit},
  location   = {Tokyo, Japan},
  pages      = {23--39},
  publisher  = {Springer},
  title      = {{A counterexample to the chain rule for conditional HILL entropy, and what deniable encryption has to do with it}},
  doi        = {10.1007/978-3-642-36594-2_2},
  volume     = {7785},
  year       = {2013},
}