@article{4550,
  abstract = {In 2-player non-zero-sum games, Nash equilibria capture the options for rational behavior if each player attempts to maximize her payoff. In contrast to classical game theory, we consider lexicographic objectives: first, each player tries to maximize her own payoff, and second, she tries to minimize the opponent's payoff. Such objectives arise naturally in the verification of systems with multiple components. There, instead of proving that each component satisfies its specification no matter how the other components behave, it sometimes suffices to prove that each component satisfies its specification provided that the other components satisfy their specifications. We say that a Nash equilibrium is secure if it is an equilibrium with respect to the lexicographic objectives of both players. We prove that in graph games with Borel winning conditions, which include the games that arise in verification, there may be several Nash equilibria, but there is always a unique maximal payoff profile of a secure equilibrium. We show how this equilibrium can be computed in the case of ω-regular winning conditions, and we characterize the memory requirements of strategies that achieve the equilibrium.},
  author = {Chatterjee, Krishnendu and Henzinger, Thomas and Jurdziński, Marcin},
  journal = {Theoretical Computer Science},
  number = {1-2},
  pages = {67--82},
  publisher = {Elsevier},
  title = {{Games with secure equilibria}},
  doi = {10.1016/j.tcs.2006.07.032},
  volume = {365},
  year = {2006},
}

@inproceedings{4549,
  abstract = {We present a compositional theory of system verification, where specifications assign real-numbered costs to systems. These costs can express a wide variety of quantitative system properties, such as resource consumption, price, or a measure of how well a system satisfies its specification. The theory supports the composition of systems and specifications, and the hiding of variables. Boolean refinement relations are replaced by real-numbered distances between descriptions of a system at different levels of detail. We show that the classical Boolean rules for compositional reasoning have quantitative counterparts in our setting. While our general theory allows costs to be specified by arbitrary cost functions, we also consider a class of linear cost functions, which give rise to an instance of our framework where all operations are computable in polynomial time.},
  author = {Chatterjee, Krishnendu and de Alfaro, Luca and Faella, Marco and Henzinger, Thomas and Majumdar, Rupak and Stoelinga, Mariëlle},
  pages = {179--188},
  publisher = {IEEE},
  title = {{Compositional quantitative reasoning}},
  doi = {10.1109/QEST.2006.11},
  year = {2006},
}

@inproceedings{4552,
  abstract = {A concurrent reachability game is a two-player game played on a graph: at each state, the players simultaneously and independently select moves; the two moves jointly determine a probability distribution over the successor states. The objective for player 1 is to reach a set of target states; the objective for player 2 is to prevent this, so that the game is zero-sum. Our contributions are two-fold. First, we present a simple proof of the fact that in concurrent reachability games, for all epsilon > 0, memoryless epsilon-optimal strategies exist. A memoryless strategy is independent of the history of plays, and an epsilon-optimal strategy achieves the objective with probability within epsilon of the value of the game.
In contrast to previous proofs of this fact, which rely on the limit behavior of discounted games using advanced Puiseux series analysis, our proof is elementary and combinatorial. Second, we present a strategy-improvement (a.k.a. policy-iteration) algorithm for concurrent games with reachability objectives.},
  author = {Chatterjee, Krishnendu and de Alfaro, Luca and Henzinger, Thomas},
  pages = {291--300},
  publisher = {IEEE},
  title = {{Strategy improvement for concurrent reachability games}},
  doi = {10.1109/QEST.2006.48},
  year = {2006},
}

@inproceedings{4574,
  abstract = {Many software model checkers are based on predicate abstraction. If the verification goal depends on pointer structures, the approach does not work well, because it is difficult to find adequate predicate abstractions for the heap. In contrast, shape analysis, which uses graph-based heap abstractions, can provide a compact representation of recursive data structures. We integrate shape analysis into the software model checker Blast. Because shape analysis is expensive, we do not apply it globally. Instead, we ensure that, like predicates, shape graphs are computed and stored locally, only where necessary for proving the verification goal. To achieve this, we extend lazy abstraction refinement, which so far has been used only for predicate abstractions, to three-valued logical structures. This approach not only increases the precision of model checking, but also the efficiency of shape analysis. We implemented the technique by extending Blast with calls to Tvla.},
  author = {Beyer, Dirk and Henzinger, Thomas and Théoduloz, Grégory},
  pages = {532--546},
  publisher = {Springer},
  title = {{Lazy shape analysis}},
  doi = {10.1007/11817963_48},
  volume = {4144},
  year = {2006},
}

@unpublished{573,
  abstract = {Mitchison and Jozsa recently suggested that the "chained-Zeno" counterfactual computation protocol proposed by Hosten et al. is counterfactual for only one output of the computer. This claim was based on the existing abstract algebraic definition of counterfactual computation, and indeed according to this definition, their argument is correct. However, a more general (physically adequate) definition of counterfactual computation is implicitly assumed by Hosten et al. Here we explain in detail why the protocol is counterfactual and how the "history tracking" method of the existing description inadequately represents the physics underlying the protocol. Consequently, we propose a modified definition of counterfactual computation. Finally, we comment on one of the most interesting aspects of the error-correcting protocol.},
  author = {Hosten, Onur and Rakher, Matthew and Barreiro, Julio and Peters, Nicholas and Kwiat, Paul},
  pages = {12},
  publisher = {ArXiv},
  title = {{Counterfactual computation revisited}},
  year = {2006},
}

@unpublished{574,
  abstract = {Vaidman, in a recent article, adopts the method of 'quantum weak measurements in pre- and postselected ensembles' to ascertain whether the chained-Zeno counterfactual computation scheme proposed by Hosten et al. is counterfactual, a question that has been the topic of a debate on the definition of counterfactuality. We disagree with his conclusion; our disagreement brings up some interesting aspects of quantum weak measurements and some concerns about the way they are interpreted.},
  author = {Hosten, Onur and Kwiat, Paul},
  pages = {2},
  publisher = {ArXiv},
  title = {{Weak measurements and counterfactual computation}},
  year = {2006},
}

@inproceedings{578,
  abstract = {A source of single photons allows secure quantum key distribution, in addition to being a critical resource for linear optics quantum computing. We describe our progress on deterministically creating single photons from spontaneous parametric downconversion, an extension of the Pittman, Jacobs, and Franson scheme [Phys. Rev. A 66, 042303 (2002)]. Their idea was to conditionally prepare single photons by measuring one member of a spontaneously emitted photon pair and storing the remaining conditionally prepared photon until a predetermined time, when it would be "deterministically" released from storage. Our approach attempts to improve upon this by recycling the pump pulse in order to decrease the possibility of multiple-pair generation, while maintaining a high probability of producing a single pair. Many of the challenges we discuss are central to other quantum information technologies, including the need for low-loss optical storage, switching and detection, and fast feed-forward control.},
  author = {Peters, Nicholas A and Arnold, Keith J and VanDevender, Aaron P and Jeffrey, Evan R and Rangarajan, Radhika and Hosten, Onur and Barreiro, Julio T and Altepeter, Joseph B and Kwiat, Paul G},
  publisher = {SPIE},
  title = {{Towards a quasi-deterministic single-photon source}},
  doi = {10.1117/12.684702},
  volume = {6305},
  year = {2006},
}

@inproceedings{577,
  abstract = {Visible light photon counters (VLPCs) and solid-state photomultipliers (SSPMs) are high-efficiency single-photon detectors with multi-photon counting capability. While both VLPCs and SSPMs have inferred internal quantum efficiencies above 93%, the measured values for both detectors have been limited to less than 88%, a shortfall attributed to in-coupling losses. We are currently improving the overall detection efficiency by (a) applying custom anti-reflection coatings to the detectors and the in-coupling fibers, (b) implementing a novel cryogenic design to reduce transmission losses, and (c) using low-noise electronics to obtain a better signal-to-noise ratio.},
  author = {Rangarajan, Radhika and Altepeter, Joseph B and Jeffrey, Evan R and Stoutimore, Micah J and Peters, Nicholas A and Hosten, Onur and Kwiat, Paul G},
  publisher = {SPIE},
  title = {{High-efficiency single-photon detectors}},
  doi = {10.1117/12.686117},
  volume = {6372},
  year = {2006},
}

@article{579,
  abstract = {The logic underlying the coherent nature of quantum information processing often deviates from intuitive reasoning, leading to surprising effects. Counterfactual computation constitutes a striking example: the potential outcome of a quantum computation can be inferred, even if the computer is not run [1]. Relying on arguments similar to those for interaction-free measurements [2] (or quantum interrogation [3]), counterfactual computation is accomplished by putting the computer in a superposition of 'running' and 'not running' states, and then interfering the two histories. Conditional on the as-yet-unknown outcome of the computation, it is sometimes possible to counterfactually infer information about the solution. Here we demonstrate counterfactual computation, implementing Grover's search algorithm with an all-optical approach [4].
It was believed that the overall probability of such counterfactual inference is intrinsically limited [1,5], so that it could not perform better on average than random guesses. However, using a novel 'chained' version of the quantum Zeno effect [6], we show how to boost the counterfactual inference probability to unity, thereby beating the random-guessing limit. Our methods are general and apply to any physical system, as illustrated by a discussion of trapped-ion systems. Finally, we briefly show that, in certain circumstances, counterfactual computation can eliminate errors induced by decoherence.},
  author = {Hosten, Onur and Rakher, Matthew T and Barreiro, Julio T and Peters, Nicholas A and Kwiat, Paul G},
  journal = {Nature},
  number = {7079},
  pages = {949--952},
  publisher = {Nature Publishing Group},
  title = {{Counterfactual quantum computation through quantum interrogation}},
  doi = {10.1038/nature04523},
  volume = {439},
  year = {2006},
}

@inproceedings{583,
  abstract = {Visible light photon counters (VLPCs) and solid-state photomultipliers (SSPMs) facilitate efficient single-photon detection. We are attempting to improve their efficiency, previously limited to less than 88% by coupling losses, via anti-reflection coatings, better electronics, and improved cryogenics.},
  author = {Rangarajan, Radhika and Peters, Nicholas A and Hosten, Onur and Altepeter, Joseph B and Jeffrey, Evan R and Kwiat, Paul G},
  publisher = {IEEE},
  title = {{Improved single-photon detection}},
  doi = {10.1109/CLEO.2006.4628641},
  year = {2006},
}