@article{4531,
  author    = {Fisher, Jasmin and Piterman, Nir and Hajnal, Alex and Henzinger, Thomas},
  title     = {Predictive Modeling of Signaling Crosstalk during {C. elegans} Vulval Development},
  journal   = {PLoS Computational Biology},
  publisher = {Public Library of Science},
  volume    = {3},
  number    = {5},
  pages     = {e92},
  year      = {2007},
  doi       = {10.1371/journal.pcbi.0030092},
  abstract  = {Caenorhabditis elegans vulval development provides an important paradigm for studying the process of cell fate determination and pattern formation during animal development. Although many genes controlling vulval cell fate specification have been identified, how they orchestrate themselves to generate a robust and invariant pattern of cell fates is not yet completely understood. Here, we have developed a dynamic computational model incorporating the current mechanistic understanding of gene interactions during this patterning process. A key feature of our model is the inclusion of multiple modes of crosstalk between the epidermal growth factor receptor (EGFR) and LIN-12/Notch signaling pathways, which together determine the fates of the six vulval precursor cells (VPCs). Computational analysis, using the model-checking technique, provides new biological insights into the regulatory network governing VPC fate specification and predicts novel negative feedback loops. In addition, our analysis shows that most mutations affecting vulval development lead to stable fate patterns in spite of variations in synchronicity between VPCs. Computational searches for the basis of this robustness show that a sequential activation of the EGFR-mediated inductive signaling and LIN-12/Notch-mediated lateral signaling pathways is key to achieve a stable cell fate pattern. We demonstrate experimentally a time-delay between the activation of the inductive and lateral signaling pathways in wild-type animals and the loss of sequential signaling in mutants showing unstable fate patterns; thus, validating two key predictions provided by our modeling work. The insights gained by our modeling study further substantiate the usefulness of executing and analyzing mechanistic models to investigate complex biological behaviors.},
}
@inproceedings{4537,
  author    = {Chatterjee, Krishnendu and Henzinger, Thomas},
  title     = {Assume-Guarantee Synthesis},
  booktitle = {Tools and Algorithms for the Construction and Analysis of Systems ({TACAS})},
  series    = {Lecture Notes in Computer Science},
  volume    = {4424},
  pages     = {261--275},
  publisher = {Springer},
  year      = {2007},
  doi       = {10.1007/978-3-540-71209-1_21},
  internal-note = {booktitle/series inferred from DOI (Springer LNCS 4424, TACAS 2007); verify against proceedings},
  abstract  = {The classical synthesis problem for reactive systems asks, given a proponent process A and an opponent process B, to refine A so that the closed-loop system A parallel to B satisfies a given specification Phi. The solution of this problem requires the computation of a winning strategy for proponent A in a game against opponent B. We define and study the co-synthesis problem, where the proponent A consists itself of two independent processes, A = A(1)parallel to A(2), with specifications Phi(1) and Phi(2), and the goal is to refine both A(1) and A(2) so that A(1)parallel to A(2)parallel to B satisfies Phi(1) boolean AND Phi(2). For example, if the opponent B is a fair scheduler for the two processes A(1) and A(2), and Phi(i) specifies the requirements of mutual exclusion for A(i) (e.g., starvation freedom), then the co-synthesis problem asks for the automatic synthesis of a mutual-exclusion protocol. We show that co-synthesis defined classically, with the processes A(1) and A(2) either collaborating or competing, does not capture desirable solutions. Instead, the proper formulation of co-synthesis is the one where process A(1) competes with A(2) but not at the price of violating Phi(1), and vice versa. We call this assume-guarantee synthesis and show that it can be solved by computing secure-equilibrium strategies. In particular, from mutual-exclusion requirements the assume-guarantee synthesis algorithm automatically computes Peterson's protocol.},
}
@article{4547,
  author    = {Chatterjee, Krishnendu and Doyen, Laurent and Henzinger, Thomas and Raskin, Jean-François},
  title     = {Algorithms for Omega-Regular Games with Imperfect Information},
  journal   = {Logical Methods in Computer Science},
  publisher = {International Federation of Computational Logic},
  volume    = {3},
  number    = {3},
  pages     = {1--23},
  year      = {2007},
  doi       = {10.2168/LMCS-3(3:4)2007},
  internal-note = {issue number corrected from 184 to 3 per DOI LMCS-3(3:4)2007; verify},
  abstract  = {We study observation-based strategies for two-player turn-based games on graphs with omega-regular objectives. An observation-based strategy relies on imperfect information about the history of a play, namely, on the past sequence of observations. Such games occur in the synthesis of a controller that does not see the private state of the plant. Our main results are twofold. First, we give a fixed-point algorithm for computing the set of states from which a player can win with a deterministic observation-based strategy for any omega-regular objective. The fixed point is computed in the lattice of antichains of state sets. This algorithm has the advantages of being directed by the objective and of avoiding an explicit subset construction on the game graph. Second, we give an algorithm for computing the set of states from which a player can win with probability 1 with a randomized observation-based strategy for a Buechi objective. This set is of interest because in the absence of perfect information, randomized strategies are more powerful than deterministic ones. We show that our algorithms are optimal by proving matching lower bounds.},
}
@phdthesis{4559,
  author   = {Chatterjee, Krishnendu},
  title    = {Stochastic {$\omega$}-Regular Games},
  school   = {University of California, Berkeley},
  pages    = {1--247},
  year     = {2007},
  abstract = {We study games played on graphs with omega-regular conditions specified as parity, Rabin, Streett or Muller conditions. These games have applications in the verification, synthesis, modeling, testing, and compatibility checking of reactive systems. Important distinctions between graph games are as follows: (a) turn-based vs. concurrent games, depending on whether at a state of the game only a single player makes a move, or players make moves simultaneously; (b) deterministic vs. stochastic, depending on whether the transition function is a deterministic or a probabilistic function over successor states; and (c) zero-sum vs. non-zero-sum, depending on whether the objectives of the players are strictly conflicting or not.
We establish that the decision problem for turn-based stochastic zero-sum games with Rabin, Streett, and Muller objectives are NP-complete, coNP-complete, and PSPACE-complete, respectively, substantially improving the previously known 3EXPTIME bound. We also present strategy improvement style algorithms for turn-based stochastic Rabin and Streett games. In the case of concurrent stochastic zero-sum games with parity objectives we obtain a PSPACE bound, again improving the previously known 3EXPTIME bound. As a consequence, concurrent stochastic zero-sum games with Rabin, Streett, and Muller objectives can be solved in EXPSPACE, improving the previously known 4EXPTIME bound. We also present an elementary and combinatorial proof of the existence of memoryless $\epsilon$-optimal strategies in concurrent stochastic games with reachability objectives, for all real $\epsilon > 0$, where an $\epsilon$-optimal strategy achieves the value of the game within $\epsilon$ against all strategies of the opponent. We also use the proof techniques to present a strategy improvement style algorithm for concurrent stochastic reachability games.
We then go beyond $\omega$-regular objectives and study the complexity of an important class of quantitative objectives, namely, limit-average objectives. In the case of limit-average games, the states of the graph are labeled with rewards and the goal is to maximize the long-run average of the rewards. We show that concurrent stochastic zero-sum games with limit-average objectives can be solved in EXPTIME.
Finally, we introduce a new notion of equilibrium, called secure equilibrium, in non-zero-sum games which captures the notion of conditional competitiveness. We prove the existence of unique maximal secure equilibrium payoff profiles in turn-based deterministic games, and present algorithms to compute such payoff profiles. We also show how the notion of secure equilibrium extends the assume-guarantee style of reasoning in the game theoretic framework.},
}
@phdthesis{4566,
  author   = {Chakrabarti, Arindam},
  title    = {A Framework for Compositional Design and Analysis of Systems},
  school   = {University of California, Berkeley},
  pages    = {1--244},
  year     = {2007},
  abstract = {Complex system design today calls for compositional design and implementation. However each component is designed with certain assumptions about the environment it is meant to operate in, and delivering certain guarantees if those assumptions are satisfied; numerous inter-component interaction errors are introduced in the manual and error-prone integration process as there is little support in design environments for machine-readably representing these assumptions and guarantees and automatically checking consistency during integration.
Based on Interface Automata we propose a framework for compositional design and analysis of systems: a set of domain-specific automata-theoretic type systems for compositional system specification and analysis by behavioral specification of open systems. We focus on three different domains: component-based hardware systems communicating on bidirectional wires, concurrent distributed recursive message-passing software systems, and embedded software system components operating in resource-constrained environments. For these domains we present approaches to formally represent the assumptions and conditional guarantees between interacting open system components. Composition of such components produces new components with the appropriate assumptions and guarantees. We check satisfaction of temporal logic specifications by such components, and the substitutability of one component with another in an arbitrary context. Using this framework one can analyze large systems incrementally without needing extensive summary information to close the system at each stage. Furthermore, we focus only on the inter-component interaction behavior without dealing with the full implementation details of each component. Many of the merits of automata-theoretic model-checking are combined with the compositionality afforded by type-system based techniques. We also present an integer-based extension of the conventional boolean verification framework motivated by our interface formalism for embedded software components.
Our algorithms for checking the behavioral compatibility of component interfaces are available in our tool Chic, which can be used as a plug-in for the Java IDE JBuilder and the heterogeneous modeling and design environment Ptolemy II.
Finally, we address the complementary problem of partitioning a large system into meaningful coherent components by analyzing the interaction patterns between its basic elements. We demonstrate the usefulness of our partitioning approach by evaluating its efficacy in improving unit-test branch coverage for a large software system implemented in C.},
}
@article{4567,
  author    = {Beyer, Dirk and Henzinger, Thomas and Jhala, Ranjit and Majumdar, Rupak},
  title     = {The Software Model Checker {BLAST}: Applications to Software Engineering},
  journal   = {International Journal on Software Tools for Technology Transfer},
  volume    = {9},
  number    = {5},
  pages     = {505--525},
  publisher = {Springer},
  year      = {2007},
  doi       = {10.1007/s10009-007-0044-z},
  internal-note = {author corrected from "Majumdar, Ritankar S" to "Majumdar, Rupak" (BLAST co-author); verify},
  abstract  = {BLAST is an automatic verification tool for checking temporal safety properties of C programs. Given a C program and a temporal safety property, BLAST either statically proves that the program satisfies the safety property, or provides an execution path that exhibits a violation of the property (or, since the problem is undecidable, does not terminate). BLAST constructs, explores, and refines abstractions of the program state space based on lazy predicate abstraction and interpolation-based predicate discovery. This paper gives an introduction to BLAST and demonstrates, through two case studies, how it can be applied to program verification and test-case generation. In the first case study, we use BLAST to statically prove memory safety for C programs. We use CCured, a type-based memory-safety analyzer, to annotate a program with run-time assertions that check for safe memory operations. Then, we use BLAST to remove as many of the run-time checks as possible (by proving that these checks never fail), and to generate execution scenarios that violate the assertions for the remaining run-time checks. In our second case study, we use BLAST to automatically generate test suites that guarantee full coverage with respect to a given predicate. Given a C program and a target predicate p, BLAST determines the program locations q for which there exists a program execution that reaches q with p true, and automatically generates a set of test vectors that cause such executions. Our experiments show that BLAST can provide automated, precise, and scalable analysis for C programs.},
}
@inproceedings{4570,
  author    = {Brihaye, Thomas and Henzinger, Thomas and Prabhu, Vinayak S. and Raskin, Jean-François},
  title     = {Minimum-Time Reachability in Timed Games},
  booktitle = {Automata, Languages and Programming ({ICALP})},
  series    = {Lecture Notes in Computer Science},
  volume    = {4596},
  pages     = {825--837},
  publisher = {Springer},
  year      = {2007},
  doi       = {10.1007/978-3-540-73420-8_71},
  internal-note = {booktitle/series inferred from DOI (Springer LNCS 4596, ICALP 2007); verify against proceedings},
  abstract  = {We consider the minimum-time reachability problem in concurrent two-player timed automaton game structures. We show how to compute the minimum time needed by a player to reach a target location against all possible choices of the opponent. We do not put any syntactic restriction on the game structure, nor do we require any player to guarantee time divergence. We only require players to use receptive strategies which do not block time. The minimal time is computed in part using a fixpoint expression, which we show can be evaluated on equivalence classes of a non-trivial extension of the clock-region equivalence relation for timed automata.},
}
@inproceedings{4571,
  author    = {Beyer, Dirk and Henzinger, Thomas and Majumdar, Rupak and Rybalchenko, Andrey},
  title     = {Path Invariants},
  booktitle = {Proceedings of the {ACM} {SIGPLAN} Conference on Programming Language Design and Implementation ({PLDI})},
  pages     = {300--309},
  publisher = {ACM},
  year      = {2007},
  doi       = {10.1145/1250734.1250769},
  internal-note = {booktitle inferred from DOI (ACM, PLDI 2007); author corrected to "Majumdar, Rupak"; verify},
  abstract  = {The success of software verification depends on the ability to find a suitable abstraction of a program automatically. We propose a method for automated abstraction refinement which overcomes some limitations of current predicate discovery schemes. In current schemes, the cause of a false alarm is identified as an infeasible error path, and the abstraction is refined in order to remove that path. By contrast, we view the cause of a false alarm---the spurious counterexample---as a full-fledged program, namely, a fragment of the original program whose control-flow graph may contain loops and represent unbounded computations. There are two advantages to using such path programs as counterexamples for abstraction refinement. First, we can bring the whole machinery of program analysis to bear on path programs, which are typically small compared to the original program. Specifically, we use constraint-based invariant generation to automatically infer invariants of path programs---so-called path invariants. Second, we use path invariants for abstraction refinement in order to remove not one infeasibility at a time, but at once all (possibly infinitely many) infeasible error computations that are represented by a path program. Unlike previous predicate discovery schemes, our method handles loops without unrolling them; it infers abstractions that involve universal quantification and naturally incorporates disjunctive reasoning.},
}
@inproceedings{4572,
  author    = {Beyer, Dirk and Henzinger, Thomas and Majumdar, Rupak and Rybalchenko, Andrey},
  title     = {Invariant Synthesis for Combined Theories},
  booktitle = {Verification, Model Checking, and Abstract Interpretation ({VMCAI})},
  series    = {Lecture Notes in Computer Science},
  volume    = {4349},
  pages     = {378--394},
  publisher = {Springer},
  year      = {2007},
  doi       = {10.1007/978-3-540-69738-1_27},
  internal-note = {booktitle/series inferred from DOI (Springer LNCS 4349, VMCAI 2007); author corrected to "Majumdar, Rupak"; verify},
  abstract  = {We present a constraint-based algorithm for the synthesis of invariants expressed in the combined theory of linear arithmetic and uninterpreted function symbols. Given a set of programmer-specified invariant templates, our algorithm reduces the invariant synthesis problem to a sequence of arithmetic constraint satisfaction queries. Since the combination of linear arithmetic and uninterpreted functions is a widely applied predicate domain for program verification, our algorithm provides a powerful tool to statically and automatically reason about program correctness. The algorithm can also be used for the synthesis of invariants over arrays and set data structures, because satisfiability questions for the theories of sets and arrays can be reduced to the theory of linear arithmetic with uninterpreted functions. We have implemented our algorithm and used it to find invariants for a low-level memory allocator written in C.},
}
@inproceedings{4573,
  author    = {Beyer, Dirk and Henzinger, Thomas and Théoduloz, Grégory},
  title     = {Configurable Software Verification: Concretizing the Convergence of Model Checking and Program Analysis},
  booktitle = {Computer Aided Verification ({CAV})},
  series    = {Lecture Notes in Computer Science},
  volume    = {4590},
  pages     = {504--518},
  publisher = {Springer},
  year      = {2007},
  doi       = {10.1007/978-3-540-73368-3_51},
  internal-note = {booktitle/series inferred from DOI (Springer LNCS 4590, CAV 2007); verify against proceedings},
  abstract  = {In automatic software verification, we have observed a theoretical convergence of model checking and program analysis. In practice, however, model checkers are still mostly concerned with precision, e.g., the removal of spurious counterexamples; for this purpose they build and refine reachability trees. Lattice-based program analyzers, on the other hand, are primarily concerned with efficiency. We designed an algorithm and built a tool that can be configured to perform not only a purely tree-based or a purely lattice-based analysis, but offers many intermediate settings that have not been evaluated before. The algorithm and tool take one or more abstract interpreters, such as a predicate abstraction and a shape analysis, and configure their execution and interaction using several parameters. Our experiments show that such customization may lead to dramatic improvements in the precision-efficiency spectrum.},
}