@inproceedings{4003,
abstract = {The writhing number measures the global geometry of a closed space curve or knot. We show that this measure is related to the average winding number of its Gauss map. Using this relationship, we give an algorithm for computing the writhing number for a polygonal knot with n edges in time roughly proportional to $n^{1.6}$. We also implement a different, simple algorithm and provide experimental evidence for its practical efficiency.},
author = {Agarwal, Pankaj K and Herbert Edelsbrunner and Wang, Yusu},
pages = {791 -- 799},
publisher = {SIAM},
title = {{Computing the writhing number of a polygonal knot}},
year = {2002},
}
@article{4139,
author = {Jitka Polechova and Stopka, P.},
journal = {Canadian Journal of Zoology},
number = {8},
pages = {1383 -- 1388},
publisher = {NRC Research Press},
title = {{Geometry of social relationships in the Old World wood mouse, Apodemus sylvaticus}},
doi = {10.1139/z02-124},
volume = {80},
year = {2002},
}
@article{4258,
abstract = {We studied the effect of multilocus balancing selection on neutral nucleotide variability at linked sites by simulating a model where diallelic polymorphisms are maintained at an arbitrary number of selected loci by means of symmetric overdominance. Different combinations of alleles define different genetic backgrounds that subdivide the population and strongly affect variability. Several multilocus fitness regimes with different degrees of epistasis and gametic disequilibrium are allowed. Analytical results based on a multilocus extension of the structured coalescent predict that the expected linked neutral diversity increases exponentially with the number of selected loci and can become extremely large. Our simulation results show that although variability increases with the number of genetic backgrounds that are maintained in the population, it is reduced by random fluctuations in the frequencies of those backgrounds and does not reach high levels even in very large populations. We also show that previous results on balancing selection in single-locus systems do not extend to the multilocus scenario in a straightforward way. Different patterns of linkage disequilibrium and of the frequency spectrum of neutral mutations are expected under different degrees of epistasis. Interestingly, the power to detect balancing selection using deviations from a neutral distribution of allele frequencies seems to be diminished under the fitness regime that leads to the largest increase of variability over the neutral case. This and other results are discussed in the light of data from the Mhc.},
author = {Navarro, Arcadio and Nicholas Barton},
journal = {Genetics},
number = {2},
pages = {849 -- 863},
publisher = {Genetics Society of America},
title = {{The effects of multilocus balancing selection on neutral variability}},
volume = {161},
year = {2002},
}
@article{4259,
abstract = {We extend current multilocus models to describe the effects of migration, recombination, selection, and nonrandom mating on sets of genes in diploids with varied modes of inheritance, allowing us to consider the patterns of nuclear and cytonuclear associations (disequilibria) under various models of migration. We show the relationship between the multilocus notation recently presented by Kirkpatrick, Johnson, and Barton (developed from previous work by Barton and Turelli) and the cytonuclear parameterization of Asmussen, Arnold, and Avise and extend this notation to describe associations between cytoplasmic elements and multiple nuclear genes. Under models with sexual symmetry, both nuclear-nuclear and cytonuclear disequilibria are equivalent. They differ, however, in cases involving some type of sexual asymmetry, which is then reflected in the asymmetric inheritance of cytoplasmic markers. An example given is the case of different migration rates in males and females; simulations using 2, 3, 4, or 5 unlinked autosomal markers with a maternally inherited cytoplasmic marker illustrate how nuclear-nuclear and cytonuclear associations can be used to separately estimate female and male migration rates. The general framework developed here allows us to investigate conditions where associations between loci with different modes of inheritance are not equivalent and to use this nonequivalence to test for deviations from simple models of admixture. },
author = {Orive, Maria E and Nicholas Barton},
journal = {Genetics},
number = {3},
pages = {1469 -- 1485},
publisher = {Genetics Society of America},
title = {{Associations between cytoplasmic and nuclear loci in hybridizing populations}},
volume = {162},
year = {2002},
}
@article{4260,
abstract = {We calculate the fixation probability of a beneficial allele that arises as the result of a unique mutation in an asexual population that is subject to recurrent deleterious mutation at rate U. Our analysis is an extension of previous works, which make a biologically restrictive assumption that selection against deleterious alleles is stronger than that on the beneficial allele of interest. We show that when selection against deleterious alleles is weak, beneficial alleles that confer a selective advantage that is small relative to U have greatly reduced probabilities of fixation. We discuss the consequences of this effect for the distribution of effects of alleles fixed during adaptation. We show that a selective sweep will increase the fixation probabilities of other beneficial mutations arising during some short interval afterward. We use the calculated fixation probabilities to estimate the expected rate of fitness improvement in an asexual population when beneficial alleles arise continually at some low rate proportional to U. We estimate the rate of mutation that is optimal in the sense that it maximizes this rate of fitness improvement. Again, this analysis relaxes the assumption made previously that selection against deleterious alleles is stronger than on beneficial alleles. },
author = {Johnson, Toby and Nicholas Barton},
journal = {Genetics},
number = {1},
pages = {395 -- 411},
publisher = {Genetics Society of America},
title = {{The effect of deleterious alleles on adaptation in asexual populations}},
volume = {162},
year = {2002},
}
@article{4261,
abstract = {Until recently, it was impracticable to identify the genes that are responsible for variation in continuous traits, or to directly observe the effects of their different alleles. Now, the abundance of genetic markers has made it possible to identify quantitative trait loci (QTL) — the regions of a chromosome or, ideally, individual sequence variants that are responsible for trait variation. What kind of QTL do we expect to find and what can our observations of QTL tell us about how organisms evolve? The key to understanding the evolutionary significance of QTL is to understand the nature of inherited variation, not in the immediate mechanistic sense of how genes influence phenotype, but, rather, to know what evolutionary forces maintain genetic variability.},
author = {Nicholas Barton and Keightley, Peter D},
journal = {Nature Reviews Genetics},
pages = {11 -- 21},
publisher = {Nature Publishing Group},
title = {{Understanding quantitative genetic variation}},
doi = {10.1038/nrg700},
volume = {3},
year = {2002},
}
@article{4262,
abstract = {Natural populations are structured spatially into local populations and genetically into diverse ‘genetic backgrounds’ defined by different combinations of selected alleles. If selection maintains genetic backgrounds at constant frequency then neutral diversity is enhanced. By contrast, if background frequencies fluctuate then diversity is reduced. Provided that the population size of each background is large enough, these effects can be described by the structured coalescent process. Almost all the extant results based on the coalescent deal with a single selected locus. Yet we know that very large numbers of genes are under selection and that any substantial effects are likely to be due to the cumulative effects of many loci. Here, we set up a general framework for the extension of the coalescent to multilocus scenarios and we use it to study the simplest model, where strong balancing selection acting on a set of n loci maintains $2^n$ backgrounds at constant frequencies and at linkage equilibrium. Analytical results show that the expected linked neutral diversity increases exponentially with the number of selected loci and can become extremely large. However, simulation results reveal that the structured coalescent approach breaks down when the number of backgrounds approaches the population size, because of stochastic fluctuations in background frequencies. A new method is needed to extend the structured coalescent to cases with large numbers of backgrounds.},
author = {Nicholas Barton and Navarro, Arcadio},
journal = {Genetical Research},
number = {2},
pages = {129 -- 139},
publisher = {Cambridge University Press},
title = {{Extending the coalescent to multilocus systems: the case of balancing selection}},
doi = {10.1017/S0016672301005493},
volume = {79},
year = {2002},
}
@article{4263,
abstract = {We introduce a general recursion for the probability of identity in state of two individuals sampled from a population subject to mutation, migration, and random drift in a two-dimensional continuum. The recursion allows for the interactions induced by density-dependent regulation of the population, which are inevitable in a continuous population. We give explicit series expansions for large neighbourhood size and for low mutation rates respectively and investigate the accuracy of the classical Malécot formula for these general models. When neighbourhood size is small, this formula does not give the identity even over large scales. However, for large neighbourhood size, it is an accurate approximation which summarises the local population structure in terms of three quantities: the effective dispersal rate, σe; the effective population density, ρe; and a local scale, κ, at which local interactions become significant. The results are illustrated by simulations.},
author = {Nicholas Barton and Depaulis, Frantz and Etheridge, Alison M},
journal = {Theoretical Population Biology},
number = {1},
pages = {31 -- 48},
publisher = {Academic Press},
title = {{Neutral evolution in spatially continuous populations}},
doi = {10.1006/tpbi.2001.1557},
volume = {61},
year = {2002},
}
@article{4347,
abstract = {Phylogenetic trees can be rooted by a number of criteria. Here, we introduce a Bayesian method for inferring the root of a phylogenetic tree by using one of several criteria: the outgroup, molecular clock, and nonreversible model of DNA substitution. We perform simulation analyses to examine the relative ability of these three criteria to correctly identify the root of the tree. The outgroup and molecular clock criteria were best able to identify the root of the tree, whereas the nonreversible model was able to identify the root only when the substitution process was highly nonreversible. We also examined the performance of the criteria for a tree of four species for which the topology and root position are well supported. Results of the analyses of these data are consistent with the simulation results.},
author = {Huelsenbeck, John P and Jonathan Bollback and Levine, Amy M},
journal = {Systematic Biology},
number = {1},
pages = {32 -- 43},
publisher = {Oxford University Press},
title = {{Inferring the root of a phylogenetic tree}},
doi = {10.1080/106351502753475862},
volume = {51},
year = {2002},
}
@article{4349,
abstract = {Bayesian inference is becoming a common statistical approach to phylogenetic estimation because, among other reasons, it allows for rapid analysis of large data sets with complex evolutionary models. Conveniently, Bayesian phylogenetic methods use currently available stochastic models of sequence evolution. However, as with other model-based approaches, the results of Bayesian inference are conditional on the assumed model of evolution: inadequate models (models that poorly fit the data) may result in erroneous inferences. In this article, I present a Bayesian phylogenetic method that evaluates the adequacy of evolutionary models using posterior predictive distributions. By evaluating a model's posterior predictive performance, an adequate model can be selected for a Bayesian phylogenetic study. Although I present a single test statistic that assesses the overall (global) performance of a phylogenetic model, a variety of test statistics can be tailored to evaluate specific features (local performance) of evolutionary models to identify sources failure. The method presented here, unlike the likelihood-ratio test and parametric bootstrap, accounts for uncertainty in the phylogeny and model parameters.},
author = {Jonathan Bollback},
journal = {Molecular Biology and Evolution},
number = {7},
pages = {1171 -- 1180},
publisher = {Oxford University Press},
title = {{Bayesian model adequacy and choice in phylogenetics}},
volume = {19},
year = {2002},
}
@article{4407,
abstract = {This paper presents a complete axiomatization of two decidable propositional real-time linear temporal logics: Event Clock Logic (EventClockTL) and Metric Interval Temporal Logic with past (MetricIntervalTL). The completeness proof consists of an effective proof building procedure for EventClockTL. From this result we obtain a complete axiomatization of MetricIntervalTL by providing axioms translating MetricIntervalTL formulae into EventClockTL formulae, the two logics being equally expressive. Our proof is structured to yield axiomatizations also for interesting fragments of these logics, such as the linear temporal logic of the real numbers (TLR).},
author = {Raskin, Jean-François and Schobbens, Pierre Y and Thomas Henzinger},
journal = {Theoretical Computer Science},
number = {1-2},
pages = {151 -- 182},
publisher = {Elsevier},
title = {{Axioms for real-time logics}},
doi = {10.1016/S0304-3975(00)00308-X},
volume = {274},
year = {2002},
}
@inproceedings{4413,
abstract = {An essential problem in component-based design is how to compose components designed in isolation. Several approaches have been proposed for specifying component interfaces that capture behavioral aspects such as interaction protocols, and for verifying interface compatibility. Likewise, several approaches have been developed for synthesizing converters between incompatible protocols. In this paper, we introduce the notion of adaptability as the property that two interfaces have when they can be made compatible by communicating through a converter that meets specified requirements. We show that verifying adaptability and synthesizing an appropriate converter are two faces of the same coin: adaptability can be formalized and solved using a game-theoretic framework, and then the converter can be synthesized as a strategy that always wins the game. Finally we show that this framework can be related to the rectification problem in trace theory.},
author = {Passerone, Roberto and de Alfaro, Luca and Thomas Henzinger and Sangiovanni-Vincentelli, Alberto},
pages = {132 -- 139},
publisher = {IEEE},
title = {{Convertibility verification and converter synthesis: Two faces of the same coin}},
doi = {10.1145/774572.774592},
year = {2002},
}
@inproceedings{4421,
abstract = {We demonstrate the feasibility and benefits of Giotto-based control software development by reimplementing the autopilot system of an autonomously flying model helicopter. Giotto offers a clean separation between the platform-independent concerns of software functionality and I/O timing, and the platform-dependent concerns of software scheduling and execution. Functionality code such as code computing control laws can be generated automatically from Simulink models or, as in the case of this project, inherited from a legacy system. I/O timing code is generated automatically from Giotto models that specify real-time requirements such as task frequencies and actuator update rates. We extend Simulink to support the design of Giotto models, and from these models, the automatic generation of Giotto code that supervises the interaction of the functionality code with the physical environment. The Giotto compiler performs a schedulability analysis on the Giotto code, and generates timing code for the helicopter platform. The Giotto methodology guarantees the stringent hard real-time requirements of the autopilot system, and at the same time supports the automation of the software development process in a way that produces a transparent software architecture with predictable behavior and reusable components.},
author = {Kirsch, Christoph M and Sanvido, Marco A and Thomas Henzinger and Pree, Wolfgang},
pages = {46 -- 60},
publisher = {ACM},
title = {{A Giotto-based helicopter control system}},
doi = {10.1007/3-540-45828-X_5},
volume = {2491},
year = {2002},
}
@inproceedings{4422,
abstract = {Behavioral properties of open systems can be formalized as objectives in two-player games. Turn-based games model asynchronous interaction between the players (the system and its environment) by interleaving their moves. Concurrent games model synchronous interaction: the players always move simultaneously. Infinitary winning criteria are considered: Büchi, co-Büchi, and more general parity conditions. A generalization of determinacy for parity games to concurrent parity games demands probabilistic (mixed) strategies: either player 1 has a mixed strategy to win with probability 1 (almost-sure winning), or player 2 has a mixed strategy to win with positive probability.
This work provides efficient reductions of concurrent probabilistic Büchi and co-Büchi games to turn-based games with Büchi condition and parity winning condition with three priorities, respectively. From a theoretical point of view, the latter reduction shows that one can trade the probabilistic nature of almost-sure winning for a more general parity (fairness) condition. The reductions improve understanding of concurrent games and provide an alternative simple proof of determinacy of concurrent Büchi and co-Büchi games. From a practical point of view, the reductions turn solvers of turn-based games into solvers of concurrent probabilistic games. Thus improvements in the well-studied algorithms for the former carry over immediately to the latter. In particular, a recent improvement in the complexity of solving turn-based parity games yields an improvement in time complexity of solving concurrent probabilistic co-Büchi games from cubic to quadratic.},
author = {Jurdziński, Marcin and Kupferman, Orna and Thomas Henzinger},
pages = {292 -- 305},
publisher = {Springer},
title = {{Trading probability for fairness}},
doi = {10.1007/3-540-45793-3_20},
volume = {2471},
year = {2002},
}
@inproceedings{4423,
abstract = {Automation control systems typically incorporate legacy code and components that were originally designed to operate independently. Furthermore, they operate under stringent safety and timing constraints. Current design strategies deal with these requirements and characteristics with ad hoc approaches. In particular, when designing control laws, implementation constraints are often ignored or cursorily estimated. Indeed, costly redesigns are needed after a prototype of the control system is built due to missed timing constraints and subtle transient errors. In this paper, we use the concepts of platform-based design, and the Giotto programming language, to develop a methodology for the design of automation control systems that builds in modularity and correct-by-construction procedures. We illustrate our strategy by describing the (successful) application of the methodology to the design of a time-based control system for a rotorcraft Uninhabited Aerial Vehicle (UAV).},
author = {Horowitz, Benjamin and Liebman, Judith and Ma, Cedric and Koo, T John and Thomas Henzinger and Sangiovanni-Vincentelli, Alberto and Sastry, Shankar},
number = {1},
publisher = {Elsevier},
title = {{Embedded software design and system integration for rotorcraft UAV using platforms}},
doi = {10.3182/20020721-6-ES-1901.01628},
volume = {15},
year = {2002},
}
@inproceedings{4444,
abstract = {The Embedded Machine is a virtual machine that mediates in real time the interaction between software processes and physical processes. It separates the compilation of embedded programs into two phases. The first, platform-independent compiler phase generates E code (code executed by the Embedded Machine), which supervises the timing ---not the scheduling--- of application tasks relative to external events, such as clock ticks and sensor interrupts. E~code is portable and exhibits, given an input behavior, predictable (i.e., deterministic) timing and output behavior. The second, platform-dependent compiler phase checks the time safety of the E code, that is, whether platform performance (determined by the hardware) and platform utilization (determined by the scheduler of the operating system) enable its timely execution. We have used the Embedded Machine to compile and execute high-performance control applications written in Giotto, such as the flight control system of an autonomous model helicopter.},
author = {Thomas Henzinger and Kirsch, Christoph M},
pages = {315 -- 326},
publisher = {ACM},
title = {{The embedded machine: predictable, portable real-time code}},
doi = {10.1145/512529.512567},
year = {2002},
}
@inproceedings{4470,
abstract = {Giotto is a platform-independent language for specifying software for high-performance control applications. In this paper we present a new approach to the compilation of Giotto. Following this approach, the Giotto compiler generates code for a virtual machine, called the E machine, which can be ported to different platforms. The Giotto compiler also checks if the generated E code is time safe for a given platform, that is, if the platform offers sufficient performance to ensure that the E code is executed in a timely fashion that conforms with the Giotto semantics. Time-safety checking requires a schedulability analysis. We show that while for arbitrary E code, the analysis is exponential, for E code generated from typical Giotto programs, the analysis is polynomial. This supports our claim that Giotto identifies a useful fragment of embedded programs.},
author = {Thomas Henzinger and Kirsch, Christoph M and Majumdar, Ritankar S and Matic, Slobodan},
pages = {76 -- 92},
publisher = {ACM},
title = {{Time-safety checking for embedded programs}},
doi = {10.1007/3-540-45828-X_7},
volume = {2491},
year = {2002},
}
@inproceedings{4471,
abstract = {The sequential synthesis problem, which is closely related to Church’s solvability problem, asks, given a specification in the form of a binary relation between input and output streams, for the construction of a finite-state stream transducer that converts inputs to appropriate outputs. For efficiency reasons, practical sequential hardware is often designed to operate without prior initialization. Such hardware designs can be modeled by uninitialized state machines, which are required to satisfy their specification if started from any state. In this paper we solve the sequential synthesis problem for uninitialized systems, that is, we construct uninitialized finite-state stream transducers. We consider specifications given by LTL formulas, deterministic, nondeterministic, universal, and alternating Büchi automata. We solve this uninitialized synthesis problem by reducing it to the well-understood initialized synthesis problem. While our solution is straightforward, it leads, for some specification formalisms, to upper bounds that are exponentially worse than the complexity of the corresponding initialized problems. However, we prove lower bounds to show that our simple solutions are optimal for all considered specification formalisms. We also study the problem of deciding whether a given specification is uninitialized, that is, if its uninitialized and initialized synthesis problems coincide. We show that this problem has, for each specification formalism, the same complexity as the equivalence problem.},
author = {Thomas Henzinger and Krishnan, Sriram C and Kupferman, Orna and Mang, Freddy Y},
pages = {644 -- 656},
publisher = {Springer},
title = {{Synthesis of uninitialized systems}},
doi = {10.1007/3-540-45465-9_55},
volume = {2380},
year = {2002},
}
@inproceedings{4472,
abstract = {We present a methodology and tool for verifying and certifying systems code. The verification is based on the lazy-abstraction paradigm for intertwining the following three logical steps: construct a predicate abstraction from the code, model check the abstraction, and automatically refine the abstraction based on counterexample analysis. The certification is based on the proof-carrying code paradigm. Lazy abstraction enables the automatic construction of small proof certificates. The methodology is implemented in Blast, the Berkeley Lazy Abstraction Software verification Tool. We describe our experience applying Blast to Linux and Windows device drivers. Given the C code for a driver and for a temporal-safety monitor, Blast automatically generates an easily checkable correctness certificate if the driver satisfies the specification, and an error trace otherwise.},
author = {Thomas Henzinger and Necula, George C and Jhala, Ranjit and Sutre, Grégoire and Majumdar, Ritankar S and Weimer, Westley},
pages = {526 -- 538},
publisher = {Springer},
title = {{Temporal safety proofs for systems code}},
doi = {10.1007/3-540-45657-0_45},
volume = {2404},
year = {2002},
}
@article{4473,
abstract = {The simulation preorder on state transition systems is widely accepted as a useful notion of refinement, both in its own right and as an efficiently checkable sufficient condition for trace containment. For composite systems, due to the exponential explosion of the state space, there is a need for decomposing a simulation check of the form P ≤s Q, denoting "P is simulated by Q," into simpler simulation checks on the components of P and Q. We present an assume-guarantee rule that enables such a decomposition. To the best of our knowledge, this is the first assume-guarantee rule that applies to a refinement relation different from trace containment. Our rule is circular, and its soundness proof requires induction on trace trees. The proof is constructive: given simulation relations that witness the simulation preorder between corresponding components of P and Q, we provide a procedure for constructing a witness relation for P ≤s Q. We also extend our assume-guarantee rule to account for fairness constraints on transition systems.},
author = {Thomas Henzinger and Qadeer, Shaz and Rajamani, Sriram K and Tasiran, Serdar},
journal = {ACM Transactions on Programming Languages and Systems (TOPLAS)},
number = {1},
pages = {51 -- 64},
publisher = {ACM},
title = {{An assume-guarantee rule for checking simulation}},
doi = {10.1145/509705.509707},
volume = {24},
year = {2002},
}