@inproceedings{5959,
  abstract = {Formalizing properties of systems with continuous dynamics is a challenging task. In this paper, we propose a formal framework for specifying and monitoring rich temporal properties of real-valued signals. We introduce signal first-order logic (SFO) as a specification language that combines first-order logic with linear-real arithmetic and unary function symbols interpreted as piecewise-linear signals. We first show that while the satisfiability problem for SFO is undecidable, its membership and monitoring problems are decidable. We develop an offline monitoring procedure for SFO that has polynomial complexity in the size of the input trace and the specification, for a fixed number of quantifiers and function symbols. We show that the algorithm has computation time linear in the size of the input trace for the important fragment of bounded-response specifications interpreted over input traces with finite variability. We can use our results to extend signal temporal logic with first-order quantifiers over time and value parameters, while preserving its efficient monitoring. We finally demonstrate the practical appeal of our logic through a case study in the micro-electronics domain.},
  author = {Bakhirkin, Alexey and Ferrere, Thomas and Henzinger, Thomas A and Nickovic, Dejan},
  booktitle = {2018 International Conference on Embedded Software},
  isbn = {9781538655603},
  location = {Turin, Italy},
  pages = {1--10},
  publisher = {IEEE},
  title = {{Keynote: The first-order logic of signals}},
  doi = {10.1109/emsoft.2018.8537203},
  year = {2018},
}

@inproceedings{5962,
  abstract = {Stochastic Gradient Descent (SGD) is a fundamental algorithm in machine learning, representing the optimization backbone for training several classic models, from regression to neural networks. Given the recent practical focus on distributed machine learning, significant work has been dedicated to the convergence properties of this algorithm under the inconsistent and noisy updates arising from execution in a distributed environment. However, surprisingly, the convergence properties of this classic algorithm in the standard shared-memory model are still not well understood. In this work, we address this gap and provide new convergence bounds for lock-free concurrent stochastic gradient descent, executing in the classic asynchronous shared-memory model, against a strong adaptive adversary. Our results give improved upper and lower bounds on the ``price of asynchrony'' when executing the fundamental SGD algorithm in a concurrent setting. They show that this classic optimization tool can converge faster and with a wider range of parameters than previously known under asynchronous iterations. At the same time, we exhibit a fundamental trade-off between the maximum delay in the system and the rate at which SGD can converge, which governs the set of parameters under which this algorithm can still work efficiently.},
  author = {Alistarh, Dan-Adrian and De Sa, Christopher and Konstantinov, Nikola H},
  booktitle = {Proceedings of the 2018 ACM Symposium on Principles of Distributed Computing - PODC '18},
  isbn = {9781450357951},
  location = {Egham, United Kingdom},
  pages = {169--178},
  publisher = {ACM Press},
  title = {{The convergence of stochastic gradient descent in asynchronous shared memory}},
  doi = {10.1145/3212734.3212763},
  year = {2018},
}
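A minimal Python sketch of the lock-free pattern studied in the entry above (illustrative, not code from the paper; the quadratic objective, step size, and thread count are assumptions). Several threads update a shared iterate without any locking; note that CPython's GIL serializes individual bytecode operations, so this simulates the asynchronous shared-memory model rather than producing true hardware-level races.

import threading
import numpy as np

# Toy least-squares problem: minimize (1/2n) * ||A w - b||^2 (illustrative choice).
rng = np.random.default_rng(0)
n, d = 1000, 10
A = rng.normal(size=(n, d))
b = np.dot(A, rng.normal(size=d)) + 0.01 * rng.normal(size=n)

w = np.zeros(d)   # shared iterate, read and written by all threads without locks
lr = 0.05

def worker(seed, steps=20000):
    local = np.random.default_rng(seed)        # per-thread sampler
    for _ in range(steps):
        i = local.integers(n)                  # pick a random data point
        g = (np.dot(A[i], w) - b[i]) * A[i]    # stochastic gradient at a possibly stale w
        np.subtract(w, lr * g, out=w)          # unsynchronized in-place update

threads = [threading.Thread(target=worker, args=(s,)) for s in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()
print("final mean squared residual:", np.mean((np.dot(A, w) - b) ** 2))

Despite the stale reads and interleaved writes, the residual still shrinks to a small noise floor on this toy problem, which is exactly the kind of robustness the paper's bounds quantify.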
@article{5860,
  abstract = {A major problem for evolutionary theory is understanding the so-called open-ended nature of evolutionary change, from its definition to its origins. Open-ended evolution (OEE) refers to the unbounded increase in complexity that seems to characterize evolution on multiple scales. This property seems to be a characteristic feature of biological and technological evolution and is strongly tied to the generative potential associated with combinatorics, which allows systems to grow and expand their available state spaces. Interestingly, many complex systems presumably displaying OEE, from language to proteins, share a common statistical property: the presence of Zipf's Law. Given an inventory of basic items (such as words or protein domains) required to build more complex structures (sentences or proteins), Zipf's Law tells us that most of these elements are rare whereas a few of them are extremely common. Using algorithmic information theory, in this paper we provide a fundamental definition of open-endedness, which can be understood as a set of postulates. Its statistical counterpart, based on standard Shannon information theory, has the structure of a variational problem which is shown to lead to Zipf's Law as the expected consequence of an evolutionary process displaying OEE. We further explore the problem of information conservation through an OEE process and we conclude that statistical information (standard Shannon information) is not conserved, resulting in the paradoxical situation in which the increase of information content has the effect of erasing itself. We prove that this paradox is solved if we consider non-statistical forms of information. This last result implies that standard information theory may not be a suitable theoretical framework to explore the persistence and increase of the information content in OEE systems.},
  author = {Corominas-Murtra, Bernat and Seoane, Luís F. and Solé, Ricard},
  issn = {1742-5689},
  journal = {Journal of the Royal Society Interface},
  number = {149},
  publisher = {Royal Society Publishing},
  title = {{Zipf's Law, unbounded complexity and open-ended evolution}},
  doi = {10.1098/rsif.2018.0395},
  volume = {15},
  year = {2018},
}
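A quick empirical illustration of the statistical signature discussed in the entry above (a sketch, not code from the paper; the vocabulary size and sample count are arbitrary assumptions). Sampling tokens from an inventory whose item probabilities decay as 1/rank and regressing log frequency on log rank recovers a slope near -1, i.e., Zipf's Law.

import numpy as np

# Sample tokens from an inventory with P(item r) proportional to 1/r.
rng = np.random.default_rng(1)
V = 5000                                # inventory size (arbitrary)
p = 1.0 / np.arange(1, V + 1)
p /= p.sum()
tokens = rng.choice(V, size=500_000, p=p)

# Empirical rank-frequency curve: frequency of the r-th most common item.
_, counts = np.unique(tokens, return_counts=True)
freq = np.sort(counts)[::-1]
rank = np.arange(1, len(freq) + 1)

# Log-log regression over the top ranks; the slope should be close to -1.
slope = np.polyfit(np.log(rank[:1000]), np.log(freq[:1000]), 1)[0]
print(f"rank-frequency slope: {slope:.2f}")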
@inproceedings{5961,
  abstract = {The area of machine learning has made considerable progress over the past decade, enabled by the widespread availability of large datasets, as well as by improved algorithms and models. Given the large computational demands of machine learning workloads, parallelism, implemented either through single-node concurrency or through multi-node distribution, has been a third key ingredient to advances in machine learning. The goal of this tutorial is to provide the audience with an overview of standard distribution techniques in machine learning, with an eye towards the intriguing trade-offs between synchronization and communication costs of distributed machine learning algorithms, on the one hand, and their convergence, on the other. The tutorial will focus on parallelization strategies for the fundamental stochastic gradient descent (SGD) algorithm, which is a key tool when training machine learning models, from classical instances such as linear regression to state-of-the-art neural network architectures. The tutorial will describe the guarantees provided by this algorithm in the sequential case, and then move on to cover both shared-memory and message-passing parallelization strategies, together with the guarantees they provide and the corresponding trade-offs. The presentation will conclude with a broad overview of ongoing research in distributed and concurrent machine learning. The tutorial will assume no prior knowledge beyond familiarity with basic concepts in algebra and analysis.},
  author = {Alistarh, Dan-Adrian},
  booktitle = {Proceedings of the 2018 ACM Symposium on Principles of Distributed Computing - PODC '18},
  isbn = {9781450357951},
  location = {Egham, United Kingdom},
  pages = {487--488},
  publisher = {ACM Press},
  title = {{A brief tutorial on distributed and concurrent machine learning}},
  doi = {10.1145/3212734.3212798},
  year = {2018},
}

@article{5960,
  abstract = {In this paper we present a reliable method to verify the existence of loops along the uncertain trajectory of a robot, based on proprioceptive measurements only, within a bounded-error context. Loop closure detection is one of the key points in simultaneous localization and mapping (SLAM) methods, especially in homogeneous environments where scene recognition is difficult. The proposed approach is generic and could be coupled with conventional SLAM algorithms to reliably reduce their computing burden, thus improving the localization and mapping processes in the most challenging environments, such as unexplored underwater extents. To prove that a robot performed a loop, whatever the uncertainties in its evolution, we employ the notion of topological degree that originates in the field of differential topology. We show that a verification tool based on the topological degree is an optimal method for proving robot loops. This is demonstrated both on datasets from real missions involving autonomous underwater vehicles and by a mathematical discussion.},
  author = {Rohou, Simon and Franek, Peter and Aubry, Clément and Jaulin, Luc},
  issn = {1741-3176},
  journal = {The International Journal of Robotics Research},
  number = {12},
  pages = {1500--1516},
  publisher = {SAGE Publications},
  title = {{Proving the existence of loops in robot trajectories}},
  doi = {10.1177/0278364918808367},
  volume = {37},
  year = {2018},
}
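To convey the core idea of the entry above in a few lines: a loop p(t1) = p(t2) exists inside a parameter box whenever the map f(t1, t2) = p(t2) - p(t1) has nonzero topological degree on that box, which in the plane reduces to a winding number along the box boundary. The following is a floating-point sketch under illustrative assumptions (a hypothetical nodal-cubic trajectory and a hand-picked box), not the verified interval computation the paper uses.

import numpy as np

# Piecewise-linear sampling of a planar trajectory with one self-intersection:
# the nodal cubic p(t) = (t^2 - 1, t^3 - t) passes through (0, 0) at t = -1 and t = 1.
ts = np.linspace(-2.0, 2.0, 4001)
xs, ys = ts**2 - 1.0, ts**3 - ts

def p(t):
    # Linear interpolation between samples, standing in for recorded odometry.
    return np.array([np.interp(t, ts, xs), np.interp(t, ts, ys)])

def degree(box, n=2000):
    # Winding number of f(t1, t2) = p(t2) - p(t1) along the boundary of
    # box = (t1_lo, t1_hi, t2_lo, t2_hi), traversed counter-clockwise.
    a1, b1, a2, b2 = box
    edges = [(np.linspace(a1, b1, n), np.full(n, a2)),   # bottom
             (np.full(n, b1), np.linspace(a2, b2, n)),   # right
             (np.linspace(b1, a1, n), np.full(n, b2)),   # top
             (np.full(n, a1), np.linspace(b2, a2, n))]   # left
    t1s = np.concatenate([e[0] for e in edges])
    t2s = np.concatenate([e[1] for e in edges])
    f = np.array([p(t2) - p(t1) for t1, t2 in zip(t1s, t2s)])
    assert np.linalg.norm(f, axis=1).min() > 1e-9  # degree needs f != 0 on the boundary
    ang = np.unwrap(np.arctan2(f[:, 1], f[:, 0]))
    return int(round((ang[-1] - ang[0]) / (2.0 * np.pi)))

# Nonzero degree proves some (t1, t2) in the box satisfies p(t1) = p(t2): a loop.
print(degree((-1.5, -0.5, 0.5, 1.5)))   # prints 1 for the crossing at (t1, t2) = (-1, 1)

The paper replaces this pointwise boundary sampling with interval arithmetic, so that the nonzero-degree verdict holds for every trajectory consistent with the bounded measurement error, not just the nominal one.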