@inproceedings{193,
abstract = {We show attacks on five data-independent memory-hard functions (iMHF) that were submitted to the password hashing competition (PHC). Informally, an MHF is a function which cannot be evaluated on dedicated hardware, like ASICs, at significantly lower hardware and/or energy cost than evaluating a single instance on a standard single-core architecture. Data-independent means the memory access pattern of the function is independent of the input; this makes iMHFs harder to construct than data-dependent ones, but the latter can be attacked by various side-channel attacks. Following [Alwen-Blocki'16], we capture the evaluation of an iMHF as a directed acyclic graph (DAG). The cumulative parallel pebbling complexity of this DAG is a measure for the hardware cost of evaluating the iMHF on an ASIC. Ideally, one would like the complexity of a DAG underlying an iMHF to be as close to quadratic in the number of nodes of the graph as possible. Instead, we show that (the DAGs underlying) the following iMHFs are far from this bound: Rig.v2, TwoCats and Gambit each having an exponent no more than 1.75. Moreover, we show that the complexity of the iMHF modes of the PHC finalists Pomelo and Lyra2 have exponents at most 1.83 and 1.67 respectively. To show this we investigate a combinatorial property of each underlying DAG (called its depth-robustness). By establishing upper bounds on this property we are then able to apply the general technique of [Alwen-Blocki'16] for analyzing the hardware costs of an iMHF.},
author = {Alwen, Joel F and Gazi, Peter and Kamath Hosdurg, Chethan and Klein, Karen and Osang, Georg F and Pietrzak, Krzysztof Z and Reyzin, Leonid and Rolinek, Michal and Rybar, Michal},
booktitle = {Proceedings of the 2018 on Asia Conference on Computer and Communications Security},
location = {Incheon, Republic of Korea},
pages = {51--65},
publisher = {ACM},
title = {{On the memory hardness of data independent password hashing functions}},
doi = {10.1145/3196494.3196534},
year = {2018},
}
@article{87,
abstract = {Using the geodesic distance on the n-dimensional sphere, we study the expected radius function of the Delaunay mosaic of a random set of points. Specifically, we consider the partition of the mosaic into intervals of the radius function and determine the expected number of intervals whose radii are less than or equal to a given threshold. We find that the expectations are essentially the same as for the Poisson–Delaunay mosaic in n-dimensional Euclidean space. Assuming the points are not contained in a hemisphere, the Delaunay mosaic is isomorphic to the boundary complex of the convex hull in Rn+1, so we also get the expected number of faces of a random inscribed polytope. As proved in Antonelli et al. [Adv. in Appl. Probab. 9–12 (1977–1980)], an orthant section of the n-sphere is isometric to the standard n-simplex equipped with the Fisher information metric. It follows that the latter space has similar stochastic properties as the n-dimensional Euclidean space. Our results are therefore relevant in information geometry and in population genetics.},
author = {Edelsbrunner, Herbert and Nikitenko, Anton},
journal = {Annals of Applied Probability},
number = {5},
pages = {3215--3238},
publisher = {Institute of Mathematical Statistics},
title = {{Random inscribed polytopes have similar radius functions as Poisson-Delaunay mosaics}},
doi = {10.1214/18-AAP1389},
volume = {28},
year = {2018},
}
@article{106,
abstract = {The goal of this article is to introduce the reader to the theory of intrinsic geometry of convex surfaces. We illustrate the power of the tools by proving a theorem on convex surfaces containing an arbitrarily long closed simple geodesic. Let us remind ourselves that a curve in a surface is called geodesic if every sufficiently short arc of the curve is length minimizing; if, in addition, it has no self-intersections, we call it simple geodesic. A tetrahedron with equal opposite edges is called isosceles. The axiomatic method of Alexandrov geometry allows us to work with the metrics of convex surfaces directly, without approximating it first by a smooth or polyhedral metric. Such approximations destroy the closed geodesics on the surface; therefore it is difficult (if at all possible) to apply approximations in the proof of our theorem. On the other hand, a proof in the smooth or polyhedral case usually admits a translation into Alexandrov’s language; such translation makes the result more general. In fact, our proof resembles a translation of the proof given by Protasov. Note that the main theorem implies in particular that a smooth convex surface does not have arbitrarily long simple closed geodesics. However we do not know a proof of this corollary that is essentially simpler than the one presented below.},
author = {Akopyan, Arseniy and Petrunin, Anton},
journal = {The Mathematical Intelligencer},
number = {3},
pages = {26--31},
publisher = {Springer},
title = {{Long geodesics on convex surfaces}},
doi = {10.1007/s00283-018-9795-5},
volume = {40},
year = {2018},
}
@article{1064,
abstract = {In 1945, A.W. Goodman and R.E. Goodman proved the following conjecture by P. Erdős: Given a family of (round) disks of radii r1, … , rn in the plane, it is always possible to cover them by a disk of radius R= ∑ ri, provided they cannot be separated into two subfamilies by a straight line disjoint from the disks. In this note we show that essentially the same idea may work for different analogues and generalizations of their result. In particular, we prove the following: Given a family of positive homothetic copies of a fixed convex body K⊂ Rd with homothety coefficients τ1, … , τn> 0 , it is always possible to cover them by a translate of d+12(∑τi)K, provided they cannot be separated into two subfamilies by a hyperplane disjoint from the homothets.},
author = {Akopyan, Arseniy and Balitskiy, Alexey and Grigorev, Mikhail},
issn = {1432-0444},
journal = {Discrete & Computational Geometry},
number = {4},
pages = {1001--1009},
publisher = {Springer},
title = {{On the circle covering theorem by A.W. Goodman and R.E. Goodman}},
doi = {10.1007/s00454-017-9883-x},
volume = {59},
year = {2018},
}
@article{312,
abstract = {Motivated by biological questions, we study configurations of equal spheres that neither pack nor cover. Placing their centers on a lattice, we define the soft density of the configuration by penalizing multiple overlaps. Considering the 1-parameter family of diagonally distorted 3-dimensional integer lattices, we show that the soft density is maximized at the FCC lattice.},
author = {Edelsbrunner, Herbert and Iglesias Ham, Mabel},
issn = {0895-4801},
journal = {SIAM Journal on Discrete Mathematics},
number = {1},
pages = {750--782},
publisher = {Society for Industrial and Applied Mathematics},
title = {{On the optimality of the FCC lattice for soft sphere packing}},
doi = {10.1137/16M1097201},
volume = {32},
year = {2018},
}
@article{718,
abstract = {Mapping every simplex in the Delaunay mosaic of a discrete point set to the radius of the smallest empty circumsphere gives a generalized discrete Morse function. Choosing the points from a Poisson point process in ℝ n , we study the expected number of simplices in the Delaunay mosaic as well as the expected number of critical simplices and nonsingular intervals in the corresponding generalized discrete gradient. Observing connections with other probabilistic models, we obtain precise expressions for the expected numbers in low dimensions. In particular, we obtain the expected numbers of simplices in the Poisson–Delaunay mosaic in dimensions n ≤ 4.},
author = {Edelsbrunner, Herbert and Nikitenko, Anton and Reitzner, Matthias},
issn = {0001-8678},
journal = {Advances in Applied Probability},
number = {3},
pages = {745--767},
publisher = {Cambridge University Press},
title = {{Expected sizes of Poisson Delaunay mosaics and their discrete Morse functions}},
doi = {10.1017/apr.2017.20},
volume = {49},
year = {2017},
}
@article{737,
abstract = {We generalize Brazas’ topology on the fundamental group to the whole universal path space X̃, i.e., to the set of homotopy classes of all based paths. We develop basic properties of the new notion and provide a complete comparison of the obtained topology with the established topologies, in particular with the Lasso topology and the CO topology, i.e., the topology that is induced by the compact-open topology. It turns out that the new topology is the finest topology contained in the CO topology, for which the action of the fundamental group on the universal path space is a continuous group action.},
author = {Virk, Ziga and Zastrow, Andreas},
issn = {0166-8641},
journal = {Topology and its Applications},
pages = {186--196},
publisher = {Elsevier},
title = {{A new topology on the universal path space}},
doi = {10.1016/j.topol.2017.09.015},
volume = {231},
year = {2017},
}
@article{481,
abstract = {We introduce planar matchings on directed pseudo-line arrangements, which yield a planar set of pseudo-line segments such that only matching-partners are adjacent. By translating the planar matching problem into a corresponding stable roommates problem we show that such matchings always exist. Using our new framework, we establish, for the first time, a complete, rigorous definition of weighted straight skeletons, which are based on a so-called wavefront propagation process. We present a generalized and unified approach to treat structural changes in the wavefront that focuses on the restoration of weak planarity by finding planar matchings.},
author = {Biedl, Therese and Huber, Stefan and Palfrader, Peter},
journal = {International Journal of Computational Geometry & Applications},
number = {3-4},
pages = {211--229},
publisher = {World Scientific Publishing},
title = {{Planar matchings for weighted straight skeletons}},
doi = {10.1142/S0218195916600050},
volume = {26},
year = {2017},
}
@article{521,
abstract = {Let X and Y be proper metric spaces. We show that a coarsely n-to-1 map f:X→Y induces an n-to-1 map of Higson coronas. This viewpoint turns out to be successful in showing that the classical dimension raising theorems hold in large scale; that is, if f:X→Y is a coarsely n-to-1 map between proper metric spaces X and Y then asdim(Y)≤asdim(X)+n−1. Furthermore we introduce coarsely open coarsely n-to-1 maps, which include the natural quotient maps via a finite group action, and prove that they preserve the asymptotic dimension.},
author = {Austin, Kyle and Virk, Ziga},
issn = {0166-8641},
journal = {Topology and its Applications},
pages = {45--57},
publisher = {Elsevier},
title = {{Higson compactification and dimension raising}},
doi = {10.1016/j.topol.2016.10.005},
volume = {215},
year = {2017},
}
@article{568,
abstract = {We study robust properties of zero sets of continuous maps f: X → ℝn. Formally, we analyze the family Z< r(f) := (g-1(0): ||g - f|| < r) of all zero sets of all continuous maps g closer to f than r in the max-norm. All of these sets are outside A := (x: |f(x)| ≥ r) and we claim that Z< r(f) is fully determined by A and an element of a certain cohomotopy group which (by a recent result) is computable whenever the dimension of X is at most 2n - 3. By considering all r > 0 simultaneously, the pointed cohomotopy groups form a persistence module-a structure leading to persistence diagrams as in the case of persistent homology or well groups. Eventually, we get a descriptor of persistent robust properties of zero sets that has better descriptive power (Theorem A) and better computability status (Theorem B) than the established well diagrams. Moreover, if we endow every point of each zero set with gradients of the perturbation, the robust description of the zero sets by elements of cohomotopy groups is in some sense the best possible (Theorem C).},
author = {Franek, Peter and Krčál, Marek},
issn = {1532-0073},
journal = {Homology, Homotopy and Applications},
number = {2},
pages = {313--342},
publisher = {International Press},
title = {{Persistence of zero sets}},
doi = {10.4310/HHA.2017.v19.n2.a16},
volume = {19},
year = {2017},
}
@phdthesis{6287,
abstract = {The main objects considered in the present work are simplicial and CW-complexes with vertices forming a random point cloud. In particular, we consider a Poisson point process in R^n and study Delaunay and Voronoi complexes of the first and higher orders and weighted Delaunay complexes obtained as sections of Delaunay complexes, as well as the Čech complex. Further, we examine the Delaunay complex of a Poisson point process on the sphere S^n, as well as of a uniform point cloud, which is equivalent to the convex hull, providing a connection to the theory of random polytopes. Each of the complexes in question can be endowed with a radius function, which maps its cells to the radii of appropriately chosen circumspheres, called the radius of the cell. Applying and developing discrete Morse theory for these functions, joining it together with probabilistic and sometimes analytic machinery, and developing several integral geometric tools, we aim at getting the distributions of circumradii of typical cells. For all considered complexes, we are able to generalize and obtain up to constants the distribution of radii of typical intervals of all types. In low dimensions the constants can be computed explicitly, thus providing the explicit expressions for the expected numbers of cells. In particular, it allows to find the expected density of simplices of every dimension for a Poisson point process in R^4, whereas the result for R^3 was known already in 1970's.},
author = {Nikitenko, Anton},
pages = {86},
publisher = {IST Austria},
school = {IST Austria},
title = {{Discrete Morse theory for random complexes}},
doi = {10.15479/AT:ISTA:th_873},
year = {2017},
}
@inproceedings{688,
abstract = {We show that the framework of topological data analysis can be extended from metrics to general Bregman divergences, widening the scope of possible applications. Examples are the Kullback - Leibler divergence, which is commonly used for comparing text and images, and the Itakura - Saito divergence, popular for speech and sound. In particular, we prove that appropriately generalized Čech and Delaunay (alpha) complexes capture the correct homotopy type, namely that of the corresponding union of Bregman balls. Consequently, their filtrations give the correct persistence diagram, namely the one generated by the uniformly growing Bregman balls. Moreover, we show that unlike the metric setting, the filtration of Vietoris-Rips complexes may fail to approximate the persistence diagram. We propose algorithms to compute the thus generalized Čech, Vietoris-Rips and Delaunay complexes and experimentally test their efficiency. Lastly, we explain their surprisingly good performance by making a connection with discrete Morse theory.},
author = {Edelsbrunner, Herbert and Wagner, Hubert},
booktitle = {33rd International Symposium on Computational Geometry (SoCG 2017)},
issn = {1868-8969},
location = {Brisbane, Australia},
pages = {39:1--39:16},
publisher = {Schloss Dagstuhl - Leibniz-Zentrum für Informatik},
title = {{Topological data analysis with Bregman divergences}},
doi = {10.4230/LIPIcs.SoCG.2017.39},
volume = {77},
year = {2017},
}
@article{707,
abstract = {We answer a question of M. Gromov on the waist of the unit ball.},
author = {Akopyan, Arseniy and Karasev, Roman},
issn = {0024-6093},
journal = {Bulletin of the London Mathematical Society},
number = {4},
pages = {690--693},
publisher = {Wiley-Blackwell},
title = {{A tight estimate for the waist of the ball}},
doi = {10.1112/blms.12062},
volume = {49},
year = {2017},
}
@inproceedings{833,
abstract = {We present an efficient algorithm to compute Euler characteristic curves of gray scale images of arbitrary dimension. In various applications the Euler characteristic curve is used as a descriptor of an image. Our algorithm is the first streaming algorithm for Euler characteristic curves. The usage of streaming removes the necessity to store the entire image in RAM. Experiments show that our implementation handles terabyte scale images on commodity hardware. Due to lock-free parallelism, it scales well with the number of processor cores. Additionally, we put the concept of the Euler characteristic curve in the wider context of computational topology. In particular, we explain the connection with persistence diagrams.},
author = {Heiss, Teresa and Wagner, Hubert},
booktitle = {Computer Analysis of Images and Patterns},
editor = {Felsberg, Michael and Heyden, Anders and Krüger, Norbert},
issn = {0302-9743},
location = {Ystad, Sweden},
pages = {397--409},
publisher = {Springer},
title = {{Streaming algorithm for Euler characteristic curves of multidimensional images}},
doi = {10.1007/978-3-319-64689-3_32},
volume = {10424},
year = {2017},
}
@inproceedings{836,
abstract = {Recent research has examined how to study the topological features of a continuous self-map by means of the persistence of the eigenspaces, for given eigenvalues, of the endomorphism induced in homology over a field. This raised the question of how to select dynamically significant eigenvalues. The present paper aims to answer this question, giving an algorithm that computes the persistence of eigenspaces for every eigenvalue simultaneously, also expressing said eigenspaces as direct sums of “finite” and “singular” subspaces.},
author = {Ethier, Marc and Jablonski, Grzegorz and Mrozek, Marian},
booktitle = {Special Sessions in Applications of Computer Algebra},
isbn = {978-3-319-56930-7},
location = {Kalamata, Greece},
pages = {119--136},
publisher = {Springer},
title = {{Finding eigenvalues of self-maps with the Kronecker canonical form}},
doi = {10.1007/978-3-319-56932-1_8},
volume = {198},
year = {2017},
}
@incollection{84,
abstract = {The advent of high-throughput technologies and the concurrent advances in information sciences have led to a data revolution in biology. This revolution is most significant in molecular biology, with an increase in the number and scale of the “omics” projects over the last decade. Genomics projects, for example, have produced impressive advances in our knowledge of the information concealed into genomes, from the many genes that encode for the proteins that are responsible for most if not all cellular functions, to the noncoding regions that are now known to provide regulatory functions. Proteomics initiatives help to decipher the role of post-translation modifications on the protein structures and provide maps of protein-protein interactions, while functional genomics is the field that attempts to make use of the data produced by these projects to understand protein functions. The biggest challenge today is to assimilate the wealth of information provided by these initiatives into a conceptual framework that will help us decipher life. For example, the current views of the relationship between protein structure and function remain fragmented. We know of their sequences, more and more about their structures, we have information on their biological activities, but we have difficulties connecting this dotted line into an informed whole. We lack the experimental and computational tools for directly studying protein structure, function, and dynamics at the molecular and supra-molecular levels. In this chapter, we review some of the current developments in building the computational tools that are needed, focusing on the role that geometry and topology play in these efforts. One of our goals is to raise the general awareness about the importance of geometric methods in elucidating the mysterious foundations of our very existence. Another goal is the broadening of what we consider a geometric algorithm. There is plenty of valuable no-man’s-land between combinatorial and numerical algorithms, and it seems opportune to explore this land with a computational-geometric frame of mind.},
author = {Edelsbrunner, Herbert and Koehl, Patrice},
booktitle = {Handbook of Discrete and Computational Geometry, Third Edition},
editor = {Toth, Csaba and O'Rourke, Joseph and Goodman, Jacob},
pages = {1709--1735},
publisher = {CRC Press},
title = {{Computational topology for structural molecular biology}},
doi = {10.1201/9781315119601},
year = {2017},
}
@article{909,
abstract = {We study the lengths of curves passing through a fixed number of points on the boundary of a convex shape in the plane. We show that, for any convex shape K, there exist four points on the boundary of K such that the length of any curve passing through these points is at least half of the perimeter of K. It is also shown that the same statement does not remain valid with the additional constraint that the points are extreme points of K. Moreover, the factor ½ cannot be achieved with any fixed number of extreme points. We conclude the paper with a few other inequalities related to the perimeter of a convex shape.},
author = {Akopyan, Arseniy and Vysotsky, Vladislav},
issn = {0002-9890},
journal = {The American Mathematical Monthly},
number = {7},
pages = {588--596},
publisher = {Mathematical Association of America},
title = {{On the lengths of curves passing through boundary points of a planar convex shape}},
doi = {10.4169/amer.math.monthly.124.7.588},
volume = {124},
year = {2017},
}
@article{1180,
abstract = {In this article we define an algebraic vertex of a generalized polyhedron and show that the set of algebraic vertices is the smallest set of points needed to define the polyhedron. We prove that the indicator function of a generalized polytope P is a linear combination of indicator functions of simplices whose vertices are algebraic vertices of P. We also show that the indicator function of any generalized polyhedron is a linear combination, with integer coefficients, of indicator functions of cones with apices at algebraic vertices and line-cones. The concept of an algebraic vertex is closely related to the Fourier–Laplace transform. We show that a point v is an algebraic vertex of a generalized polyhedron P if and only if the tangent cone of P, at v, has non-zero Fourier–Laplace transform.},
author = {Akopyan, Arseniy and Bárány, Imre and Robins, Sinai},
issn = {0001-8708},
journal = {Advances in Mathematics},
pages = {627--644},
publisher = {Academic Press},
title = {{Algebraic vertices of non-convex polyhedra}},
doi = {10.1016/j.aim.2016.12.026},
volume = {308},
year = {2017},
}
@article{1433,
abstract = {PHAT is an open-source C++ library for the computation of persistent homology by matrix reduction, targeted towards developers of software for topological data analysis. We aim for a simple generic design that decouples algorithms from data structures without sacrificing efficiency or user-friendliness. We provide numerous different reduction strategies as well as data types to store and manipulate the boundary matrix. We compare the different combinations through extensive experimental evaluation and identify optimization techniques that work well in practical situations. We also compare our software with various other publicly available libraries for persistent homology.},
author = {Bauer, Ulrich and Kerber, Michael and Reininghaus, Jan and Wagner, Hubert},
issn = {0747-7171},
journal = {Journal of Symbolic Computation},
pages = {76--90},
publisher = {Academic Press},
title = {{PHAT - Persistent homology algorithms toolbox}},
doi = {10.1016/j.jsc.2016.03.008},
volume = {78},
year = {2017},
}
@article{1022,
abstract = {We introduce a multiscale topological description of the Megaparsec web-like cosmic matter distribution. Betti numbers and topological persistence offer a powerful means of describing the rich connectivity structure of the cosmic web and of its multiscale arrangement of matter and galaxies. Emanating from algebraic topology and Morse theory, Betti numbers and persistence diagrams represent an extension and deepening of the cosmologically familiar topological genus measure and the related geometric Minkowski functionals. In addition to a description of the mathematical background, this study presents the computational procedure for computing Betti numbers and persistence diagrams for density field filtrations. The field may be computed starting from a discrete spatial distribution of galaxies or simulation particles. The main emphasis of this study concerns an extensive and systematic exploration of the imprint of different web-like morphologies and different levels of multiscale clustering in the corresponding computed Betti numbers and persistence diagrams. To this end, we use Voronoi clustering models as templates for a rich variety of web-like configurations and the fractal-like Soneira-Peebles models exemplify a range of multiscale configurations. We have identified the clear imprint of cluster nodes, filaments, walls, and voids in persistence diagrams, along with that of the nested hierarchy of structures in multiscale point distributions. We conclude by outlining the potential of persistent topology for understanding the connectivity structure of the cosmic web, in large simulations of cosmic structure formation and in the challenging context of the observed galaxy distribution in large galaxy surveys.},
author = {Pranav, Pratyush and Edelsbrunner, Herbert and van de Weygaert, Rien and Vegter, Gert and Kerber, Michael and Jones, Bernard and Wintraecken, Mathijs},
issn = {0035-8711},
journal = {Monthly Notices of the Royal Astronomical Society},
number = {4},
pages = {4281--4310},
publisher = {Oxford University Press},
title = {{The topology of the cosmic web in terms of persistent Betti numbers}},
doi = {10.1093/mnras/stw2862},
volume = {465},
year = {2017},
}