@article{1930, abstract = {Data acquisition, numerical inaccuracies, and sampling often introduce noise in measurements and simulations. Removing this noise is often necessary for efficient analysis and visualization of this data, yet many denoising techniques change the minima and maxima of a scalar field. For example, the extrema can appear or disappear, spatially move, and change their value. This can lead to wrong interpretations of the data, e.g., when the maximum temperature over an area is falsely reported as being a few degrees cooler because the denoising method is unaware of these features. Recently, a topological denoising technique based on a global energy optimization was proposed, which allows the topology-controlled denoising of 2D scalar fields. While this method preserves the minima and maxima, it is constrained by the size of the data. We extend this work to large 2D data and medium-sized 3D data by introducing a novel domain decomposition approach. It allows processing small patches of the domain independently while still avoiding the introduction of new critical points. Furthermore, we propose an iterative refinement of the solution, which decreases the optimization energy compared to the previous approach and therefore gives smoother results that are closer to the input. We illustrate our technique on synthetic and real-world 2D and 3D data sets that highlight potential applications.}, author = {Günther, David and Jacobson, Alec and Reininghaus, Jan and Seidel, Hans-Peter and Sorkine-Hornung, Olga and Weinkauf, Tino}, journal = {IEEE Transactions on Visualization and Computer Graphics}, number = {12}, pages = {2585 -- 2594}, publisher = {IEEE}, title = {{Fast and memory-efficient topological denoising of 2D and 3D scalar fields}}, doi = {10.1109/TVCG.2014.2346432}, volume = {20}, year = {2014}, } @inproceedings{2043, abstract = {Persistent homology is a popular and powerful tool for capturing topological features of data. Advances in algorithms for computing persistent homology have reduced the computation time drastically – as long as the algorithm does not exhaust the available memory. Following up on a recently presented parallel method for persistence computation on shared memory systems [1], we demonstrate that a simple adaptation of the standard reduction algorithm leads to a variant for distributed systems. Our algorithmic design ensures that the data is distributed over the nodes without redundancy; this permits the computation of much larger instances than on a single machine. Moreover, we observe that the parallelism at least compensates for the overhead caused by communication between nodes, and often even speeds up the computation compared to sequential and even parallel shared memory algorithms. In our experiments, we were able to compute the persistent homology of filtrations with more than a billion (10^9) elements within seconds on a cluster with 32 nodes using less than 6 GB of memory per node.}, author = {Bauer, Ulrich and Kerber, Michael and Reininghaus, Jan}, booktitle = {Proceedings of the Workshop on Algorithm Engineering and Experiments}, editor = {McGeoch, Catherine and Meyer, Ulrich}, location = {Portland, USA}, pages = {31 -- 38}, publisher = {Society for Industrial and Applied Mathematics}, title = {{Distributed computation of persistent homology}}, doi = {10.1137/1.9781611973198.4}, year = {2014}, } @inbook{2044, abstract = {We present a parallel algorithm for computing the persistent homology of a filtered chain complex.
Our approach differs from the commonly used reduction algorithm by first computing persistence pairs within local chunks, then simplifying the unpaired columns, and finally applying standard reduction on the simplified matrix. The approach generalizes a technique by Günther et al., which uses discrete Morse theory to compute persistence; we derive the same worst-case complexity bound in a more general context. The algorithm employs several practical optimization techniques, which are of independent interest. Our sequential implementation of the algorithm is competitive with state-of-the-art methods, and we further improve the performance through parallel computation.}, author = {Bauer, Ulrich and Kerber, Michael and Reininghaus, Jan}, booktitle = {Topological Methods in Data Analysis and Visualization III}, editor = {Bremer, Peer-Timo and Hotz, Ingrid and Pascucci, Valerio and Peikert, Ronald}, pages = {103 -- 117}, publisher = {Springer}, title = {{Clear and Compress: Computing Persistent Homology in Chunks}}, doi = {10.1007/978-3-319-04099-8_7}, year = {2014}, } @inproceedings{2153, abstract = {We define a simple, explicit map sending a morphism f : M → N of pointwise finite-dimensional persistence modules to a matching between the barcodes of M and N. Our main result is that, in a precise sense, the quality of this matching is tightly controlled by the lengths of the longest intervals in the barcodes of ker f and coker f. As an immediate corollary, we obtain a new proof of the algebraic stability theorem for persistence barcodes [5, 9], a fundamental result in the theory of persistent homology. In contrast to previous proofs, ours shows explicitly how a δ-interleaving morphism between two persistence modules induces a δ-matching between the barcodes of the two modules. Our main result also specializes to a structure theorem for submodules and quotients of persistence modules.}, author = {Bauer, Ulrich and Lesnick, Michael}, booktitle = {Proceedings of the Annual Symposium on Computational Geometry}, location = {Kyoto, Japan}, pages = {355 -- 364}, publisher = {ACM}, title = {{Induced matchings of barcodes and the algebraic stability of persistence}}, doi = {10.1145/2582112.2582168}, year = {2014}, } @inproceedings{2156, abstract = {We propose a metric for Reeb graphs, called the functional distortion distance. Under this distance, the Reeb graph is stable against small changes of the input functions. At the same time, it remains discriminative in differentiating input functions. In particular, the main result is that the functional distortion distance between two Reeb graphs is bounded from below by the bottleneck distance between both the ordinary and extended persistence diagrams for appropriate dimensions. As an application of our results, we analyze a natural simplification scheme for Reeb graphs, and show that persistent features in the Reeb graph remain persistent under simplification. Understanding the stability of important features of the Reeb graph under simplification is an interesting problem in its own right, and critical to the practical usage of Reeb graphs.
}, author = {Bauer, Ulrich and Ge, Xiaoyin and Wang, Yusu}, booktitle = {Proceedings of the Annual Symposium on Computational Geometry}, location = {Kyoto, Japan}, pages = {464 -- 473}, publisher = {ACM}, title = {{Measuring distance between Reeb graphs}}, doi = {10.1145/2582112.2582169}, year = {2014}, } @inproceedings{2155, abstract = {Given a finite set of points in R^n and a positive radius, we study the Čech, Delaunay-Čech, alpha, and wrap complexes as instances of a generalized discrete Morse theory. We prove that the latter three complexes are simple-homotopy equivalent. Our results have applications in topological data analysis and in the reconstruction of shapes from sampled data.}, author = {Bauer, Ulrich and Edelsbrunner, Herbert}, booktitle = {Proceedings of the Annual Symposium on Computational Geometry}, location = {Kyoto, Japan}, pages = {484 -- 490}, publisher = {ACM}, title = {{The Morse theory of Čech and Delaunay filtrations}}, doi = {10.1145/2582112.2582167}, year = {2014}, } @inproceedings{2177, abstract = {We give evidence for the difficulty of computing Betti numbers of simplicial complexes over a finite field. We do this by reducing the rank computation for sparse matrices to the computation of Betti numbers of simplicial complexes whose size is at most a constant times the number of non-zero matrix entries. Together with the known reduction in the other direction, this implies that the two problems have the same computational complexity.}, author = {Edelsbrunner, Herbert and Parsa, Salman}, booktitle = {Proceedings of the Annual ACM-SIAM Symposium on Discrete Algorithms}, location = {Portland, USA}, pages = {152 -- 160}, publisher = {SIAM}, title = {{On the computational complexity of Betti numbers: Reductions from matrix rank}}, doi = {10.1137/1.9781611973402.11}, year = {2014}, } @article{2184, abstract = {Given topological spaces X, Y, a fundamental problem of algebraic topology is understanding the structure of all continuous maps X → Y. We consider a computational version, where X, Y are given as finite simplicial complexes, and the goal is to compute [X,Y], that is, all homotopy classes of such maps. We solve this problem in the stable range, where for some d ≥ 2, we have dim X ≤ 2d-2 and Y is (d-1)-connected; in particular, Y can be the d-dimensional sphere S^d. The algorithm combines classical tools and ideas from homotopy theory (obstruction theory, Postnikov systems, and simplicial sets) with algorithmic tools from effective algebraic topology (locally effective simplicial sets and objects with effective homology). In contrast, [X,Y] is known to be uncomputable for general X, Y, since for X = S^1 it includes a well-known undecidable problem: testing triviality of the fundamental group of Y. In follow-up papers, the algorithm is shown to run in polynomial time for d fixed, and extended to other problems, such as the extension problem, where we are given a subspace A ⊂ X and a map A → Y and ask whether it extends to a map X → Y, or computing the Z_2-index – everything in the stable range.
Outside the stable range, the extension problem is undecidable.}, author = {Čadek, Martin and Krcál, Marek and Matoušek, Jiří and Sergeraert, Francis and Vokřínek, Lukáš and Wagner, Uli}, journal = {Journal of the ACM}, number = {3}, publisher = {ACM}, title = {{Computing all maps into a sphere}}, doi = {10.1145/2597629}, volume = {61}, year = {2014}, } @inproceedings{2905, abstract = {Persistent homology is a recent grandchild of homology that has found use in science and engineering as well as in mathematics. This paper surveys the method as well as the applications, neglecting completeness in favor of highlighting ideas and directions.}, author = {Edelsbrunner, Herbert and Morozov, Dmitriy}, location = {Kraków, Poland}, pages = {31 -- 50}, publisher = {European Mathematical Society Publishing House}, title = {{Persistent homology: Theory and practice}}, doi = {10.4171/120-1/3}, year = {2014}, } @inproceedings{10892, abstract = {In this paper, we introduce planar matchings on directed pseudo-line arrangements, which yield a planar set of pseudo-line segments such that only matching partners are adjacent. By translating the planar matching problem into a corresponding stable roommates problem, we show that such matchings always exist. Using our new framework, we establish, for the first time, a complete, rigorous definition of weighted straight skeletons, which are based on a so-called wavefront propagation process. We present a generalized and unified approach to treat structural changes in the wavefront that focuses on the restoration of weak planarity by finding planar matchings.}, author = {Biedl, Therese and Huber, Stefan and Palfrader, Peter}, booktitle = {25th International Symposium on Algorithms and Computation (ISAAC 2014)}, isbn = {9783319130743}, issn = {1611-3349}, location = {Jeonju, Korea}, pages = {117--127}, publisher = {Springer Nature}, title = {{Planar matchings for weighted straight skeletons}}, doi = {10.1007/978-3-319-13075-0_10}, volume = {8889}, year = {2014}, } @book{6853, abstract = {This monograph presents a short course in computational geometry and topology. In the first part, the book covers Voronoi diagrams and Delaunay triangulations; it then presents the theory of alpha complexes, which play a crucial role in biology. The central part of the book is homology theory and its computation, including the theory of persistence, which is indispensable for applications such as shape reconstruction. The target audience comprises researchers and practitioners in mathematics, biology, neuroscience and computer science, but the book may also be beneficial to graduate students of these fields.}, author = {Edelsbrunner, Herbert}, isbn = {978-3-319-05956-3}, issn = {2191-5318}, pages = {IX, 110}, publisher = {Springer Nature}, title = {{A Short Course in Computational Geometry and Topology}}, doi = {10.1007/978-3-319-05957-0}, year = {2014}, } @inproceedings{10886, abstract = {We propose a method for visualizing two-dimensional symmetric positive definite tensor fields using the Heat Kernel Signature (HKS). The HKS is derived from the heat kernel and was originally introduced as an isometry-invariant shape signature. Each positive definite tensor field defines a Riemannian manifold by considering the tensor field as a Riemannian metric. On this Riemannian manifold we can apply the definition of the HKS. The resulting scalar quantity is used for the visualization of tensor fields.
The HKS is closely related to the Gaussian curvature of the Riemannian manifold, and the time parameter of the heat kernel allows a multiscale analysis in a natural way. In this way, the HKS represents field-related scale-space properties, enabling a level-of-detail analysis of tensor fields. This makes the HKS an interesting new scalar quantity for tensor fields, which differs significantly from the usual tensor invariants such as the trace or the determinant. A method for visualization and a numerical realization of the HKS for tensor fields is proposed in this chapter. To validate the approach, we apply it to simple illustrative examples, such as isolated critical points, and to a medical diffusion tensor data set.}, author = {Zobel, Valentin and Reininghaus, Jan and Hotz, Ingrid}, booktitle = {Topological Methods in Data Analysis and Visualization III}, isbn = {9783319040981}, issn = {2197-666X}, pages = {249--262}, publisher = {Springer}, title = {{Visualization of two-dimensional symmetric positive definite tensor fields using the heat kernel signature}}, doi = {10.1007/978-3-319-04099-8_16}, year = {2014}, } @inbook{10817, abstract = {The Morse-Smale complex can be either explicitly or implicitly represented. Depending on the type of representation, the simplification of the Morse-Smale complex works differently. In the explicit representation, the Morse-Smale complex is directly simplified by explicitly reconnecting the critical points during the simplification. In the implicit representation, on the other hand, the Morse-Smale complex is given by a combinatorial gradient field. In this setting, the simplification changes the combinatorial flow, which yields an indirect simplification of the Morse-Smale complex. The topological complexity of the Morse-Smale complex is reduced in both representations. However, the simplifications generally yield different results. In this chapter, we emphasize properties of the two representations that cause these differences. We also provide a complexity analysis of the two schemes with respect to running time and memory consumption.}, author = {Günther, David and Reininghaus, Jan and Seidel, Hans-Peter and Weinkauf, Tino}, booktitle = {Topological Methods in Data Analysis and Visualization III}, editor = {Bremer, Peer-Timo and Hotz, Ingrid and Pascucci, Valerio and Peikert, Ronald}, isbn = {9783319040981}, issn = {2197-666X}, pages = {135--150}, publisher = {Springer Nature}, title = {{Notes on the simplification of the Morse-Smale complex}}, doi = {10.1007/978-3-319-04099-8_9}, year = {2014}, } @article{2255, abstract = {Motivated by applications in biology, we present an algorithm for estimating the length of tube-like shapes in 3-dimensional Euclidean space. In a first step, we combine the tube formula of Weyl with integral geometric methods to obtain an integral representation of the length, which we approximate using a variant of the Koksma-Hlawka theorem. In a second step, we use tools from computational topology to decrease the dependence on small perturbations of the shape.
We present computational experiments that shed light on the stability and the convergence rate of our algorithm.}, author = {Edelsbrunner, Herbert and Pausinger, Florian}, issn = {0924-9907}, journal = {Journal of Mathematical Imaging and Vision}, number = {1}, pages = {164 -- 177}, publisher = {Springer}, title = {{Stable length estimates of tube-like shapes}}, doi = {10.1007/s10851-013-0468-x}, volume = {50}, year = {2014}, } @inproceedings{10894, abstract = {PHAT is a C++ library for the computation of persistent homology by matrix reduction. We aim for a simple generic design that decouples algorithms from data structures without sacrificing efficiency or user-friendliness. This makes PHAT a versatile platform for experimenting with algorithmic ideas and comparing them to state-of-the-art implementations.}, author = {Bauer, Ulrich and Kerber, Michael and Reininghaus, Jan and Wagner, Hubert}, booktitle = {ICMS 2014: International Congress on Mathematical Software}, isbn = {9783662441985}, issn = {1611-3349}, location = {Seoul, South Korea}, pages = {137--143}, publisher = {Springer Berlin Heidelberg}, title = {{PHAT – Persistent Homology Algorithms Toolbox}}, doi = {10.1007/978-3-662-44199-2_24}, volume = {8592}, year = {2014}, } @unpublished{2012, abstract = {The classical sphere packing problem asks for the best (infinite) arrangement of non-overlapping unit balls which cover as much space as possible. We define a generalized version of the problem, where we allow each ball a limited amount of overlap with other balls. We study two natural choices of overlap measures and obtain the optimal lattice packings in a parameterized family of lattices which contains the FCC, BCC, and integer lattice.}, author = {Iglesias Ham, Mabel and Kerber, Michael and Uhler, Caroline}, booktitle = {arXiv}, title = {{Sphere packing with limited overlap}}, doi = {10.48550/arXiv.1401.0468}, year = {2014}, } @inproceedings{2209, abstract = {A straight skeleton is a well-known geometric structure, and several algorithms exist to construct the straight skeleton for a given polygon or planar straight-line graph. In this paper, we ask the reverse question: Given the straight skeleton (in the form of a planar straight-line graph, with some rays to infinity), can we reconstruct a planar straight-line graph for which this was the straight skeleton? We show how to reduce this problem to the problem of finding a line that intersects a set of convex polygons. We can find these convex polygons and all such lines in $O(n \log n)$ time in the Real RAM computer model, where $n$ denotes the number of edges of the input graph. We also explain how our approach can be used for recognizing Voronoi diagrams of points, thereby completing a partial solution provided by Ash and Bolker in 1985.}, author = {Biedl, Therese and Held, Martin and Huber, Stefan}, location = {St. Petersburg, Russia}, pages = {37 -- 46}, publisher = {IEEE}, title = {{Recognizing straight skeletons and Voronoi diagrams and reconstructing their input}}, doi = {10.1109/ISVD.2013.11}, year = {2013}, } @inproceedings{2210, abstract = {A straight skeleton is a well-known geometric structure, and several algorithms exist to construct the straight skeleton for a given polygon. In this paper, we ask the reverse question: Given the straight skeleton (in the form of a tree with a drawing in the plane, but with the exact position of the leaves unspecified), can we reconstruct the polygon?
We show that in most cases there exists at most one polygon; in the remaining case there is an infinite number of polygons, determined by one angle that can range over an interval. We can find this (set of) polygon(s) in linear time in the Real RAM computer model.}, author = {Biedl, Therese and Held, Martin and Huber, Stefan}, booktitle = {29th European Workshop on Computational Geometry}, location = {Braunschweig, Germany}, pages = {95 -- 98}, publisher = {TU Braunschweig}, title = {{Reconstructing polygons from embedded straight skeletons}}, year = {2013}, } @article{2304, abstract = {This extended abstract is concerned with the irregularities of distribution of one-dimensional permuted van der Corput sequences that are generated from linear permutations. We show how to obtain upper bounds for the discrepancy and diaphony of these sequences by relating them to Kronecker sequences and applying earlier results of Faure and Niederreiter.}, author = {Pausinger, Florian}, journal = {Electronic Notes in Discrete Mathematics}, pages = {43 -- 50}, publisher = {Elsevier}, title = {{Van der Corput sequences and linear permutations}}, doi = {10.1016/j.endm.2013.07.008}, volume = {43}, year = {2013}, } @inproceedings{2807, abstract = {We consider several basic problems of algebraic topology, with connections to combinatorial and geometric questions, from the point of view of computational complexity. The extension problem asks, given topological spaces X, Y, a subspace A ⊆ X, and a (continuous) map f : A → Y, whether f can be extended to a map X → Y. For computational purposes, we assume that X and Y are represented as finite simplicial complexes, A is a subcomplex of X, and f is given as a simplicial map. In this generality the problem is undecidable, as follows from Novikov's result from the 1950s on the uncomputability of the fundamental group π_1(Y). We thus study the problem under the assumption that, for some k ≥ 2, Y is (k-1)-connected; informally, this means that Y has "no holes up to dimension k-1" (a basic example of such a Y is the sphere S^k). We prove that, on the one hand, this problem is still undecidable for dim X = 2k. On the other hand, for every fixed k ≥ 2, we obtain an algorithm that solves the extension problem in polynomial time assuming that Y is (k-1)-connected and dim X ≤ 2k-1. For dim X ≤ 2k-2, the algorithm also provides a classification of all extensions up to homotopy (continuous deformation). This relies on results of our SODA 2012 paper, and the main new ingredient is a machinery of objects with polynomial-time homology, which is a polynomial-time analog of objects with effective homology developed earlier by Sergeraert et al. We also consider the computation of the higher homotopy groups π_k(Y), k ≥ 2, for a 1-connected Y. Their computability was established by Brown in 1957; we show that π_k(Y) can be computed in polynomial time for every fixed k ≥ 2. On the other hand, Anick proved in 1989 that computing π_k(Y) is #P-hard if k is part of the input, where Y is a cell complex with a certain rather compact encoding. We strengthen his result to #P-hardness for Y given as a simplicial complex.}, author = {Čadek, Martin and Krcál, Marek and Matoušek, Jiří and Vokřínek, Lukáš and Wagner, Uli}, booktitle = {45th Annual ACM Symposium on Theory of Computing}, location = {Palo Alto, CA, United States}, pages = {595 -- 604}, publisher = {ACM}, title = {{Extending continuous maps: Polynomiality and undecidability}}, doi = {10.1145/2488608.2488683}, year = {2013}, }