@article{1930, abstract = {Data acquisition, numerical inaccuracies, and sampling often introduce noise in measurements and simulations. Removing this noise is often necessary for efficient analysis and visualization of this data, yet many denoising techniques change the minima and maxima of a scalar field. For example, the extrema can appear or disappear, spatially move, and change their value. This can lead to wrong interpretations of the data, e.g., when the maximum temperature over an area is falsely reported being a few degrees cooler because the denoising method is unaware of these features. Recently, a topological denoising technique based on a global energy optimization was proposed, which allows the topology-controlled denoising of 2D scalar fields. While this method preserves the minima and maxima, it is constrained by the size of the data. We extend this work to large 2D data and medium-sized 3D data by introducing a novel domain decomposition approach. It allows processing small patches of the domain independently while still avoiding the introduction of new critical points. Furthermore, we propose an iterative refinement of the solution, which decreases the optimization energy compared to the previous approach and therefore gives smoother results that are closer to the input. We illustrate our technique on synthetic and real-world 2D and 3D data sets that highlight potential applications.}, author = {Günther, David and Jacobson, Alec and Reininghaus, Jan and Seidel, Hans-Peter and Sorkine-Hornung, Olga and Weinkauf, Tino}, journal = {IEEE Transactions on Visualization and Computer Graphics}, number = {12}, pages = {2585--2594}, publisher = {IEEE}, title = {Fast and memory-efficient topological denoising of {2D} and {3D} scalar fields}, doi = {10.1109/TVCG.2014.2346432}, volume = {20}, year = {2014}, } @inproceedings{2043, abstract = {Persistent homology is a popular and powerful tool for capturing topological features of data. 
Advances in algorithms for computing persistent homology have reduced the computation time drastically – as long as the algorithm does not exhaust the available memory. Following up on a recently presented parallel method for persistence computation on shared memory systems [1], we demonstrate that a simple adaption of the standard reduction algorithm leads to a variant for distributed systems. Our algorithmic design ensures that the data is distributed over the nodes without redundancy; this permits the computation of much larger instances than on a single machine. Moreover, we observe that the parallelism at least compensates for the overhead caused by communication between nodes, and often even speeds up the computation compared to sequential and even parallel shared memory algorithms. In our experiments, we were able to compute the persistent homology of filtrations with more than a billion ({$10^9$}) elements within seconds on a cluster with 32 nodes using less than 6GB of memory per node.}, author = {Bauer, Ulrich and Kerber, Michael and Reininghaus, Jan}, booktitle = {Proceedings of the Workshop on Algorithm Engineering and Experiments}, editor = {McGeoch, Catherine and Meyer, Ulrich}, location = {Portland, USA}, pages = {31--38}, publisher = {Society for Industrial and Applied Mathematics}, title = {Distributed computation of persistent homology}, doi = {10.1137/1.9781611973198.4}, year = {2014}, } @incollection{2044, abstract = {We present a parallel algorithm for computing the persistent homology of a filtered chain complex. Our approach differs from the commonly used reduction algorithm by first computing persistence pairs within local chunks, then simplifying the unpaired columns, and finally applying standard reduction on the simplified matrix. The approach generalizes a technique by Günther et al., which uses discrete Morse Theory to compute persistence; we derive the same worst-case complexity bound in a more general context. 
The algorithm employs several practical optimization techniques, which are of independent interest. Our sequential implementation of the algorithm is competitive with state-of-the-art methods, and we further improve the performance through parallel computation.}, author = {Bauer, Ulrich and Kerber, Michael and Reininghaus, Jan}, booktitle = {Topological Methods in Data Analysis and Visualization III}, editor = {Bremer, Peer-Timo and Hotz, Ingrid and Pascucci, Valerio and Peikert, Ronald}, pages = {103--117}, publisher = {Springer}, title = {Clear and Compress: Computing Persistent Homology in Chunks}, doi = {10.1007/978-3-319-04099-8_7}, year = {2014}, } @inproceedings{2153, abstract = {We define a simple, explicit map sending a morphism f : M → N of pointwise finite dimensional persistence modules to a matching between the barcodes of M and N. Our main result is that, in a precise sense, the quality of this matching is tightly controlled by the lengths of the longest intervals in the barcodes of ker f and coker f. As an immediate corollary, we obtain a new proof of the algebraic stability theorem for persistence barcodes [5, 9], a fundamental result in the theory of persistent homology. In contrast to previous proofs, ours shows explicitly how a δ-interleaving morphism between two persistence modules induces a δ-matching between the barcodes of the two modules. Our main result also specializes to a structure theorem for submodules and quotients of persistence modules. Copyright is held by the owner/author(s).}, author = {Bauer, Ulrich and Lesnick, Michael}, booktitle = {Proceedings of the Annual Symposium on Computational Geometry}, location = {Kyoto, Japan}, pages = {355--364}, publisher = {ACM}, title = {Induced matchings of barcodes and the algebraic stability of persistence}, doi = {10.1145/2582112.2582168}, year = {2014}, } @inproceedings{2156, abstract = {We propose a metric for Reeb graphs, called the functional distortion distance. 
Under this distance, the Reeb graph is stable against small changes of input functions. At the same time, it remains discriminative at differentiating input functions. In particular, the main result is that the functional distortion distance between two Reeb graphs is bounded from below by the bottleneck distance between both the ordinary and extended persistence diagrams for appropriate dimensions. As an application of our results, we analyze a natural simplification scheme for Reeb graphs, and show that persistent features in Reeb graph remains persistent under simplification. Understanding the stability of important features of the Reeb graph under simplification is an interesting problem on its own right, and critical to the practical usage of Reeb graphs. Copyright is held by the owner/author(s).}, author = {Bauer, Ulrich and Ge, Xiaoyin and Wang, Yusu}, booktitle = {Proceedings of the Annual Symposium on Computational Geometry}, location = {Kyoto, Japan}, pages = {464--473}, publisher = {ACM}, title = {Measuring distance between {Reeb} graphs}, doi = {10.1145/2582112.2582169}, year = {2014}, }