@article{1930, abstract = {Data acquisition, numerical inaccuracies, and sampling often introduce noise in measurements and simulations. Removing this noise is often necessary for efficient analysis and visualization of this data, yet many denoising techniques change the minima and maxima of a scalar field. For example, the extrema can appear or disappear, spatially move, and change their value. This can lead to wrong interpretations of the data, e.g., when the maximum temperature over an area is falsely reported as being a few degrees cooler because the denoising method is unaware of these features. Recently, a topological denoising technique based on a global energy optimization was proposed, which allows the topology-controlled denoising of 2D scalar fields. While this method preserves the minima and maxima, it is constrained by the size of the data. We extend this work to large 2D data and medium-sized 3D data by introducing a novel domain decomposition approach. It allows processing small patches of the domain independently while still avoiding the introduction of new critical points. Furthermore, we propose an iterative refinement of the solution, which decreases the optimization energy compared to the previous approach and therefore gives smoother results that are closer to the input. We illustrate our technique on synthetic and real-world 2D and 3D data sets that highlight potential applications.}, author = {Günther, David and Jacobson, Alec and Reininghaus, Jan and Seidel, Hans-Peter and Sorkine Hornung, Olga and Weinkauf, Tino}, journal = {IEEE Transactions on Visualization and Computer Graphics}, number = {12}, pages = {2585 -- 2594}, publisher = {IEEE}, title = {{Fast and memory-efficient topological denoising of 2D and 3D scalar fields}}, doi = {10.1109/TVCG.2014.2346432}, volume = {20}, year = {2014}, } @inproceedings{2043, abstract = {Persistent homology is a popular and powerful tool for capturing topological features of data. Advances in algorithms for computing persistent homology have reduced the computation time drastically – as long as the algorithm does not exhaust the available memory. Following up on a recently presented parallel method for persistence computation on shared memory systems [1], we demonstrate that a simple adaptation of the standard reduction algorithm leads to a variant for distributed systems. Our algorithmic design ensures that the data is distributed over the nodes without redundancy; this permits the computation of much larger instances than on a single machine. Moreover, we observe that the parallelism at least compensates for the overhead caused by communication between nodes, and often even speeds up the computation compared to sequential and even parallel shared memory algorithms. In our experiments, we were able to compute the persistent homology of filtrations with more than a billion ($10^9$) elements within seconds on a cluster with 32 nodes using less than 6 GB of memory per node.}, author = {Bauer, Ulrich and Kerber, Michael and Reininghaus, Jan}, booktitle = {Proceedings of the Workshop on Algorithm Engineering and Experiments}, editor = {McGeoch, Catherine and Meyer, Ulrich}, location = {Portland, USA}, pages = {31 -- 38}, publisher = {Society for Industrial and Applied Mathematics}, title = {{Distributed computation of persistent homology}}, doi = {10.1137/1.9781611973198.4}, year = {2014}, } @inbook{2044, abstract = {We present a parallel algorithm for computing the persistent homology of a filtered chain complex. 
Our approach differs from the commonly used reduction algorithm by first computing persistence pairs within local chunks, then simplifying the unpaired columns, and finally applying standard reduction on the simplified matrix. The approach generalizes a technique by Günther et al., which uses discrete Morse Theory to compute persistence; we derive the same worst-case complexity bound in a more general context. The algorithm employs several practical optimization techniques, which are of independent interest. Our sequential implementation of the algorithm is competitive with state-of-the-art methods, and we further improve the performance through parallel computation.}, author = {Bauer, Ulrich and Kerber, Michael and Reininghaus, Jan}, booktitle = {Topological Methods in Data Analysis and Visualization III}, editor = {Bremer, Peer-Timo and Hotz, Ingrid and Pascucci, Valerio and Peikert, Ronald}, pages = {103 -- 117}, publisher = {Springer}, title = {{Clear and Compress: Computing Persistent Homology in Chunks}}, doi = {10.1007/978-3-319-04099-8_7}, year = {2014}, } @inproceedings{2153, abstract = {We define a simple, explicit map sending a morphism f : M → N of pointwise finite dimensional persistence modules to a matching between the barcodes of M and N. Our main result is that, in a precise sense, the quality of this matching is tightly controlled by the lengths of the longest intervals in the barcodes of ker f and coker f . As an immediate corollary, we obtain a new proof of the algebraic stability theorem for persistence barcodes [5, 9], a fundamental result in the theory of persistent homology. In contrast to previous proofs, ours shows explicitly how a δ-interleaving morphism between two persistence modules induces a δ-matching between the barcodes of the two modules. Our main result also specializes to a structure theorem for submodules and quotients of persistence modules. Copyright is held by the owner/author(s).}, author = {Bauer, Ulrich and Lesnick, Michael}, booktitle = {Proceedings of the Annual Symposium on Computational Geometry}, location = {Kyoto, Japan}, pages = {355 -- 364}, publisher = {ACM}, title = {{Induced matchings of barcodes and the algebraic stability of persistence}}, doi = {10.1145/2582112.2582168}, year = {2014}, } @inproceedings{2156, abstract = {We propose a metric for Reeb graphs, called the functional distortion distance. Under this distance, the Reeb graph is stable against small changes of input functions. At the same time, it remains discriminative at differentiating input functions. In particular, the main result is that the functional distortion distance between two Reeb graphs is bounded from below by the bottleneck distance between both the ordinary and extended persistence diagrams for appropriate dimensions. As an application of our results, we analyze a natural simplification scheme for Reeb graphs, and show that persistent features in Reeb graph remains persistent under simplification. Understanding the stability of important features of the Reeb graph under simplification is an interesting problem on its own right, and critical to the practical usage of Reeb graphs. 
}, author = {Bauer, Ulrich and Ge, Xiaoyin and Wang, Yusu}, booktitle = {Proceedings of the Annual Symposium on Computational Geometry}, location = {Kyoto, Japan}, pages = {464 -- 473}, publisher = {ACM}, title = {{Measuring distance between Reeb graphs}}, doi = {10.1145/2582112.2582169}, year = {2014}, } @inproceedings{2155, abstract = {Given a finite set of points in $\mathbb{R}^n$ and a positive radius, we study the Čech, Delaunay-Čech, alpha, and wrap complexes as instances of a generalized discrete Morse theory. We prove that the latter three complexes are simple-homotopy equivalent. Our results have applications in topological data analysis and in the reconstruction of shapes from sampled data.}, author = {Bauer, Ulrich and Edelsbrunner, Herbert}, booktitle = {Proceedings of the Annual Symposium on Computational Geometry}, location = {Kyoto, Japan}, pages = {484 -- 490}, publisher = {ACM}, title = {{The Morse theory of Čech and Delaunay filtrations}}, doi = {10.1145/2582112.2582167}, year = {2014}, } @inproceedings{2177, abstract = {We give evidence for the difficulty of computing Betti numbers of simplicial complexes over a finite field. We do this by reducing the rank computation for sparse matrices with $t_0$ non-zero entries to computing Betti numbers of simplicial complexes consisting of at most a constant times $t_0$ simplices. Together with the known reduction in the other direction, this implies that the two problems have the same computational complexity.}, author = {Edelsbrunner, Herbert and Parsa, Salman}, booktitle = {Proceedings of the Annual ACM-SIAM Symposium on Discrete Algorithms}, location = {Portland, USA}, pages = {152 -- 160}, publisher = {SIAM}, title = {{On the computational complexity of Betti numbers: Reductions from matrix rank}}, doi = {10.1137/1.9781611973402.11}, year = {2014}, } @article{2184, abstract = {Given topological spaces X,Y, a fundamental problem of algebraic topology is understanding the structure of all continuous maps X → Y. We consider a computational version, where X,Y are given as finite simplicial complexes, and the goal is to compute [X,Y], that is, all homotopy classes of such maps. We solve this problem in the stable range, where for some d ≥ 2, we have dim X ≤ 2d-2 and Y is (d-1)-connected; in particular, Y can be the d-dimensional sphere $S^d$. The algorithm combines classical tools and ideas from homotopy theory (obstruction theory, Postnikov systems, and simplicial sets) with algorithmic tools from effective algebraic topology (locally effective simplicial sets and objects with effective homology). In contrast, [X,Y] is known to be uncomputable for general X,Y, since for $X = S^1$ it includes a well-known undecidable problem: testing triviality of the fundamental group of Y. In follow-up papers, the algorithm is shown to run in polynomial time for d fixed, and extended to other problems, such as the extension problem, where we are given a subspace A ⊂ X and a map A → Y and ask whether it extends to a map X → Y, or computing the $\mathbb{Z}_2$-index; everything in the stable range. 
Outside the stable range, the extension problem is undecidable.}, author = {Čadek, Martin and Krcál, Marek and Matoušek, Jiří and Sergeraert, Francis and Vokřínek, Lukáš and Wagner, Uli}, journal = {Journal of the ACM}, number = {3}, publisher = {ACM}, title = {{Computing all maps into a sphere}}, doi = {10.1145/2597629}, volume = {61}, year = {2014}, } @inproceedings{2905, abstract = {Persistent homology is a recent grandchild of homology that has found use in science and engineering as well as in mathematics. This paper surveys the method as well as the applications, neglecting completeness in favor of highlighting ideas and directions.}, author = {Edelsbrunner, Herbert and Morozovy, Dmitriy}, location = {Kraków, Poland}, pages = {31 -- 50}, publisher = {European Mathematical Society Publishing House}, title = {{Persistent homology: Theory and practice}}, doi = {10.4171/120-1/3}, year = {2014}, } @inproceedings{10892, abstract = {In this paper, we introduce planar matchings on directed pseudo-line arrangements, which yield a planar set of pseudo-line segments such that only matching-partners are adjacent. By translating the planar matching problem into a corresponding stable roommates problem we show that such matchings always exist. Using our new framework, we establish, for the first time, a complete, rigorous definition of weighted straight skeletons, which are based on a so-called wavefront propagation process. We present a generalized and unified approach to treat structural changes in the wavefront that focuses on the restoration of weak planarity by finding planar matchings.}, author = {Biedl, Therese and Huber, Stefan and Palfrader, Peter}, booktitle = {25th International Symposium, ISAAC 2014}, isbn = {9783319130743}, issn = {1611-3349}, location = {Jeonju, Korea}, pages = {117--127}, publisher = {Springer Nature}, title = {{Planar matchings for weighted straight skeletons}}, doi = {10.1007/978-3-319-13075-0_10}, volume = {8889}, year = {2014}, } @book{6853, abstract = {This monograph presents a short course in computational geometry and topology. In the first part the book covers Voronoi diagrams and Delaunay triangulations, then it presents the theory of alpha complexes which play a crucial role in biology. The central part of the book is the homology theory and their computation, including the theory of persistence which is indispensable for applications, e.g. shape reconstruction. The target audience comprises researchers and practitioners in mathematics, biology, neuroscience and computer science, but the book may also be beneficial to graduate students of these fields.}, author = {Edelsbrunner, Herbert}, isbn = {9-783-3190-5956-3}, issn = {2191-5318}, pages = {IX, 110}, publisher = {Springer Nature}, title = {{A Short Course in Computational Geometry and Topology}}, doi = {10.1007/978-3-319-05957-0}, year = {2014}, } @inproceedings{10886, abstract = {We propose a method for visualizing two-dimensional symmetric positive definite tensor fields using the Heat Kernel Signature (HKS). The HKS is derived from the heat kernel and was originally introduced as an isometry invariant shape signature. Each positive definite tensor field defines a Riemannian manifold by considering the tensor field as a Riemannian metric. On this Riemmanian manifold we can apply the definition of the HKS. The resulting scalar quantity is used for the visualization of tensor fields. 
The HKS is closely related to the Gaussian curvature of the Riemannian manifold and the time parameter of the heat kernel allows a multiscale analysis in a natural way. In this way, the HKS represents field related scale space properties, enabling a level of detail analysis of tensor fields. This makes the HKS an interesting new scalar quantity for tensor fields, which differs significantly from usual tensor invariants like the trace or the determinant. A method for visualization and a numerical realization of the HKS for tensor fields is proposed in this chapter. To validate the approach we apply it to some illustrating simple examples as isolated critical points and to a medical diffusion tensor data set.}, author = {Zobel, Valentin and Reininghaus, Jan and Hotz, Ingrid}, booktitle = {Topological Methods in Data Analysis and Visualization III }, isbn = {9783319040981}, issn = {2197-666X}, pages = {249--262}, publisher = {Springer}, title = {{Visualization of two-dimensional symmetric positive definite tensor fields using the heat kernel signature}}, doi = {10.1007/978-3-319-04099-8_16}, year = {2014}, } @inbook{10817, abstract = {The Morse-Smale complex can be either explicitly or implicitly represented. Depending on the type of representation, the simplification of the Morse-Smale complex works differently. In the explicit representation, the Morse-Smale complex is directly simplified by explicitly reconnecting the critical points during the simplification. In the implicit representation, on the other hand, the Morse-Smale complex is given by a combinatorial gradient field. In this setting, the simplification changes the combinatorial flow, which yields an indirect simplification of the Morse-Smale complex. The topological complexity of the Morse-Smale complex is reduced in both representations. However, the simplifications generally yield different results. In this chapter, we emphasize properties of the two representations that cause these differences. We also provide a complexity analysis of the two schemes with respect to running time and memory consumption.}, author = {Günther, David and Reininghaus, Jan and Seidel, Hans-Peter and Weinkauf, Tino}, booktitle = {Topological Methods in Data Analysis and Visualization III.}, editor = {Bremer, Peer-Timo and Hotz, Ingrid and Pascucci, Valerio and Peikert, Ronald}, isbn = {9783319040981}, issn = {2197-666X}, pages = {135--150}, publisher = {Springer Nature}, title = {{Notes on the simplification of the Morse-Smale complex}}, doi = {10.1007/978-3-319-04099-8_9}, year = {2014}, } @article{2255, abstract = {Motivated by applications in biology, we present an algorithm for estimating the length of tube-like shapes in 3-dimensional Euclidean space. In a first step, we combine the tube formula of Weyl with integral geometric methods to obtain an integral representation of the length, which we approximate using a variant of the Koksma-Hlawka Theorem. In a second step, we use tools from computational topology to decrease the dependence on small perturbations of the shape. 
We present computational experiments that shed light on the stability and the convergence rate of our algorithm.}, author = {Edelsbrunner, Herbert and Pausinger, Florian}, issn = {09249907}, journal = {Journal of Mathematical Imaging and Vision}, number = {1}, pages = {164 -- 177}, publisher = {Springer}, title = {{Stable length estimates of tube-like shapes}}, doi = {10.1007/s10851-013-0468-x}, volume = {50}, year = {2014}, } @inproceedings{10894, abstract = {PHAT is a C++ library for the computation of persistent homology by matrix reduction. We aim for a simple generic design that decouples algorithms from data structures without sacrificing efficiency or user-friendliness. This makes PHAT a versatile platform for experimenting with algorithmic ideas and comparing them to state of the art implementations.}, author = {Bauer, Ulrich and Kerber, Michael and Reininghaus, Jan and Wagner, Hubert}, booktitle = {ICMS 2014: International Congress on Mathematical Software}, isbn = {9783662441985}, issn = {1611-3349}, location = {Seoul, South Korea}, pages = {137--143}, publisher = {Springer Berlin Heidelberg}, title = {{PHAT – Persistent Homology Algorithms Toolbox}}, doi = {10.1007/978-3-662-44199-2_24}, volume = {8592}, year = {2014}, } @unpublished{2012, abstract = {The classical sphere packing problem asks for the best (infinite) arrangement of non-overlapping unit balls which cover as much space as possible. We define a generalized version of the problem, where we allow each ball a limited amount of overlap with other balls. We study two natural choices of overlap measures and obtain the optimal lattice packings in a parameterized family of lattices which contains the FCC, BCC, and integer lattice.}, author = {Iglesias Ham, Mabel and Kerber, Michael and Uhler, Caroline}, booktitle = {arXiv}, title = {{Sphere packing with limited overlap}}, doi = {10.48550/arXiv.1401.0468}, year = {2014}, } @inproceedings{2209, abstract = {A straight skeleton is a well-known geometric structure, and several algorithms exist to construct the straight skeleton for a given polygon or planar straight-line graph. In this paper, we ask the reverse question: Given the straight skeleton (in form of a planar straight-line graph, with some rays to infinity), can we reconstruct a planar straight-line graph for which this was the straight skeleton? We show how to reduce this problem to the problem of finding a line that intersects a set of convex polygons. We can find these convex polygons and all such lines in $O(nlog n)$ time in the Real RAM computer model, where $n$ denotes the number of edges of the input graph. We also explain how our approach can be used for recognizing Voronoi diagrams of points, thereby completing a partial solution provided by Ash and Bolker in 1985. }, author = {Biedl, Therese and Held, Martin and Huber, Stefan}, location = {St. Petersburg, Russia}, pages = {37 -- 46}, publisher = {IEEE}, title = {{Recognizing straight skeletons and Voronoi diagrams and reconstructing their input}}, doi = {10.1109/ISVD.2013.11}, year = {2013}, } @inproceedings{2210, abstract = {A straight skeleton is a well-known geometric structure, and several algorithms exist to construct the straight skeleton for a given polygon. In this paper, we ask the reverse question: Given the straight skeleton (in form of a tree with a drawing in the plane, but with the exact position of the leaves unspecified), can we reconstruct the polygon? 
We show that in most cases there exists at most one polygon; in the remaining case there is an infinite number of polygons determined by one angle that can range in an interval. We can find this (set of) polygon(s) in linear time in the Real RAM computer model.}, author = {Biedl, Therese and Held, Martin and Huber, Stefan}, booktitle = {29th European Workshop on Computational Geometry}, location = {Braunschweig, Germany}, pages = {95 -- 98}, publisher = {TU Braunschweig}, title = {{Reconstructing polygons from embedded straight skeletons}}, year = {2013}, } @article{2304, abstract = {This extended abstract is concerned with the irregularities of distribution of one-dimensional permuted van der Corput sequences that are generated from linear permutations. We show how to obtain upper bounds for the discrepancy and diaphony of these sequences, by relating them to Kronecker sequences and applying earlier results of Faure and Niederreiter.}, author = {Pausinger, Florian}, journal = {Electronic Notes in Discrete Mathematics}, pages = {43 -- 50}, publisher = {Elsevier}, title = {{Van der Corput sequences and linear permutations}}, doi = {10.1016/j.endm.2013.07.008}, volume = {43}, year = {2013}, } @inproceedings{2807, abstract = {We consider several basic problems of algebraic topology, with connections to combinatorial and geometric questions, from the point of view of computational complexity. The extension problem asks, given topological spaces X, Y, a subspace A ⊆ X, and a (continuous) map f: A → Y, whether f can be extended to a map X → Y. For computational purposes, we assume that X and Y are represented as finite simplicial complexes, A is a subcomplex of X, and f is given as a simplicial map. In this generality the problem is undecidable, as follows from Novikov's result from the 1950s on uncomputability of the fundamental group π1(Y). We thus study the problem under the assumption that, for some k ≥ 2, Y is (k - 1)-connected; informally, this means that Y has "no holes up to dimension k-1" (a basic example of such a Y is the sphere $S^k$). We prove that, on the one hand, this problem is still undecidable for dim X = 2k. On the other hand, for every fixed k ≥ 2, we obtain an algorithm that solves the extension problem in polynomial time assuming that Y is (k - 1)-connected and dim X ≤ 2k - 1. For dim X ≤ 2k - 2, the algorithm also provides a classification of all extensions up to homotopy (continuous deformation). This relies on results of our SODA 2012 paper, and the main new ingredient is a machinery of objects with polynomial-time homology, which is a polynomial-time analog of objects with effective homology developed earlier by Sergeraert et al. We also consider the computation of the higher homotopy groups πk(Y), k ≥ 2, for a 1-connected Y. Their computability was established by Brown in 1957; we show that πk(Y) can be computed in polynomial time for every fixed k ≥ 2. On the other hand, Anick proved in 1989 that computing πk(Y) is #P-hard if k is part of the input, where Y is a cell complex with a certain rather compact encoding. We strengthen his result to #P-hardness for Y given as a simplicial complex. 
}, author = {Čadek, Martin and Krcál, Marek and Matoušek, Jiří and Vokřínek, Lukáš and Wagner, Uli}, booktitle = {45th Annual ACM Symposium on theory of computing}, location = {Palo Alto, CA, United States}, pages = {595 -- 604}, publisher = {ACM}, title = {{Extending continuous maps: Polynomiality and undecidability}}, doi = {10.1145/2488608.2488683}, year = {2013}, } @inproceedings{2812, abstract = {We consider the problem of deciding whether the persistent homology group of a simplicial pair (K, L) can be realized as the homology H* (X) of some complex X with L ⊂ X ⊂ K. We show that this problem is NP-complete even if K is embedded in ℝ3. As a consequence, we show that it is NP-hard to simplify level and sublevel sets of scalar functions on S3 within a given tolerance constraint. This problem has relevance to the visualization of medical images by isosurfaces. We also show an implication to the theory of well groups of scalar functions: not every well group can be realized by some level set, and deciding whether a well group can be realized is NP-hard.}, author = {Attali, Dominique and Bauer, Ulrich and Devillers, Olivier and Glisse, Marc and Lieutier, André}, booktitle = {Proceedings of the 29th annual symposium on Computational Geometry}, location = {Rio de Janeiro, Brazil}, pages = {117 -- 125}, publisher = {ACM}, title = {{Homological reconstruction and simplification in R3}}, doi = {10.1145/2462356.2462373}, year = {2013}, } @article{2822, abstract = {Identification of genes that control root system architecture in crop plants requires innovations that enable high-throughput and accurate measurements of root system architecture through time. We demonstrate the ability of a semiautomated 3D in vivo imaging and digital phenotyping pipeline to interrogate the quantitative genetic basis of root system growth in a rice biparental mapping population, Bala x Azucena. We phenotyped >1,400 3D root models and >57,000 2D images for a suite of 25 traits that quantified the distribution, shape, extent of exploration, and the intrinsic size of root networks at days 12, 14, and 16 of growth in a gellan gum medium. From these data we identified 89 quantitative trait loci, some of which correspond to those found previously in soil-grown plants, and provide evidence for genetic tradeoffs in root growth allocations, such as between the extent and thoroughness of exploration. We also developed a multivariate method for generating and mapping central root architecture phenotypes and used it to identify five major quantitative trait loci (r2 = 24-37%), two of which were not identified by our univariate analysis. 
Our imaging and analytical platform provides a means to identify genes with high potential for improving root traits and agronomic qualities of crops.}, author = {Topp, Christopher and Iyer Pascuzzi, Anjali and Anderson, Jill and Lee, Cheng and Zurek, Paul and Symonova, Olga and Zheng, Ying and Bucksch, Alexander and Mileyko, Yuriy and Galkovskyi, Taras and Moore, Brad and Harer, John and Edelsbrunner, Herbert and Mitchell Olds, Thomas and Weitz, Joshua and Benfey, Philip}, journal = {PNAS}, number = {18}, pages = {E1695 -- E1704}, publisher = {National Academy of Sciences}, title = {{3D phenotyping and quantitative trait locus mapping identify core regions of the rice genome controlling root architecture}}, doi = {10.1073/pnas.1304354110}, volume = {110}, year = {2013}, } @inproceedings{2843, abstract = {Mathematical objects can be measured unambiguously, but not so objects from our physical world. Even the total length of tube-like shapes has its difficulties. We introduce a combination of geometric, probabilistic, and topological methods to design a stable length estimate for tube-like shapes; that is, one that is insensitive to small shape changes.}, author = {Edelsbrunner, Herbert and Pausinger, Florian}, booktitle = {17th IAPR International Conference on Discrete Geometry for Computer Imagery}, location = {Seville, Spain}, pages = {XV -- XIX}, publisher = {Springer}, title = {{Stable length estimates of tube-like shapes}}, doi = {10.1007/978-3-642-37067-0}, volume = {7749}, year = {2013}, } @article{2859, abstract = {Given a continuous function $f: X \to \mathbb{R}$ on a topological space, we consider the preimages of intervals and their homology groups and show how to read the ranks of these groups from the extended persistence diagram of f. In addition, we quantify the robustness of the homology classes under perturbations of f using well groups, and we show how to read the ranks of these groups from the same extended persistence diagram. The special case $X = \mathbb{R}^3$ has ramifications in the fields of medical imaging and scientific visualization.}, author = {Bendich, Paul and Edelsbrunner, Herbert and Morozov, Dmitriy and Patel, Amit}, journal = {Homology, Homotopy and Applications}, number = {1}, pages = {51 -- 72}, publisher = {International Press}, title = {{Homology and robustness of level and interlevel sets}}, doi = {10.4310/HHA.2013.v15.n1.a3}, volume = {15}, year = {2013}, } @article{2887, abstract = {Root system growth and development is highly plastic and is influenced by the surrounding environment. Roots frequently grow in heterogeneous environments that include interactions from neighboring plants and physical impediments in the rhizosphere. To investigate how planting density and physical objects affect root system growth, we grew rice in a transparent gel system in close proximity with another plant or a physical object. Root systems were imaged and reconstructed in three dimensions. Root-root interaction strength was calculated using quantitative metrics that characterize the extent to which the reconstructed root systems overlap each other. Surprisingly, we found the overlap of root systems of the same genotype was significantly higher than that of root systems of different genotypes. Root systems of the same genotype tended to grow toward each other but those of different genotypes appeared to avoid each other. Shoot separation experiments excluded the possibility of aerial interactions, suggesting root communication. 
Staggered plantings indicated that interactions likely occur at root tips in close proximity. Recognition of obstacles also occurred through root tips, but through physical contact in a size-dependent manner. These results indicate that root systems use two different forms of communication to recognize objects and alter root architecture: root-root recognition, possibly mediated through root exudates, and root-object recognition mediated by physical contact at the root tips. This finding suggests that root tips act as local sensors that integrate rhizosphere information into global root architectural changes.}, author = {Fang, Suqin and Clark, Randy and Zheng, Ying and Iyer Pascuzzi, Anjali and Weitz, Joshua and Kochian, Leon and Edelsbrunner, Herbert and Liao, Hong and Benfey, Philip}, journal = {PNAS}, number = {7}, pages = {2670 -- 2675}, publisher = {National Academy of Sciences}, title = {{Genotypic recognition and spatial responses by rice roots}}, doi = {10.1073/pnas.1222821110}, volume = {110}, year = {2013}, } @inproceedings{2901, abstract = { We introduce the M-modes problem for graphical models: predicting the M label configurations of highest probability that are at the same time local maxima of the probability landscape. M-modes have multiple possible applications: because they are intrinsically diverse, they provide a principled alternative to non-maximum suppression techniques for structured prediction, they can act as codebook vectors for quantizing the configuration space, or they can form component centers for mixture model approximation. We present two algorithms for solving the M-modes problem. The first algorithm solves the problem in polynomial time when the underlying graphical model is a simple chain. The second algorithm solves the problem for junction chains. In synthetic and real dataset, we demonstrate how M-modes can improve the performance of prediction. We also use the generated modes as a tool to understand the topography of the probability distribution of configurations, for example with relation to the training set size and amount of noise in the data. }, author = {Chen, Chao and Kolmogorov, Vladimir and Yan, Zhu and Metaxas, Dimitris and Lampert, Christoph}, location = {Scottsdale, AZ, United States}, pages = {161 -- 169}, publisher = {JMLR}, title = {{Computing the M most probable modes of a graphical model}}, volume = {31}, year = {2013}, } @inproceedings{2906, abstract = {Motivated by an application in cell biology, we describe an extension of the kinetic data structures framework from Delaunay triangulations to fixed-radius alpha complexes. Our algorithm is implemented using CGAL, following the exact geometric computation paradigm. We report on several techniques to accelerate the computation that turn our implementation applicable to the underlying biological problem.}, author = {Kerber, Michael and Edelsbrunner, Herbert}, booktitle = {2013 Proceedings of the 15th Workshop on Algorithm Engineering and Experiments}, location = {New Orleans, LA, United States}, pages = {70 -- 77}, publisher = {Society of Industrial and Applied Mathematics}, title = {{3D kinetic alpha complexes and their implementation}}, doi = {10.1137/1.9781611972931.6}, year = {2013}, } @article{2815, abstract = {The fact that a sum of isotropic Gaussian kernels can have more modes than kernels is surprising. Extra (ghost) modes do not exist in ℝ1 and are generally not well studied in higher dimensions. 
We study a configuration of n+1 Gaussian kernels for which there are exactly n+2 modes. We show that all modes lie on a finite set of lines, which we call axes, and study the restriction of the Gaussian mixture to these axes in order to discover that there are an exponential number of critical points in this configuration. Although the existence of ghost modes remained unknown due to the difficulty of finding examples in ℝ2, we show that the resilience of ghost modes grows like the square root of the dimension. In addition, we exhibit finite configurations of isotropic Gaussian kernels with superlinearly many modes.}, author = {Edelsbrunner, Herbert and Fasy, Brittany Terese and Rote, Günter}, issn = {1432-0444}, journal = {Discrete & Computational Geometry}, number = {4}, pages = {797 -- 822}, publisher = {Springer}, title = {{Add isotropic Gaussian kernels at own risk: More and more resilient modes in higher dimensions}}, doi = {10.1007/s00454-013-9517-x}, volume = {49}, year = {2013}, } @article{2939, abstract = {In this paper, we present the first output-sensitive algorithm to compute the persistence diagram of a filtered simplicial complex. For any $\Gamma > 0$, it returns only those homology classes with persistence at least $\Gamma$. Instead of the classical reduction via column operations, our algorithm performs rank computations on submatrices of the boundary matrix. For an arbitrary constant $\delta \in (0, 1)$, the running time is $O(C_{(1-\delta)\Gamma} R_d(n) \log n)$, where $C_{(1-\delta)\Gamma}$ is the number of homology classes with persistence at least $(1-\delta)\Gamma$, $n$ is the total number of simplices in the complex, $d$ its dimension, and $R_d(n)$ is the complexity of computing the rank of an $n \times n$ matrix with $O(dn)$ nonzero entries. Depending on the choice of the rank algorithm, this yields a deterministic $O(C_{(1-\delta)\Gamma} n^{2.376})$ algorithm, an $O(C_{(1-\delta)\Gamma} n^{2.28})$ Las-Vegas algorithm, or an $O(C_{(1-\delta)\Gamma} n^{2+\varepsilon})$ Monte-Carlo algorithm for an arbitrary $\varepsilon > 0$. The space complexity of the Monte-Carlo version is bounded by $O(dn) = O(n \log n)$.}, author = {Chen, Chao and Kerber, Michael}, journal = {Computational Geometry: Theory and Applications}, number = {4}, pages = {435 -- 447}, publisher = {Elsevier}, title = {{An output-sensitive algorithm for persistent homology}}, doi = {10.1016/j.comgeo.2012.02.010}, volume = {46}, year = {2013}, } @inproceedings{10897, abstract = {Taking images is an efficient way to collect data about the physical world. It can be done fast and in exquisite detail. By definition, image processing is the field that concerns itself with the computation aimed at harnessing the information contained in images [10]. This talk is concerned with topological information. Our main thesis is that persistent homology [5] is a useful method to quantify and summarize topological information, building a bridge that connects algebraic topology with applications. 
We provide supporting evidence for this thesis by touching upon four technical developments in the overlap between persistent homology and image processing.}, author = {Edelsbrunner, Herbert}, booktitle = {Graph-Based Representations in Pattern Recognition}, isbn = {9783642382208}, issn = {1611-3349}, location = {Vienna, Austria}, pages = {182--183}, publisher = {Springer Nature}, title = {{Persistent homology in image processing}}, doi = {10.1007/978-3-642-38221-5_19}, volume = {7877}, year = {2013}, } @article{2849, author = {Edelsbrunner, Herbert and Strelkova, Nataliya}, journal = {Russian Mathematical Surveys}, number = {6}, pages = {1167 -- 1168}, publisher = {IOP Publishing Ltd.}, title = {{On the configuration space of Steiner minimal trees}}, doi = {10.1070/RM2012v067n06ABEH004820}, volume = {67}, year = {2012}, } @inproceedings{2903, abstract = {In order to enjoy a digital version of the Jordan Curve Theorem, it is common to use the closed topology for the foreground and the open topology for the background of a 2-dimensional binary image. In this paper, we introduce a single topology that enjoys this theorem for all thresholds decomposing a real-valued image into foreground and background. This topology is easy to construct and it generalizes to n-dimensional images.}, author = {Edelsbrunner, Herbert and Symonova, Olga}, location = {New Brunswick, NJ, USA }, pages = {41 -- 48}, publisher = {IEEE}, title = {{The adaptive topology of a digital image}}, doi = {10.1109/ISVD.2012.11}, year = {2012}, } @article{2941, author = {Dolbilin, Nikolai and Edelsbrunner, Herbert and Musin, Oleg}, journal = {Russian Mathematical Surveys}, number = {4}, pages = {781 -- 783}, publisher = {IOP Publishing}, title = {{On the optimality of functionals over triangulations of Delaunay sets}}, doi = {10.1070/RM2012v067n04ABEH004807}, volume = {67}, year = {2012}, } @inproceedings{2971, abstract = {We study the task of interactive semantic labeling of a segmentation hierarchy. To this end we propose a framework interleaving two components: an automatic labeling step, based on a Conditional Random Field whose dependencies are defined by the inclusion tree of the segmentation hierarchy, and an interaction step that integrates incremental input from a human user. Evaluated on two distinct datasets, the proposed interactive approach efficiently integrates human interventions and illustrates the advantages of structured prediction in an interactive framework. }, author = {Zankl, Georg and Haxhimusa, Yll and Ion, Adrian}, location = {Graz, Austria}, pages = {11 -- 20}, publisher = {Springer}, title = {{Interactive labeling of image segmentation hierarchies}}, doi = {10.1007/978-3-642-32717-9_2}, volume = {7476}, year = {2012}, } @article{3120, abstract = {We introduce a strategy based on Kustin-Miller unprojection that allows us to construct many hundreds of Gorenstein codimension 4 ideals with 9 × 16 resolutions (that is, nine equations and sixteen first syzygies). Our two basic games are called Tom and Jerry; the main application is the biregular construction of most of the anticanonically polarised Mori Fano 3-folds of Altinok's thesis. There are 115 cases whose numerical data (in effect, the Hilbert series) allow a Type I projection. In every case, at least one Tom and one Jerry construction works, providing at least two deformation families of quasismooth Fano 3-folds having the same numerics but different topology. 
}, author = {Brown, Gavin and Kerber, Michael and Reid, Miles}, journal = {Compositio Mathematica}, number = {4}, pages = {1171 -- 1194}, publisher = {Cambridge University Press}, title = {{Fano 3-folds in codimension 4, Tom and Jerry. Part I}}, doi = {10.1112/S0010437X11007226}, volume = {148}, year = {2012}, } @inproceedings{3133, abstract = {This note contributes to the point calculus of persistent homology by extending Alexander duality from spaces to real-valued functions. Given a perfect Morse function $f: S^{n+1} \to [0, 1]$ and a decomposition $S^{n+1} = U \cup V$ into two (n + 1)-manifolds with common boundary M, we prove elementary relationships between the persistence diagrams of f restricted to U, to V, and to M.}, author = {Edelsbrunner, Herbert and Kerber, Michael}, booktitle = {Proceedings of the twenty-eighth annual symposium on Computational geometry}, location = {Chapel Hill, NC, USA}, pages = {249 -- 258}, publisher = {ACM}, title = {{Alexander duality for functions: The persistent behavior of land and water and shore}}, doi = {10.1145/2261250.2261287}, year = {2012}, } @inproceedings{3134, abstract = {It has been an open question whether the sum of finitely many isotropic Gaussian kernels in n ≥ 2 dimensions can have more modes than kernels, until in 2003 Carreira-Perpiñán and Williams exhibited n+1 isotropic Gaussian kernels in $\mathbb{R}^n$ with n+2 modes. We give a detailed analysis of this example, showing that it has exponentially many critical points and that the resilience of the extra mode grows like $\sqrt{n}$. In addition, we exhibit finite configurations of isotropic Gaussian kernels with superlinearly many modes.}, author = {Edelsbrunner, Herbert and Fasy, Brittany and Rote, Günter}, booktitle = {Proceedings of the twenty-eighth annual symposium on Computational geometry}, location = {Chapel Hill, NC, USA}, pages = {91 -- 100}, publisher = {ACM}, title = {{Add isotropic Gaussian kernels at own risk: More and more resilient modes in higher dimensions}}, doi = {10.1145/2261250.2261265}, year = {2012}, } @article{3256, abstract = {We use a distortion to define the dual complex of a cubical subdivision of $\mathbb{R}^n$ as an n-dimensional subcomplex of the nerve of the set of n-cubes. Motivated by the topological analysis of high-dimensional digital image data, we consider such subdivisions defined by generalizations of quad- and oct-trees to n dimensions. Assuming the subdivision is balanced, we show that mapping each vertex to the center of the corresponding n-cube gives a geometric realization of the dual complex in $\mathbb{R}^n$.}, author = {Edelsbrunner, Herbert and Kerber, Michael}, journal = {Discrete & Computational Geometry}, number = {2}, pages = {393 -- 414}, publisher = {Springer}, title = {{Dual complexes of cubical subdivisions of $\mathbb{R}^n$}}, doi = {10.1007/s00454-011-9382-4}, volume = {47}, year = {2012}, } @inproceedings{3265, abstract = {We propose a mid-level statistical model for image segmentation that composes multiple figure-ground hypotheses (FG) obtained by applying constraints at different locations and scales, into larger interpretations (tilings) of the entire image. Inference is cast as optimization over sets of maximal cliques sampled from a graph connecting all non-overlapping figure-ground segment hypotheses. Potential functions over cliques combine unary, Gestalt-based figure qualities, and pairwise compatibilities among spatially neighboring segments, constrained by T-junctions and the boundary interface statistics of real scenes. 
Learning the model parameters is based on maximum likelihood, alternating between sampling image tilings and optimizing their potential function parameters. State-of-the-art results are reported on the Berkeley and Stanford segmentation datasets, as well as VOC2009, where a 28% improvement was achieved.}, author = {Ion, Adrian and Carreira, Joao and Sminchisescu, Cristian}, location = {Barcelona, Spain}, publisher = {IEEE}, title = {{Image segmentation by figure-ground composition into maximal cliques}}, doi = {10.1109/ICCV.2011.6126486}, year = {2012}, } @article{3115, abstract = {We consider the offset-deconstruction problem: Given a polygonal shape Q with n vertices, can it be expressed, up to a tolerance ε in Hausdorff distance, as the Minkowski sum of another polygonal shape P with a disk of fixed radius? If it does, we also seek a preferably simple-looking solution P; then, P's offset constitutes an accurate, vertex-reduced, and smoothened approximation of Q. We give an $O(n \log n)$-time exact decision algorithm that handles any polygonal shape, assuming the real-RAM model of computation. A variant of the algorithm, which we have implemented using the CGAL library, is based on rational arithmetic and answers the same deconstruction problem up to an uncertainty parameter δ; its running time additionally depends on δ. If the input shape is found to be approximable, this algorithm also computes an approximate solution for the problem. It also allows us to solve parameter-optimization problems induced by the offset-deconstruction problem. For convex shapes, the complexity of the exact decision algorithm drops to O(n), which is also the time required to compute a solution P with at most one more vertex than a vertex-minimal one.}, author = {Berberich, Eric and Halperin, Dan and Kerber, Michael and Pogalnikova, Roza}, journal = {Discrete & Computational Geometry}, number = {4}, pages = {964 -- 989}, publisher = {Springer}, title = {{Deconstructing approximate offsets}}, doi = {10.1007/s00454-012-9441-5}, volume = {48}, year = {2012}, } @article{3331, abstract = {Computing the topology of an algebraic plane curve C means computing a combinatorial graph that is isotopic to C and thus represents its topology in $\mathbb{R}^2$. We prove that, for a polynomial of degree n with integer coefficients bounded by $2^\rho$, the topology of the induced curve can be computed with $\tilde{O}(\cdot)$ bit operations ($\tilde{O}$ indicates that we omit logarithmic factors). Our analysis improves the previous best known complexity bounds by a factor of $n^2$. The improvement is based on new techniques to compute and refine isolating intervals for the real roots of polynomials, and on the consequent amortized analysis of the critical fibers of the algebraic curve.}, author = {Kerber, Michael and Sagraloff, Michael}, journal = {Journal of Symbolic Computation}, number = {3}, pages = {239 -- 258}, publisher = {Elsevier}, title = {{A worst-case bound for topology computation of algebraic curves}}, doi = {10.1016/j.jsc.2011.11.001}, volume = {47}, year = {2012}, } @inproceedings{3129, abstract = {Let K be a simplicial complex and g the rank of its p-th homology group $H_p(K)$ defined with $\mathbb{Z}_2$ coefficients. We show that we can compute a basis H of $H_p(K)$ and annotate each p-simplex of K with a binary vector of length g with the following property: the annotations, summed over all p-simplices in any p-cycle z, provide the coordinate vector of the homology class [z] in the basis H. 
The basis and the annotations for all simplices can be computed in $O(n^\omega)$ time, where n is the size of K and $\omega < 2.376$ is a constant such that two $n \times n$ matrices can be multiplied in $O(n^\omega)$ time. The precomputed annotations permit answering queries about the independence or the triviality of p-cycles efficiently. Using annotations of edges in 2-complexes, we derive better algorithms for computing an optimal basis and optimal homologous cycles in 1-dimensional homology. Specifically, for computing an optimal basis of $H_1(K)$, we improve the previously known time complexity from $O(n^4)$ to $O(n^\omega + n^2 g^{\omega - 1})$. Here n denotes the size of the 2-skeleton of K and g the rank of $H_1(K)$. Computing an optimal cycle homologous to a given 1-cycle is NP-hard even for surfaces, and an algorithm taking $2^{O(g)} n \log n$ time is known for surfaces. We extend this algorithm to work with arbitrary 2-complexes in $O(n^\omega) + 2^{O(g)} n^2 \log n$ time using annotations.}, author = {Busaryev, Oleksiy and Cabello, Sergio and Chen, Chao and Dey, Tamal and Wang, Yusu}, location = {Helsinki, Finland}, pages = {189 -- 200}, publisher = {Springer}, title = {{Annotating simplices with a homology basis and its applications}}, doi = {10.1007/978-3-642-31155-0_17}, volume = {7357}, year = {2012}, } @article{3159, abstract = {The structure of hierarchical networks in biological and physical systems has long been characterized using the Horton-Strahler ordering scheme. The scheme assigns an integer order to each edge in the network based on the topology of branching such that the order increases from distal parts of the network (e.g., mountain streams or capillaries) to the "root" of the network (e.g., the river outlet or the aorta). However, Horton-Strahler ordering cannot be applied to networks with loops because they create a contradiction in the edge ordering in terms of which edge precedes another in the hierarchy. Here, we present a generalization of the Horton-Strahler order to weighted planar reticular networks, where weights are assumed to correlate with the importance of network edges, e.g., weights estimated from edge widths may correlate to flow capacity. Our method assigns hierarchical levels not only to edges of the network, but also to its loops, and classifies the edges into reticular edges, which are responsible for loop formation, and tree edges. In addition, we perform a detailed and rigorous theoretical analysis of the sensitivity of the hierarchical levels to weight perturbations. In doing so, we show that the ordering of the reticular edges is more robust to noise in weight estimation than is the ordering of the tree edges. We discuss applications of this generalized Horton-Strahler ordering to the study of leaf venation and other biological networks.}, author = {Mileyko, Yuriy and Edelsbrunner, Herbert and Price, Charles and Weitz, Joshua}, journal = {PLoS One}, number = {6}, publisher = {Public Library of Science}, title = {{Hierarchical ordering of reticular networks}}, doi = {10.1371/journal.pone.0036715}, volume = {7}, year = {2012}, } @article{3310, abstract = {The theory of persistent homology opens up the possibility to reason about topological features of a space or a function quantitatively and in combinatorial terms. We refer to this new angle at a classical subject within algebraic topology as a point calculus, which we present for the family of interlevel sets of a real-valued function. 
Our account of the subject is expository, devoid of proofs, and written for non-experts in algebraic topology.}, author = {Bendich, Paul and Cabello, Sergio and Edelsbrunner, Herbert}, journal = {Pattern Recognition Letters}, number = {11}, pages = {1436 -- 1444}, publisher = {Elsevier}, title = {{A point calculus for interlevel set homology}}, doi = {10.1016/j.patrec.2011.10.007}, volume = {33}, year = {2012}, } @article{6588, abstract = {First we note that the best polynomial approximation to $|x|$ on the set, which consists of an interval on the positive half-axis and a point on the negative half-axis, can be given by means of the classical Chebyshev polynomials. Then we explore the cases when a solution of the related problem on two intervals can be given in elementary functions.}, author = {Pausinger, Florian}, issn = {1812-9471}, journal = {Journal of Mathematical Physics, Analysis, Geometry}, number = {1}, pages = {63--78}, publisher = {B. Verkin Institute for Low Temperature Physics and Engineering}, title = {{Elementary solutions of the Bernstein problem on two intervals}}, volume = {8}, year = {2012}, } @article{2912, author = {Edelsbrunner, Herbert and Strelkova, Nataliya}, journal = {Russian Mathematical Surveys}, number = {6}, pages = {1167 -- 1168}, publisher = {Russian Academy of Sciences}, title = {{On the configuration space for the shortest networks}}, doi = {10.4213/rm9503}, volume = {67}, year = {2012}, } @inproceedings{3127, abstract = {When searching for characteristic subpatterns in potentially noisy graph data, it appears self-evident that having multiple observations would be better than having just one. However, it turns out that the inconsistencies introduced when different graph instances have different edge sets pose a serious challenge. In this work we address this challenge for the problem of finding maximum weighted cliques. We introduce the concept of a most persistent soft-clique. This is a subset of vertices that 1) is almost fully or at least densely connected, 2) occurs in all or almost all graph instances, and 3) has the maximum weight. We present a measure of clique-ness that essentially counts the number of edges missing to make a subset of vertices into a clique. With this measure, we show that the problem of finding the most persistent soft-clique can be cast either as: a) a max-min two-person game optimization problem, or b) a min-min soft margin optimization problem. Both formulations lead to the same solution when using a partial Lagrangian method to solve the optimization problems. By experiments on synthetic data and on real social network data, we show that the proposed method is able to reliably find soft cliques in graph data, even if it is distorted by random noise or unreliable observations.}, author = {Quadrianto, Novi and Lampert, Christoph and Chen, Chao}, booktitle = {Proceedings of the 29th International Conference on Machine Learning}, location = {Edinburgh, United Kingdom}, pages = {211--218}, publisher = {ML Research Press}, title = {{The most persistent soft-clique in a set of sampled graphs}}, year = {2012}, } @article{2904, abstract = {Generalized van der Corput sequences are one-dimensional, infinite sequences in the unit interval. They are generated from permutations in integer base b and are the building blocks of the multi-dimensional Halton sequences. 
Motivated by recent progress of Atanassov on the uniform distribution behavior of Halton sequences, we study, among others, permutations of the form P(i) = ai (mod b) for coprime integers a and b. We show that multipliers a that divide either b - 1 or b + 1 generate van der Corput sequences with weak distribution properties. We give explicit lower bounds for the asymptotic distribution behavior of these sequences and relate them to sequences generated from the identity permutation in smaller bases, which are, due to Faure, the weakest distributed generalized van der Corput sequences.}, author = {Pausinger, Florian}, issn = {2118-8572}, journal = {Journal de Théorie des Nombres de Bordeaux}, number = {3}, pages = {729 -- 749}, publisher = {Université de Bordeaux}, title = {{Weak multipliers for generalized van der Corput sequences}}, doi = {10.5802/jtnb.819}, volume = {24}, year = {2012}, } @article{2902, abstract = {We present an algorithm for simplifying linear cartographic objects and results obtained with a computer program implementing this algorithm.}, author = {Edelsbrunner, Herbert and Musin, Oleg and Ukhalov, Alexey and Yakimova, Olga and Alexeev, Vladislav and Bogaevskaya, Victoriya and Gorohov, Andrey and Preobrazhenskaya, Margarita}, journal = {Modeling and Analysis of Information Systems}, number = {6}, pages = {152 -- 160}, publisher = {Russian Academy of Sciences}, title = {{Fractal and computational geometry for generalizing cartographic objects}}, volume = {19}, year = {2012}, } @inproceedings{3266, abstract = {We present a joint image segmentation and labeling model (JSL) which, given a bag of figure-ground segment hypotheses extracted at multiple image locations and scales, constructs a joint probability distribution over both the compatible image interpretations (tilings or image segmentations) composed from those segments, and over their labeling into categories. The process of drawing samples from the joint distribution can be interpreted as first sampling tilings, modeled as maximal cliques, from a graph connecting spatially non-overlapping segments in the bag [1], followed by sampling labels for those segments, conditioned on the choice of a particular tiling. We learn the segmentation and labeling parameters jointly, based on Maximum Likelihood with a novel Incremental Saddle Point estimation procedure. The partition function over tilings and labelings is increasingly more accurately approximated by including incorrect configurations that a not-yet-competent model rates probable during learning. We show that the proposed methodology matches the current state of the art in the Stanford dataset [2], as well as in VOC2010, where 41.7% accuracy on the test set is achieved.}, author = {Ion, Adrian and Carreira, Joao and Sminchisescu, Cristian}, booktitle = {NIPS Proceedings}, location = {Granada, Spain}, pages = {1827 -- 1835}, publisher = {Neural Information Processing Systems Foundation}, title = {{Probabilistic joint image segmentation and labeling}}, volume = {24}, year = {2011}, }