@article{1408,
abstract = {The concept of well group in a special but important case captures homological properties of the zero set of a continuous map (Formula presented.) on a compact space K that are invariant with respect to perturbations of f. The perturbations are arbitrary continuous maps within (Formula presented.) distance r from f for a given (Formula presented.). The main drawback of the approach is that the computability of well groups was shown only when (Formula presented.) or (Formula presented.). Our contribution to the theory of well groups is twofold: on the one hand we improve on the computability issue, but on the other hand we present a range of examples where the well groups are incomplete invariants, that is, fail to capture certain important robust properties of the zero set. For the first part, we identify a computable subgroup of the well group that is obtained by cap product with the pullback of the orientation of (Formula presented.) by f. In other words, well groups can be algorithmically approximated from below. When f is smooth and (Formula presented.), our approximation of the (Formula presented.)th well group is exact. For the second part, we find examples of maps (Formula presented.) with all well groups isomorphic but whose perturbations have different zero sets. We discuss on a possible replacement of the well groups of vector valued maps by an invariant of a better descriptive power and computability status.},
author = {Franek, Peter and Kr{\v{c}}{\'a}l, Marek},
journal = {Discrete \& Computational Geometry},
number = {1},
pages = {126--164},
publisher = {Springer},
title = {{On computability and triviality of well groups}},
doi = {10.1007/s00454-016-9794-2},
volume = {56},
year = {2016},
internal-note = {NOTE(review): abstract formulas were lost in export ("(Formula presented.)" placeholders); restore from the publisher version via the DOI. Author surname restored as Kr{\v{c}}{\'a}l (export had "Krcál") -- confirm against the published paper.},
}
@article{1289,
abstract = {Aiming at the automatic diagnosis of tumors using narrow band imaging (NBI) magnifying endoscopic (ME) images of the stomach, we combine methods from image processing, topology, geometry, and machine learning to classify patterns into three classes: oval, tubular and irregular. Training the algorithm on a small number of images of each type, we achieve a high rate of correct classifications. The analysis of the learning algorithm reveals that a handful of geometric and topological features are responsible for the overwhelming majority of decisions.},
author = {Dunaeva, Olga and Edelsbrunner, Herbert and Lukyanov, Anton and Machin, Michael and Malkova, Daria and Kuvaev, Roman and Kashin, Sergey},
journal = {Pattern Recognition Letters},
number = {1},
pages = {13--22},
publisher = {Elsevier},
title = {{The classification of endoscopy images with persistent homology}},
doi = {10.1016/j.patrec.2015.12.012},
volume = {83},
year = {2016},
}
@article{1617,
abstract = {We study the discrepancy of jittered sampling sets: such a set $P \subset [0,1]^d$ is generated for fixed $m \in \mathbb{N}$ by partitioning $[0,1]^d$ into $m^d$ axis aligned cubes of equal measure and placing a random point inside each of the $N = m^d$ cubes. We prove that, for $N$ sufficiently large, $\frac{1}{10} \frac{d}{N^{1/2+1/(2d)}} \leq \mathbb{E} D_N^{*}(P) \leq \frac{\sqrt{d} (\log N)^{1/2}}{N^{1/2+1/(2d)}}$, where the upper bound with an unspecified constant $C_d$ was proven earlier by Beck. Our proof makes crucial use of the sharp Dvoretzky-Kiefer-Wolfowitz inequality and a suitably tailored Bernstein inequality; we have reasons to believe that the upper bound has the sharp scaling in $N$. Additional heuristics suggest that jittered sampling should be able to improve known bounds on the inverse of the star-discrepancy in the regime $N \gtrsim d^d$. We also prove a partition principle showing that every partition of $[0,1]^d$ combined with a jittered sampling construction gives rise to a set whose expected squared $L_2$-discrepancy is smaller than that of purely random points.},
author = {Pausinger, Florian and Steinerberger, Stefan},
journal = {Journal of Complexity},
pages = {199--216},
publisher = {Academic Press},
title = {{On the discrepancy of jittered sampling}},
doi = {10.1016/j.jco.2015.11.003},
volume = {33},
year = {2016},
internal-note = {NOTE(review): abstract math reconstructed from garbled Unicode export; exponents read as 1/2 + 1/(2d) -- confirm against the publisher version via the DOI.},
}
@article{1662,
abstract = {We introduce a modification of the classic notion of intrinsic volume using persistence moments of height functions. Evaluating the modified first intrinsic volume on digital approximations of a compact body with smoothly embedded boundary in $\mathbb{R}^n$, we prove convergence to the first intrinsic volume of the body as the resolution of the approximation improves. We have weaker results for the other modified intrinsic volumes, proving they converge to the corresponding intrinsic volumes of the $n$-dimensional unit ball.},
author = {Edelsbrunner, Herbert and Pausinger, Florian},
journal = {Advances in Mathematics},
pages = {674--703},
publisher = {Academic Press},
title = {{Approximation and convergence of the intrinsic volume}},
doi = {10.1016/j.aim.2015.10.004},
volume = {287},
year = {2016},
}
@inproceedings{1424,
abstract = {We consider the problem of statistical computations with persistence diagrams, a summary representation of topological features in data. These diagrams encode persistent homology, a widely used invariant in topological data analysis. While several avenues towards a statistical treatment of the diagrams have been explored recently, we follow an alternative route that is motivated by the success of methods based on the embedding of probability measures into reproducing kernel Hilbert spaces. In fact, a positive definite kernel on persistence diagrams has recently been proposed, connecting persistent homology to popular kernel-based learning techniques such as support vector machines. However, important properties of that kernel enabling a principled use in the context of probability measure embeddings remain to be explored. Our contribution is to close this gap by proving universality of a variant of the original kernel, and to demonstrate its effective use in two-sample hypothesis testing on synthetic as well as real-world data.},
author = {Kwitt, Roland and Huber, Stefan and Niethammer, Marc and Lin, Weili and Bauer, Ulrich},
booktitle = {Advances in Neural Information Processing Systems},
location = {Montreal, Canada},
pages = {3070--3078},
publisher = {Neural Information Processing Systems},
title = {{Statistical topological data analysis -- A kernel perspective}},
volume = {28},
year = {2015},
internal-note = {NOTE(review): booktitle added -- @inproceedings requires it and publisher/volume/year identify this as NIPS (NeurIPS) 28; confirm pages against the proceedings.},
}