@article{7553,
  author    = {Mlynarski, Wiktor F and Hledik, Michal and Sokolowski, Thomas R and Tka{\v c}ik, Ga{\v s}per},
  title     = {Statistical Analysis and Optimality of Neural Systems},
  journal   = {Neuron},
  volume    = {109},
  number    = {7},
  pages     = {1227--1241.e5},
  year      = {2021},
  publisher = {Cell Press},
  doi       = {10.1016/j.neuron.2021.01.020},
  abstract  = {Normative theories and statistical inference provide complementary approaches for the study of biological systems. A normative theory postulates that organisms have adapted to efficiently solve essential tasks, and proceeds to mathematically work out testable consequences of such optimality; parameters that maximize the hypothesized organismal function can be derived ab initio, without reference to experimental data. In contrast, statistical inference focuses on efficient utilization of data to learn model parameters, without reference to any a priori notion of biological function, utility, or fitness. Traditionally, these two approaches were developed independently and applied separately. Here we unify them in a coherent Bayesian framework that embeds a normative theory into a family of maximum-entropy ``optimization priors.'' This family defines a smooth interpolation between a data-rich inference regime (characteristic of ``bottom-up'' statistical models), and a data-limited ab initio prediction regime (characteristic of ``top-down'' normative theory). We demonstrate the applicability of our framework using data from the visual cortex, and argue that the flexibility it affords is essential to address a number of fundamental challenges relating to inference and prediction in complex, high-dimensional biological problems.},
}
@article{9816,
  author    = {Hledik, Michal and Polechova, Jitka and Beiglb{\"o}ck, Mathias and Herdina, Anna Nele and Strassl, Robert and Posch, Martin},
  title     = {Analysis of the Specificity of a {COVID-19} Antigen Test in the {Slovak} Mass Testing Program},
  journal   = {PLoS ONE},
  volume    = {16},
  number    = {7},
  pages     = {e0255267},
  year      = {2021},
  publisher = {Public Library of Science},
  issn      = {1932-6203},
  doi       = {10.1371/journal.pone.0255267},
  abstract  = {Aims: Mass antigen testing programs have been challenged because of an alleged insufficient specificity, leading to a large number of false positives. The objective of this study is to derive a lower bound of the specificity of the SD Biosensor Standard Q Ag-Test in large scale practical use.
Methods: Based on county data from the nationwide tests for SARS-CoV-2 in Slovakia between 31.10.--1.11. 2020 we calculate a lower confidence bound for the specificity. As positive test results were not systematically verified by PCR tests, we base the lower bound on a worst case assumption, assuming all positives to be false positives.
Results: 3,625,332 persons from 79 counties were tested. The lowest positivity rate was observed in the county of Ro{\v z}{\v n}ava where 100 out of 34307 (0.29\%) tests were positive. This implies a test specificity of at least 99.6\% (97.5\% one-sided lower confidence bound, adjusted for multiplicity).
Conclusion: The obtained lower bound suggests a higher specificity compared to earlier studies in spite of the underlying worst case assumption and the application in a mass testing setting. The actual specificity is expected to exceed 99.6\% if the prevalence in the respective regions was non-negligible at the time of testing. To our knowledge, this estimate constitutes the first bound obtained from large scale practical use of an antigen test.},
}
@inproceedings{7606,
  author    = {Hledik, Michal and Sokolowski, Thomas R and Tka{\v c}ik, Ga{\v s}per},
  title     = {A Tight Upper Bound on Mutual Information},
  booktitle = {{IEEE} Information Theory Workshop, {ITW} 2019},
  year      = {2019},
  publisher = {IEEE},
  location  = {Visby, Sweden},
  isbn      = {9781538669006},
  doi       = {10.1109/ITW44776.2019.8989292},
  abstract  = {We derive a tight lower bound on equivocation (conditional entropy), or equivalently a tight upper bound on mutual information between a signal variable and channel outputs. The bound is in terms of the joint distribution of the signals and maximum a posteriori decodes (most probable signals given channel output). As part of our derivation, we describe the key properties of the distribution of signals, channel outputs and decodes, that minimizes equivocation and maximizes mutual information. This work addresses a problem in data analysis, where mutual information between signals and decodes is sometimes used to lower bound the mutual information between signals and channel outputs. Our result provides a corresponding upper bound.},
}