@misc{9712, author = {Tugrul, Murat and Paixao, Tiago and Barton, Nicholas H and Tkačik, Gašper}, publisher = {Public Library of Science}, title = {{Other fitness models for comparison \& for interacting TFBSs}}, doi = {10.1371/journal.pgen.1005639.s001}, year = {2015}, } @article{1666, abstract = {Evolution of gene regulation is crucial for our understanding of the phenotypic differences between species, populations and individuals. Sequence-specific binding of transcription factors to the regulatory regions on the DNA is a key regulatory mechanism that determines gene expression and hence heritable phenotypic variation. We use a biophysical model for directional selection on gene expression to estimate the rates of gain and loss of transcription factor binding sites (TFBS) in finite populations under both point and insertion/deletion mutations. Our results show that these rates are typically slow for a single TFBS in an isolated DNA region, unless the selection is extremely strong. These rates decrease drastically with increasing TFBS length or increasingly specific protein-DNA interactions, making the evolution of sites longer than ∼ 10 bp unlikely on typical eukaryotic speciation timescales. Similarly, evolution converges to the stationary distribution of binding sequences very slowly, making the equilibrium assumption questionable. The availability of longer regulatory sequences in which multiple binding sites can evolve simultaneously, the presence of “pre-sites” or partially decayed old sites in the initial sequence, and biophysical cooperativity between transcription factors, can all facilitate gain of TFBS and reconcile theoretical calculations with timescales inferred from comparative genomics.}, author = {Tugrul, Murat and Paixao, Tiago and Barton, Nicholas H and Tkacik, Gasper}, journal = {PLoS Genetics}, number = {11}, publisher = {Public Library of Science}, title = {{Dynamics of transcription factor binding site evolution}}, doi = {10.1371/journal.pgen.1005639}, volume = {11}, year = {2015}, } @article{1576, abstract = {Gene expression is controlled primarily by interactions between transcription factor proteins (TFs) and the regulatory DNA sequence, a process that can be captured well by thermodynamic models of regulation. These models, however, neglect regulatory crosstalk: the possibility that noncognate TFs could initiate transcription, with potentially disastrous effects for the cell. Here, we estimate the importance of crosstalk, suggest that its avoidance strongly constrains equilibrium models of TF binding, and propose an alternative nonequilibrium scheme that implements kinetic proofreading to suppress erroneous initiation. This proposal is consistent with the observed covalent modifications of the transcriptional apparatus and predicts increased noise in gene expression as a trade-off for improved specificity. Using information theory, we quantify this trade-off to find when optimal proofreading architectures are favored over their equilibrium counterparts.
Such architectures exhibit significant super-Poisson noise at low expression in steady state.}, author = {Cepeda Humerez, Sarah A and Rieckh, Georg and Tkacik, Gasper}, journal = {Physical Review Letters}, number = {24}, publisher = {American Physical Society}, title = {{Stochastic proofreading mechanism alleviates crosstalk in transcriptional regulation}}, doi = {10.1103/PhysRevLett.115.248101}, volume = {115}, year = {2015}, } @article{1655, abstract = {Quantifying behaviors of robots which were generated autonomously from task-independent objective functions is an important prerequisite for objective comparisons of algorithms and movements of animals. The temporal sequence of such a behavior can be considered as a time series and hence complexity measures developed for time series are natural candidates for its quantification. The predictive information and the excess entropy are such complexity measures. They measure the amount of information the past contains about the future and thus quantify the nonrandom structure in the temporal sequence. However, when using these measures for systems with continuous states one has to deal with the fact that their values will depend on the resolution with which the system's states are observed. For deterministic systems both measures will diverge with increasing resolution. We therefore propose a new decomposition of the excess entropy into resolution-dependent and resolution-independent parts and discuss how they depend on the dimensionality of the dynamics, correlations and the noise level. For the practical estimation we propose to use estimates based on the correlation integral instead of the direct estimation of the mutual information based on next neighbor statistics because the latter allows less control of the scale dependencies. Using our algorithm we are able to show how autonomous learning generates behavior of increasing complexity with increasing learning duration.}, author = {Martius, Georg S and Olbrich, Eckehard}, journal = {Entropy}, number = {10}, pages = {7266 -- 7297}, publisher = {MDPI}, title = {{Quantifying emergent behavior of autonomous robots}}, doi = {10.3390/e17107266}, volume = {17}, year = {2015}, } @inproceedings{1708, abstract = {It has long been argued that, because of inherent ambiguity and noise, the brain needs to represent uncertainty in the form of probability distributions. The neural encoding of such distributions remains, however, highly controversial. Here we present a novel circuit model for representing multidimensional real-valued distributions using a spike-based spatio-temporal code. Our model combines the computational advantages of the currently competing models for probabilistic codes and exhibits realistic neural responses along a variety of classic measures. Furthermore, the model highlights the challenges associated with interpreting neural activity in relation to behavioral uncertainty and points to alternative population-level approaches for the experimental validation of distributed representations.}, author = {Savin, Cristina and Denève, Sophie}, location = {Montreal, Canada}, number = {January}, pages = {2024 -- 2032}, publisher = {Neural Information Processing Systems}, title = {{Spatio-temporal representations of uncertainty in spiking neural networks}}, volume = {3}, year = {2014}, } @article{1886, abstract = {Information processing in the sensory periphery is shaped by natural stimulus statistics.
In the periphery, a transmission bottleneck constrains performance; thus efficient coding implies that natural signal components with a predictably wider range should be compressed. In a different regime—when sampling limitations constrain performance—efficient coding implies that more resources should be allocated to informative features that are more variable. We propose that this regime is relevant for sensory cortex when it extracts complex features from limited numbers of sensory samples. To test this prediction, we use central visual processing as a model: we show that visual sensitivity for local multi-point spatial correlations, described by dozens of independently measured parameters, can be quantitatively predicted from the structure of natural images. This suggests that efficient coding applies centrally, where it extends to higher-order sensory features and operates in a regime in which sensitivity increases with feature variability.}, author = {Hermundstad, Ann and Briguglio, John and Conte, Mary and Victor, Jonathan and Balasubramanian, Vijay and Tkacik, Gasper}, journal = {eLife}, number = {November}, publisher = {eLife Sciences Publications}, title = {{Variance predicts salience in central sensory processing}}, doi = {10.7554/eLife.03722}, year = {2014}, } @article{1896, abstract = {Biopolymer length regulation is a complex process that involves a large number of biological, chemical, and physical subprocesses acting simultaneously across multiple spatial and temporal scales. An illustrative example important for genomic stability is the length regulation of telomeres - nucleoprotein structures at the ends of linear chromosomes consisting of tandemly repeated DNA sequences and a specialized set of proteins. Maintenance of telomeres is often facilitated by the enzyme telomerase but, particularly in telomerase-free systems, the maintenance of chromosomal termini depends on alternative lengthening of telomeres (ALT) mechanisms mediated by recombination. Various linear and circular DNA structures were identified to participate in ALT; however, the dynamics of the whole process are still poorly understood. We propose a chemical kinetics model of ALT with kinetic rates systematically derived from the biophysics of DNA diffusion and looping. The reaction system is reduced to a coagulation-fragmentation system by quasi-steady-state approximation. The detailed treatment of kinetic rates yields explicit formulas for expected size distributions of telomeres that demonstrate the key role played by the J factor, a quantitative measure of bending of polymers. The results are in agreement with experimental data and point out interesting phenomena: the appearance of very long telomeric circles if the total telomere density exceeds a critical value (excess mass) and a nonlinear response of the telomere size distributions to the amount of telomeric DNA in the system. The results can be of general importance for understanding the dynamics of telomeres in telomerase-independent systems as this mode of telomere maintenance is similar to the situation in tumor cells lacking telomerase activity.
Furthermore, due to its universality, the model may also serve as a prototype of an interaction between linear and circular DNA structures in various settings.}, author = {Kollár, Richard and Bod'ová, Katarína and Nosek, Jozef and Tomáška, Ľubomír}, journal = {Physical Review E: Statistical, Nonlinear, and Soft Matter Physics}, number = {3}, publisher = {American Physical Society}, title = {{Mathematical model of alternative mechanism of telomere length maintenance}}, doi = {10.1103/PhysRevE.89.032701}, volume = {89}, year = {2014}, } @article{1909, abstract = {Phenotypes are often environmentally dependent, which requires organisms to track environmental change. The challenge for organisms is to construct phenotypes using the most accurate environmental cue. Here, we use a quantitative genetic model of adaptation by additive genetic variance, within- and transgenerational plasticity via linear reaction norms and indirect genetic effects, respectively. We show how the relative influence on the eventual phenotype of these components depends on the predictability of environmental change (fast or slow, sinusoidal or stochastic) and the developmental lag τ between when the environment is perceived and when selection acts. We then decompose expected mean fitness into three components (variance load, adaptation and fluctuation load) to study the fitness costs of within- and transgenerational plasticity. A strongly negative maternal effect coefficient m minimizes the variance load, but a strongly positive m minimizes the fluctuation load. The adaptation term is maximized closer to zero, with positive or negative m preferred under different environmental scenarios. Phenotypic plasticity is higher when τ is shorter and when the environment changes frequently between seasonal extremes. Expected mean population fitness is highest away from the highest observed levels of phenotypic plasticity. Within- and transgenerational plasticity act in concert to deliver well-adapted phenotypes, which emphasizes the need to study both simultaneously when investigating phenotypic evolution.}, author = {Ezard, Thomas and Prizak, Roshan and Hoyle, Rebecca}, journal = {Functional Ecology}, number = {3}, pages = {693 -- 701}, publisher = {Wiley-Blackwell}, title = {{The fitness costs of adaptation via phenotypic plasticity and maternal effects}}, doi = {10.1111/1365-2435.12207}, volume = {28}, year = {2014}, } @article{1928, abstract = {In infectious disease epidemiology the basic reproductive ratio, R0, is defined as the average number of new infections caused by a single infected individual in a fully susceptible population. Many models describing competition for hosts between non-interacting pathogen strains in an infinite population lead to the conclusion that selection favors invasion of new strains if and only if they have higher R0 values than the resident. Here we demonstrate that this picture fails in finite populations. Using a simple stochastic SIS model, we show that in general there is no analogous optimization principle. We find that successive invasions may in some cases lead to strains that infect a smaller fraction of the host population, and that mutually invasible pathogen strains exist. In the limit of weak selection we demonstrate that an optimization principle does exist, although it differs from R0 maximization. For strains with very large R0, we derive an expression for this local fitness function and use it to establish a lower bound for the error caused by neglecting stochastic effects.
Furthermore, we apply this weak selection limit to investigate the selection dynamics in the presence of a trade-off between the virulence and the transmission rate of a pathogen.}, author = {Humplik, Jan and Hill, Alison and Nowak, Martin}, journal = {Journal of Theoretical Biology}, pages = {149 -- 162}, publisher = {Elsevier}, title = {{Evolutionary dynamics of infectious diseases in finite populations}}, doi = {10.1016/j.jtbi.2014.06.039}, volume = {360}, year = {2014}, } @article{1931, abstract = {A wealth of experimental evidence suggests that working memory circuits preferentially represent information that is behaviorally relevant. Still, we are missing a mechanistic account of how these representations come about. Here we provide a simple explanation for a range of experimental findings, in light of prefrontal circuits adapting to task constraints by reward-dependent learning. In particular, we model a neural network shaped by reward-modulated spike-timing-dependent plasticity (r-STDP) and homeostatic plasticity (intrinsic excitability and synaptic scaling). We show that the experimentally observed neural representations naturally emerge in an initially unstructured circuit as it learns to solve several working memory tasks. These results point to a critical, and previously unappreciated, role for reward-dependent learning in shaping prefrontal cortex activity.}, author = {Savin, Cristina and Triesch, Jochen}, journal = {Frontiers in Computational Neuroscience}, number = {MAY}, publisher = {Frontiers Research Foundation}, title = {{Emergence of task-dependent representations in working memory circuits}}, doi = {10.3389/fncom.2014.00057}, volume = {8}, year = {2014}, } @article{2028, abstract = {Understanding the dynamics of noisy neurons remains an important challenge in neuroscience. Here, we describe a simple probabilistic model that accurately describes the firing behavior in a large class (type II) of neurons. To demonstrate the usefulness of this model, we show how it accurately predicts the interspike interval (ISI) distributions, bursting patterns and mean firing rates found by: (1) simulations of the classic Hodgkin-Huxley model with channel noise, (2) experimental data from squid giant axon with a noisy input current and (3) experimental data on noisy firing from a neuron within the suprachiasmatic nucleus (SCN). This simple model has 6 parameters; however, in some cases, two of these parameters are coupled and only 5 parameters account for much of the known behavior. From these parameters, many properties of spiking can be found through simple calculation. Thus, we show how the complex effects of noise can be understood through a simple and general probabilistic model.}, author = {Bodova, Katarina and Paydarfar, David and Forger, Daniel}, journal = {Journal of Theoretical Biology}, pages = {40 -- 54}, publisher = {Academic Press}, title = {{Characterizing spiking in noisy type II neurons}}, doi = {10.1016/j.jtbi.2014.09.041}, volume = {365}, year = {2014}, } @article{2183, abstract = {We describe a simple adaptive network of coupled chaotic maps. The network reaches a stationary state (frozen topology) for all values of the coupling parameter, although the dynamics of the maps at the nodes of the network can be nontrivial.
The structure of the network shows interesting hierarchical properties, and in certain parameter regions the dynamics is polysynchronous: Nodes can be divided into differently synchronized classes but, contrary to cluster synchronization, nodes in the same class need not be connected to each other. These complicated synchrony patterns have been conjectured to play roles in systems biology and circuits. The adaptive system we study describes ways whereby this behavior can evolve from undifferentiated nodes.}, author = {Botella Soler, Vicente and Glendinning, Paul}, journal = {Physical Review E: Statistical, Nonlinear, and Soft Matter Physics}, number = {6}, publisher = {American Physical Society}, title = {{Hierarchy and polysynchrony in an adaptive network}}, doi = {10.1103/PhysRevE.89.062809}, volume = {89}, year = {2014}, } @article{2231, abstract = {Based on the measurements of noise in gene expression performed during the past decade, it has become customary to think of gene regulation in terms of a two-state model, where the promoter of a gene can stochastically switch between an ON and an OFF state. As experiments are becoming increasingly precise and the deviations from the two-state model start to be observable, we ask about the experimental signatures of complex multistate promoters, as well as the functional consequences of this additional complexity. In detail, we i) extend the calculations for noise in gene expression to promoters described by state transition diagrams with multiple states, ii) systematically compute the experimentally accessible noise characteristics for these complex promoters, and iii) use information theory to evaluate the channel capacities of complex promoter architectures and compare them with the baseline provided by the two-state model. We find that adding internal states to the promoter generically decreases channel capacity, except in certain cases, three of which (cooperativity, dual-role regulation, promoter cycling) we analyze in detail.}, author = {Rieckh, Georg and Tkacik, Gasper}, issn = {0006-3495}, journal = {Biophysical Journal}, number = {5}, pages = {1194 -- 1204}, publisher = {Biophysical Society}, title = {{Noise and information transmission in promoters with multiple internal states}}, doi = {10.1016/j.bpj.2014.01.014}, volume = {106}, year = {2014}, } @article{3263, abstract = {Adaptation in the retina is thought to optimize the encoding of natural light signals into sequences of spikes sent to the brain. While adaptive changes in retinal processing to the variations of the mean luminance level and second-order stimulus statistics have been documented before, no such measurements have been performed when higher-order moments of the light distribution change. We therefore measured the ganglion cell responses in the tiger salamander retina to controlled changes in the second (contrast), third (skew) and fourth (kurtosis) moments of the light intensity distribution of spatially uniform, temporally independent stimuli. The skew and kurtosis of the stimuli were chosen to cover the range observed in natural scenes. We quantified adaptation in ganglion cells by studying linear-nonlinear models that capture well the retinal encoding properties across all stimuli. We found that the encoding properties of retinal ganglion cells change only marginally when higher-order statistics change, compared to the changes observed in response to the variation in contrast.
By analyzing optimal coding in LN-type models, we showed that neurons can maintain a high information rate without large dynamic adaptation to changes in skew or kurtosis. This is because, for uncorrelated stimuli, spatio-temporal summation within the receptive field averages away non-Gaussian aspects of the light intensity distribution.}, author = {Tkacik, Gasper and Ghosh, Anandamohan and Schneidman, Elad and Segev, Ronen}, journal = {PLoS ONE}, number = {1}, publisher = {Public Library of Science}, title = {{Adaptation to changes in higher-order stimulus statistics in the salamander retina}}, doi = {10.1371/journal.pone.0085841}, volume = {9}, year = {2014}, } @article{537, abstract = {Transgenerational effects are broader than only parental relationships. Despite mounting evidence that multigenerational effects alter phenotypic and life-history traits, our understanding of how they combine to determine fitness is not well developed because of the added complexity necessary to study them. Here, we derive a quantitative genetic model of adaptation to an extraordinary new environment by an additive genetic component, phenotypic plasticity, and maternal and grandmaternal effects. We show how, at equilibrium, negative maternal and negative grandmaternal effects maximize expected population mean fitness. We define negative transgenerational effects as those that have a negative effect on trait expression in the subsequent generation, that is, they slow, or potentially reverse, the expected evolutionary dynamic. When maternal effects are positive, negative grandmaternal effects are preferred. As expected under Mendelian inheritance, the grandmaternal effects have a lower impact on fitness than the maternal effects, but this dual inheritance model predicts a more complex relationship between maternal and grandmaternal effects to constrain phenotypic variance and so maximize expected population mean fitness in the offspring.}, author = {Prizak, Roshan and Ezard, Thomas and Hoyle, Rebecca}, journal = {Ecology and Evolution}, number = {15}, pages = {3139 -- 3145}, publisher = {Wiley-Blackwell}, title = {{Fitness consequences of maternal and grandmaternal effects}}, doi = {10.1002/ece3.1150}, volume = {4}, year = {2014}, } @misc{9752, abstract = {Redundancies and correlations in the responses of sensory neurons may seem to waste neural resources, but they can also carry cues about structured stimuli and may help the brain to correct for response errors. To investigate the effect of stimulus structure on redundancy in the retina, we measured simultaneous responses from populations of retinal ganglion cells presented with natural and artificial stimuli that varied greatly in correlation structure; these stimuli and recordings are publicly available online. Responding to spatio-temporally structured stimuli such as natural movies, pairs of ganglion cells were modestly more correlated than in response to white noise checkerboards, but they were much less correlated than predicted by a non-adapting functional model of retinal response. Meanwhile, responding to stimuli with purely spatial correlations, pairs of ganglion cells showed increased correlations consistent with a static, non-adapting receptive field and nonlinearity. We found that in response to spatio-temporally correlated stimuli, ganglion cells had faster temporal kernels and tended to have stronger surrounds.
These properties of individual cells, along with gain changes that opposed changes in effective contrast at the ganglion cell input, largely explained the pattern of pairwise correlations across stimuli where receptive field measurements were possible.}, author = {Simmons, Kristina and Prentice, Jason and Tkačik, Gašper and Homann, Jan and Yee, Heather and Palmer, Stephanie and Nelson, Philip and Balasubramanian, Vijay}, publisher = {Dryad}, title = {{Data from: Transformation of stimulus correlations by the retina}}, doi = {10.5061/dryad.246qg}, year = {2014}, } @article{2257, abstract = {Maximum entropy models are the least structured probability distributions that exactly reproduce a chosen set of statistics measured in an interacting network. Here we use this principle to construct probabilistic models which describe the correlated spiking activity of populations of up to 120 neurons in the salamander retina as it responds to natural movies. Already in groups as small as 10 neurons, interactions between spikes can no longer be regarded as small perturbations in an otherwise independent system; for 40 or more neurons pairwise interactions need to be supplemented by a global interaction that controls the distribution of synchrony in the population. Here we show that such “K-pairwise” models—being systematic extensions of the previously used pairwise Ising models—provide an excellent account of the data. We explore the properties of the neural vocabulary by: 1) estimating its entropy, which constrains the population's capacity to represent visual information; 2) classifying activity patterns into a small set of metastable collective modes; 3) showing that the neural codeword ensembles are extremely inhomogeneous; 4) demonstrating that the state of individual neurons is highly predictable from the rest of the population, allowing the capacity for error correction.}, author = {Tkacik, Gasper and Marre, Olivier and Amodei, Dario and Schneidman, Elad and Bialek, William and Berry, Michael}, issn = {1553-734X}, journal = {PLoS Computational Biology}, number = {1}, publisher = {Public Library of Science}, title = {{Searching for collective behavior in a large network of sensory neurons}}, doi = {10.1371/journal.pcbi.1003408}, volume = {10}, year = {2014}, } @inbook{2413, abstract = {Progress in understanding the global brain dynamics has remained slow to date in large part because of the highly multiscale nature of brain activity. Indeed, normal brain dynamics is characterized by complex interactions between multiple levels: from the microscopic scale of single neurons to the mesoscopic level of local groups of neurons, and finally to the macroscopic level of the whole brain. Among the most difficult tasks are those of identifying which scales are significant for a given particular function and describing how the scales affect each other. It is important to realize that the scales of time and space are linked together, or even intertwined, and that causal inference is far more ambiguous between than within levels. We approach this problem from the perspective of our recent work on simultaneous recording from micro- and macroelectrodes in the human brain. We propose a physiological description of these multilevel interactions, based on phase–amplitude coupling of neuronal oscillations that operate at multiple frequencies and on different spatial scales.
Specifically, the amplitude of the oscillations on a particular spatial scale is modulated by phasic variations in neuronal excitability induced by lower frequency oscillations that emerge on a larger spatial scale. Following this general principle, it is possible to scale up or scale down the multiscale brain dynamics. It is expected that large-scale network oscillations in the low-frequency range, mediating downward effects, may play an important role in attention and consciousness.}, author = {Valderrama, Mario and Botella Soler, Vicente and Le Van Quyen, Michel}, booktitle = {Multiscale Analysis and Nonlinear Dynamics: From Genes to the Brain}, editor = {Pesenson, Misha Meyer}, isbn = {9783527411986}, publisher = {Wiley-VCH}, title = {{Neuronal oscillations scale up and scale down the brain dynamics}}, doi = {10.1002/9783527671632.ch08}, year = {2013}, } @article{2818, abstract = {Models of neural responses to stimuli with complex spatiotemporal correlation structure often assume that neurons are selective for only a small number of linear projections of a potentially high-dimensional input. In this review, we explore recent modeling approaches where the neural response depends on the quadratic form of the input rather than on its linear projection, that is, the neuron is sensitive to the local covariance structure of the signal preceding the spike. To infer this quadratic dependence in the presence of arbitrary (e.g., naturalistic) stimulus distribution, we review several inference methods, focusing in particular on two information theory–based approaches (maximization of stimulus energy and of noise entropy) and two likelihood-based approaches (Bayesian spike-triggered covariance and extensions of generalized linear models). We analyze the formal relationship between the likelihood-based and information-based approaches to demonstrate how they lead to consistent inference. We demonstrate the practical feasibility of these procedures by using model neurons responding to a flickering variance stimulus.}, author = {Rajan, Kanaka and Marre, Olivier and Tkacik, Gasper}, journal = {Neural Computation}, number = {7}, pages = {1661 -- 1692}, publisher = {MIT Press}, title = {{Learning quadratic receptive fields from neural responses to natural stimuli}}, doi = {10.1162/NECO_a_00463}, volume = {25}, year = {2013}, } @article{2850, abstract = {Recent work emphasizes that the maximum entropy principle provides a bridge between statistical mechanics models for collective behavior in neural networks and experiments on networks of real neurons. Most of this work has focused on capturing the measured correlations among pairs of neurons. Here we suggest an alternative, constructing models that are consistent with the distribution of global network activity, i.e. the probability that K out of N cells in the network generate action potentials in the same small time bin. The inverse problem that we need to solve in constructing the model is analytically tractable, and provides a natural 'thermodynamics' for the network in the limit of large N. We analyze the responses of neurons in a small patch of the retina to naturalistic stimuli, and find that the implied thermodynamics is very close to an unusual critical point, in which the entropy (in proper units) is exactly equal to the energy.
}, author = {Tkacik, Gasper and Marre, Olivier and Mora, Thierry and Amodei, Dario and Berry, Michael and Bialek, William}, journal = {Journal of Statistical Mechanics: Theory and Experiment}, number = {3}, publisher = {IOP Publishing Ltd.}, title = {{The simplest maximum entropy model for collective behavior in a neural network}}, doi = {10.1088/1742-5468/2013/03/P03011}, volume = {2013}, year = {2013}, }