[{"quality_controlled":"1","isi":1,"tmp":{"name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","short":"CC BY (4.0)","image":"/images/cc_by.png"},"external_id":{"isi":["000641474900072"],"pmid":["33857170"]},"oa":1,"language":[{"iso":"eng"}],"doi":"10.1371/journal.pone.0248940","month":"04","publication_identifier":{"eissn":["19326203"]},"publication_status":"published","publisher":"Public Library of Science","department":[{"_id":"GaTk"}],"year":"2021","acknowledgement":"The authors would like to thank Ulisse Ferrari for useful discussions and feedback.","pmid":1,"date_created":"2021-05-02T22:01:28Z","date_updated":"2023-10-18T08:17:42Z","volume":16,"author":[{"full_name":"Chalk, Matthew J","first_name":"Matthew J","last_name":"Chalk","id":"2BAAC544-F248-11E8-B48F-1D18A9856A87","orcid":"0000-0001-7782-4436"},{"orcid":"0000-0002-6699-1455","id":"3D494DCA-F248-11E8-B48F-1D18A9856A87","last_name":"Tkačik","first_name":"Gašper","full_name":"Tkačik, Gašper"},{"first_name":"Olivier","last_name":"Marre","full_name":"Marre, Olivier"}],"article_number":"e0248940","file_date_updated":"2021-05-04T13:22:19Z","article_type":"original","publication":"PLoS ONE","citation":{"apa":"Chalk, M. J., Tkačik, G., & Marre, O. (2021). Inferring the function performed by a recurrent neural network. PLoS ONE. Public Library of Science. https://doi.org/10.1371/journal.pone.0248940","ieee":"M. J. Chalk, G. Tkačik, and O. Marre, “Inferring the function performed by a recurrent neural network,” PLoS ONE, vol. 16, no. 4. Public Library of Science, 2021.","ista":"Chalk MJ, Tkačik G, Marre O. 2021. Inferring the function performed by a recurrent neural network. PLoS ONE. 16(4), e0248940.","ama":"Chalk MJ, Tkačik G, Marre O. Inferring the function performed by a recurrent neural network. PLoS ONE. 2021;16(4). 
doi:10.1371/journal.pone.0248940","chicago":"Chalk, Matthew J, Gašper Tkačik, and Olivier Marre. “Inferring the Function Performed by a Recurrent Neural Network.” PLoS ONE. Public Library of Science, 2021. https://doi.org/10.1371/journal.pone.0248940.","short":"M.J. Chalk, G. Tkačik, O. Marre, PLoS ONE 16 (2021).","mla":"Chalk, Matthew J., et al. “Inferring the Function Performed by a Recurrent Neural Network.” PLoS ONE, vol. 16, no. 4, e0248940, Public Library of Science, 2021, doi:10.1371/journal.pone.0248940."},"date_published":"2021-04-15T00:00:00Z","scopus_import":"1","day":"15","has_accepted_license":"1","article_processing_charge":"No","status":"public","ddc":["570"],"title":"Inferring the function performed by a recurrent neural network","intvolume":" 16","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","_id":"9362","file":[{"checksum":"c52da133850307d2031f552d998f00e8","success":1,"date_created":"2021-05-04T13:22:19Z","date_updated":"2021-05-04T13:22:19Z","relation":"main_file","file_id":"9371","file_size":2768282,"content_type":"application/pdf","creator":"kschuh","access_level":"open_access","file_name":"2021_pone_Chalk.pdf"}],"oa_version":"Published Version","type":"journal_article","abstract":[{"text":"A central goal in systems neuroscience is to understand the functions performed by neural circuits. Previous top-down models addressed this question by comparing the behaviour of an ideal model circuit, optimised to perform a given function, with neural recordings. However, this requires guessing in advance what function is being performed, which may not be possible for many neural systems. To address this, we propose an inverse reinforcement learning (RL) framework for inferring the function performed by a neural network from data. We assume that the responses of each neuron in a network are optimised so as to drive the network towards ‘rewarded’ states, that are desirable for performing a given function. 
We then show how one can use inverse RL to infer the reward function optimised by the network from observing its responses. This inferred reward function can be used to predict how the neural network should adapt its dynamics to perform the same function when the external environment or network structure changes. This could lead to theoretical predictions about how neural network dynamics adapt to deal with cell death and/or varying sensory stimulus statistics.","lang":"eng"}],"issue":"4"},{"month":"01","doi":"10.1073/pnas.1711114115","language":[{"iso":"eng"}],"main_file_link":[{"open_access":"1","url":"https://doi.org/10.1101/152660"}],"external_id":{"isi":["000419128700049"]},"oa":1,"project":[{"name":"Sensitivity to higher-order statistics in natural scenes","call_identifier":"FWF","grant_number":"P 25651-N26","_id":"254D1A94-B435-11E9-9278-68D0E5697425"}],"quality_controlled":"1","isi":1,"publist_id":"7273","author":[{"first_name":"Matthew J","last_name":"Chalk","id":"2BAAC544-F248-11E8-B48F-1D18A9856A87","orcid":"0000-0001-7782-4436","full_name":"Chalk, Matthew J"},{"full_name":"Marre, Olivier","last_name":"Marre","first_name":"Olivier"},{"first_name":"Gasper","last_name":"Tkacik","id":"3D494DCA-F248-11E8-B48F-1D18A9856A87","orcid":"0000-0002-6699-1455","full_name":"Tkacik, Gasper"}],"volume":115,"date_created":"2018-12-11T11:47:04Z","date_updated":"2023-09-19T10:16:35Z","year":"2018","publisher":"National Academy of Sciences","department":[{"_id":"GaTk"}],"publication_status":"published","article_processing_charge":"No","day":"02","scopus_import":"1","date_published":"2018-01-02T00:00:00Z","citation":{"ista":"Chalk MJ, Marre O, Tkačik G. 2018. Toward a unified theory of efficient, predictive, and sparse coding. PNAS. 115(1), 186–191.","ieee":"M. J. Chalk, O. Marre, and G. Tkačik, “Toward a unified theory of efficient, predictive, and sparse coding,” PNAS, vol. 115, no. 1. National Academy of Sciences, pp. 186–191, 2018.","apa":"Chalk, M. 
J., Marre, O., & Tkačik, G. (2018). Toward a unified theory of efficient, predictive, and sparse coding. PNAS. National Academy of Sciences. https://doi.org/10.1073/pnas.1711114115","ama":"Chalk MJ, Marre O, Tkačik G. Toward a unified theory of efficient, predictive, and sparse coding. PNAS. 2018;115(1):186-191. doi:10.1073/pnas.1711114115","chicago":"Chalk, Matthew J, Olivier Marre, and Gašper Tkačik. “Toward a Unified Theory of Efficient, Predictive, and Sparse Coding.” PNAS. National Academy of Sciences, 2018. https://doi.org/10.1073/pnas.1711114115.","mla":"Chalk, Matthew J., et al. “Toward a Unified Theory of Efficient, Predictive, and Sparse Coding.” PNAS, vol. 115, no. 1, National Academy of Sciences, 2018, pp. 186–91, doi:10.1073/pnas.1711114115.","short":"M.J. Chalk, O. Marre, G. Tkačik, PNAS 115 (2018) 186–191."},"publication":"PNAS","page":"186 - 191","issue":"1","abstract":[{"lang":"eng","text":"A central goal in theoretical neuroscience is to predict the response properties of sensory neurons from first principles. To this end, “efficient coding” posits that sensory neurons encode maximal information about their inputs given internal constraints. There exist, however, many variants of efficient coding (e.g., redundancy reduction, different formulations of predictive coding, robust coding, sparse coding, etc.), differing in their regimes of applicability, in the relevance of signals to be encoded, and in the choice of constraints. It is unclear how these types of efficient coding relate or what is expected when different coding objectives are combined. Here we present a unified framework that encompasses previously proposed efficient coding models and extends to unique regimes. We show that optimizing neural responses to encode predictive information can lead them to either correlate or decorrelate their inputs, depending on the stimulus statistics; in contrast, at low noise, efficiently encoding the past always predicts decorrelation. 
Later, we investigate coding of naturalistic movies and show that qualitatively different types of visual motion tuning and levels of response sparsity are predicted, depending on whether the objective is to recover the past or predict the future. Our approach promises a way to explain the observed diversity of sensory neural responses, as due to multiple functional goals and constraints fulfilled by different cell types and/or circuits."}],"type":"journal_article","oa_version":"Submitted Version","_id":"543","user_id":"c635000d-4b10-11ee-a964-aac5a93f6ac1","intvolume":" 115","status":"public","title":"Toward a unified theory of efficient, predictive, and sparse coding"},{"year":"2017","publication_status":"published","department":[{"_id":"GaTk"}],"publisher":"Public Library of Science","author":[{"full_name":"Chalk, Matthew J","first_name":"Matthew J","last_name":"Chalk","id":"2BAAC544-F248-11E8-B48F-1D18A9856A87","orcid":"0000-0001-7782-4436"},{"full_name":"Masset, Paul","last_name":"Masset","first_name":"Paul"},{"full_name":"Gutkin, Boris","first_name":"Boris","last_name":"Gutkin"},{"full_name":"Denève, Sophie","last_name":"Denève","first_name":"Sophie"}],"related_material":{"record":[{"status":"public","relation":"research_data","id":"9855"}]},"date_updated":"2023-02-23T14:10:54Z","date_created":"2018-12-11T11:47:53Z","volume":13,"article_number":"e1005582","file_date_updated":"2020-07-14T12:47:40Z","publist_id":"7035","oa":1,"tmp":{"name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","short":"CC BY (4.0)","image":"/images/cc_by.png"},"quality_controlled":"1","doi":"10.1371/journal.pcbi.1005582","language":[{"iso":"eng"}],"month":"06","publication_identifier":{"issn":["1553734X"]},"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","_id":"680","status":"public","ddc":["571"],"title":"Sensory noise predicts divisive reshaping of receptive fields","intvolume":" 
13","pubrep_id":"898","oa_version":"Published Version","file":[{"content_type":"application/pdf","file_size":14555676,"creator":"system","file_name":"IST-2017-898-v1+1_journal.pcbi.1005582.pdf","access_level":"open_access","date_created":"2018-12-12T10:07:47Z","date_updated":"2020-07-14T12:47:40Z","checksum":"796a1026076af6f4405a47d985bc7b68","relation":"main_file","file_id":"4645"}],"type":"journal_article","abstract":[{"text":"In order to respond reliably to specific features of their environment, sensory neurons need to integrate multiple incoming noisy signals. Crucially, they also need to compete for the interpretation of those signals with other neurons representing similar features. The form that this competition should take depends critically on the noise corrupting these signals. In this study we show that for the type of noise commonly observed in sensory systems, whose variance scales with the mean signal, sensory neurons should selectively divide their input signals by their predictions, suppressing ambiguous cues while amplifying others. Any change in the stimulus context alters which inputs are suppressed, leading to a deep dynamic reshaping of neural receptive fields going far beyond simple surround suppression. Paradoxically, these highly variable receptive fields go alongside and are in fact required for an invariant representation of external sensory features. In addition to offering a normative account of context-dependent changes in sensory responses, perceptual inference in the presence of signal-dependent noise accounts for ubiquitous features of sensory neurons such as divisive normalization, gain control and contrast dependent temporal dynamics.","lang":"eng"}],"issue":"6","publication":"PLoS Computational Biology","citation":{"ama":"Chalk MJ, Masset P, Gutkin B, Denève S. Sensory noise predicts divisive reshaping of receptive fields. PLoS Computational Biology. 2017;13(6). 
doi:10.1371/journal.pcbi.1005582","ista":"Chalk MJ, Masset P, Gutkin B, Denève S. 2017. Sensory noise predicts divisive reshaping of receptive fields. PLoS Computational Biology. 13(6), e1005582.","ieee":"M. J. Chalk, P. Masset, B. Gutkin, and S. Denève, “Sensory noise predicts divisive reshaping of receptive fields,” PLoS Computational Biology, vol. 13, no. 6. Public Library of Science, 2017.","apa":"Chalk, M. J., Masset, P., Gutkin, B., & Denève, S. (2017). Sensory noise predicts divisive reshaping of receptive fields. PLoS Computational Biology. Public Library of Science. https://doi.org/10.1371/journal.pcbi.1005582","mla":"Chalk, Matthew J., et al. “Sensory Noise Predicts Divisive Reshaping of Receptive Fields.” PLoS Computational Biology, vol. 13, no. 6, e1005582, Public Library of Science, 2017, doi:10.1371/journal.pcbi.1005582.","short":"M.J. Chalk, P. Masset, B. Gutkin, S. Denève, PLoS Computational Biology 13 (2017).","chicago":"Chalk, Matthew J, Paul Masset, Boris Gutkin, and Sophie Denève. “Sensory Noise Predicts Divisive Reshaping of Receptive Fields.” PLoS Computational Biology. Public Library of Science, 2017. https://doi.org/10.1371/journal.pcbi.1005582."},"date_published":"2017-06-01T00:00:00Z","scopus_import":1,"day":"01","has_accepted_license":"1"},{"date_published":"2017-06-01T00:00:00Z","doi":"10.1371/journal.pcbi.1005582.s001","citation":{"chicago":"Chalk, Matthew J, Paul Masset, Boris Gutkin, and Sophie Denève. “Supplementary Appendix.” Public Library of Science, 2017. https://doi.org/10.1371/journal.pcbi.1005582.s001.","short":"M.J. Chalk, P. Masset, B. Gutkin, S. Denève, (2017).","mla":"Chalk, Matthew J., et al. Supplementary Appendix. Public Library of Science, 2017, doi:10.1371/journal.pcbi.1005582.s001.","ieee":"M. J. Chalk, P. Masset, B. Gutkin, and S. Denève, “Supplementary appendix.” Public Library of Science, 2017.","apa":"Chalk, M. J., Masset, P., Gutkin, B., & Denève, S. (2017). Supplementary appendix. Public Library of Science. 
https://doi.org/10.1371/journal.pcbi.1005582.s001","ista":"Chalk MJ, Masset P, Gutkin B, Denève S. 2017. Supplementary appendix, Public Library of Science, 10.1371/journal.pcbi.1005582.s001.","ama":"Chalk MJ, Masset P, Gutkin B, Denève S. Supplementary appendix. 2017. doi:10.1371/journal.pcbi.1005582.s001"},"article_processing_charge":"No","month":"06","day":"01","related_material":{"record":[{"id":"680","status":"public","relation":"used_in_publication"}]},"author":[{"last_name":"Chalk","first_name":"Matthew J","orcid":"0000-0001-7782-4436","id":"2BAAC544-F248-11E8-B48F-1D18A9856A87","full_name":"Chalk, Matthew J"},{"full_name":"Masset, Paul","first_name":"Paul","last_name":"Masset"},{"last_name":"Gutkin","first_name":"Boris","full_name":"Gutkin, Boris"},{"last_name":"Denève","first_name":"Sophie","full_name":"Denève, Sophie"}],"oa_version":"Published Version","date_updated":"2023-02-23T12:52:17Z","date_created":"2021-08-10T07:05:10Z","year":"2017","_id":"9855","user_id":"6785fbc1-c503-11eb-8a32-93094b40e1cf","department":[{"_id":"GaTk"}],"publisher":"Public Library of Science","status":"public","title":"Supplementary appendix","abstract":[{"text":"Includes derivation of optimal estimation algorithm, generalisation to non-poisson noise statistics, correlated input noise, and implementation in a multi-layer neural network.","lang":"eng"}],"type":"research_data_reference"},{"scopus_import":1,"month":"12","day":"01","main_file_link":[{"url":"https://arxiv.org/abs/1605.07332","open_access":"1"}],"oa":1,"citation":{"ama":"Chalk MJ, Marre O, Tkačik G. Relevant sparse codes with variational information bottleneck. In: Vol 29. Neural Information Processing Systems; 2016:1965-1973.","ista":"Chalk MJ, Marre O, Tkačik G. 2016. Relevant sparse codes with variational information bottleneck. NIPS: Neural Information Processing Systems, Advances in Neural Information Processing Systems, vol. 29, 1965–1973.","ieee":"M. J. Chalk, O. Marre, and G. 
Tkačik, “Relevant sparse codes with variational information bottleneck,” presented at the NIPS: Neural Information Processing Systems, Barcelona, Spain, 2016, vol. 29, pp. 1965–1973.","apa":"Chalk, M. J., Marre, O., & Tkačik, G. (2016). Relevant sparse codes with variational information bottleneck (Vol. 29, pp. 1965–1973). Presented at the NIPS: Neural Information Processing Systems, Barcelona, Spain: Neural Information Processing Systems.","mla":"Chalk, Matthew J., et al. Relevant Sparse Codes with Variational Information Bottleneck. Vol. 29, Neural Information Processing Systems, 2016, pp. 1965–73.","short":"M.J. Chalk, O. Marre, G. Tkačik, in:, Neural Information Processing Systems, 2016, pp. 1965–1973.","chicago":"Chalk, Matthew J, Olivier Marre, and Gašper Tkačik. “Relevant Sparse Codes with Variational Information Bottleneck,” 29:1965–73. Neural Information Processing Systems, 2016."},"quality_controlled":"1","page":"1965-1973","conference":{"end_date":"2016-12-10","start_date":"2016-12-05","location":"Barcelona, Spain","name":"NIPS: Neural Information Processing Systems"},"date_published":"2016-12-01T00:00:00Z","language":[{"iso":"eng"}],"type":"conference","alternative_title":["Advances in Neural Information Processing Systems"],"abstract":[{"text":"In many applications, it is desirable to extract only the relevant aspects of data. A principled way to do this is the information bottleneck (IB) method, where one seeks a code that maximises information about a relevance variable, Y, while constraining the information encoded about the original data, X. Unfortunately however, the IB method is computationally demanding when data are high-dimensional and/or non-gaussian. Here we propose an approximate variational scheme for maximising a lower bound on the IB objective, analogous to variational EM. Using this method, we derive an IB algorithm to recover features that are both relevant and sparse. 
Finally, we demonstrate how kernelised versions of the algorithm can be used to address a broad range of problems with non-linear relation between X and Y.","lang":"eng"}],"publist_id":"6298","_id":"1082","user_id":"3E5EF7F0-F248-11E8-B48F-1D18A9856A87","year":"2016","title":"Relevant sparse codes with variational information bottleneck","publication_status":"published","status":"public","department":[{"_id":"GaTk"}],"publisher":"Neural Information Processing Systems","intvolume":" 29","author":[{"full_name":"Chalk, Matthew J","last_name":"Chalk","first_name":"Matthew J","orcid":"0000-0001-7782-4436","id":"2BAAC544-F248-11E8-B48F-1D18A9856A87"},{"last_name":"Marre","first_name":"Olivier","full_name":"Marre, Olivier"},{"full_name":"Tkacik, Gasper","orcid":"0000-0002-6699-1455","id":"3D494DCA-F248-11E8-B48F-1D18A9856A87","last_name":"Tkacik","first_name":"Gasper"}],"related_material":{"link":[{"url":"https://papers.nips.cc/paper/6101-relevant-sparse-codes-with-variational-information-bottleneck","relation":"other"}]},"date_updated":"2021-01-12T06:48:09Z","date_created":"2018-12-11T11:50:03Z","oa_version":"Preprint","volume":29},{"article_number":"e13824","publist_id":"6056","file_date_updated":"2020-07-14T12:44:42Z","year":"2016","acknowledgement":"Boris Gutkin acknowledges funding by the Russian Academic Excellence Project '5-100’.","publisher":"eLife Sciences Publications","department":[{"_id":"GaTk"}],"publication_status":"published","author":[{"full_name":"Chalk, Matthew J","last_name":"Chalk","first_name":"Matthew J","orcid":"0000-0001-7782-4436","id":"2BAAC544-F248-11E8-B48F-1D18A9856A87"},{"full_name":"Gutkin, Boris","last_name":"Gutkin","first_name":"Boris"},{"full_name":"Denève, Sophie","first_name":"Sophie","last_name":"Denève"}],"volume":5,"date_updated":"2021-01-12T06:49:30Z","date_created":"2018-12-11T11:51:02Z","month":"07","tmp":{"name":"Creative Commons Attribution 4.0 International Public License (CC-BY 
4.0)","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","short":"CC BY (4.0)","image":"/images/cc_by.png"},"oa":1,"quality_controlled":"1","doi":"10.7554/eLife.13824","language":[{"iso":"eng"}],"type":"journal_article","issue":"2016JULY","abstract":[{"text":"Cortical networks exhibit ‘global oscillations’, in which neural spike times are entrained to an underlying oscillatory rhythm, but where individual neurons fire irregularly, on only a fraction of cycles. While the network dynamics underlying global oscillations have been well characterised, their function is debated. Here, we show that such global oscillations are a direct consequence of optimal efficient coding in spiking networks with synaptic delays and noise. To avoid firing unnecessary spikes, neurons need to share information about the network state. Ideally, membrane potentials should be strongly correlated and reflect a ‘prediction error’ while the spikes themselves are uncorrelated and occur rarely. We show that the most efficient representation is when: (i) spike times are entrained to a global Gamma rhythm (implying a consistent representation of the error); but (ii) few neurons fire on each cycle (implying high efficiency), while (iii) excitation and inhibition are tightly balanced. 
This suggests that cortical networks exhibiting such dynamics are tuned to achieve a maximally efficient population code.","lang":"eng"}],"_id":"1266","user_id":"3E5EF7F0-F248-11E8-B48F-1D18A9856A87","intvolume":" 5","title":"Neural oscillations as a signature of efficient coding in the presence of synaptic delays","ddc":["571"],"status":"public","pubrep_id":"700","file":[{"access_level":"open_access","file_name":"IST-2016-700-v1+1_e13824-download.pdf","creator":"system","content_type":"application/pdf","file_size":2819055,"file_id":"4874","relation":"main_file","checksum":"dc52d967dc76174477bb258d84be2899","date_updated":"2020-07-14T12:44:42Z","date_created":"2018-12-12T10:11:20Z"}],"oa_version":"Published Version","scopus_import":1,"has_accepted_license":"1","day":"01","citation":{"ieee":"M. J. Chalk, B. Gutkin, and S. Denève, “Neural oscillations as a signature of efficient coding in the presence of synaptic delays,” eLife, vol. 5, no. 2016JULY. eLife Sciences Publications, 2016.","apa":"Chalk, M. J., Gutkin, B., & Denève, S. (2016). Neural oscillations as a signature of efficient coding in the presence of synaptic delays. ELife. eLife Sciences Publications. https://doi.org/10.7554/eLife.13824","ista":"Chalk MJ, Gutkin B, Denève S. 2016. Neural oscillations as a signature of efficient coding in the presence of synaptic delays. eLife. 5(2016JULY), e13824.","ama":"Chalk MJ, Gutkin B, Denève S. Neural oscillations as a signature of efficient coding in the presence of synaptic delays. eLife. 2016;5(2016JULY). doi:10.7554/eLife.13824","chicago":"Chalk, Matthew J, Boris Gutkin, and Sophie Denève. “Neural Oscillations as a Signature of Efficient Coding in the Presence of Synaptic Delays.” ELife. eLife Sciences Publications, 2016. https://doi.org/10.7554/eLife.13824.","short":"M.J. Chalk, B. Gutkin, S. Denève, ELife 5 (2016).","mla":"Chalk, Matthew J., et al. “Neural Oscillations as a Signature of Efficient Coding in the Presence of Synaptic Delays.” ELife, vol. 5, no. 
2016JULY, e13824, eLife Sciences Publications, 2016, doi:10.7554/eLife.13824."},"publication":"eLife","date_published":"2016-07-01T00:00:00Z"}]