@article{1104,
  abstract = {In the early visual system, cells of the same type perform the same computation in different places of the visual field. How these cells together encode a complex visual scene is unclear. A common assumption is that cells of a single type extract a single stimulus feature to form a feature map, but this has rarely been observed directly. Using large-scale recordings in the rat retina, we show that a homogeneous population of fast OFF ganglion cells simultaneously encodes two radically different features of a visual scene. Cells close to a moving object code quasilinearly for its position, while distant cells remain largely invariant to the object's position and, instead, respond nonlinearly to changes in the object's speed. We develop a quantitative model that accounts for this effect and identify a disinhibitory circuit that mediates it. Ganglion cells of a single type thus code not for one but for two features simultaneously. This richer, flexible neural map might also be present in other sensory systems.},
  author = {Deny, Stephane and Ferrari, Ulisse and Mace, Emilie and Yger, Pierre and Caplette, Romain and Picaud, Serge and Tkacik, Gasper and Marre, Olivier},
  title = {{Multiplexed computations in retinal ganglion cells of a single type}},
  journal = {Nature Communications},
  volume = {8},
  number = {1},
  year = {2017},
  publisher = {Nature Publishing Group},
  issn = {2041-1723},
  doi = {10.1038/s41467-017-02159-y},
}