[{"intvolume":" 18","month":"02","scopus_import":"1","oa_version":"Published Version","pmid":1,"abstract":[{"lang":"eng","text":"Allometric settings of population dynamics models are appealing due to their parsimonious nature and broad utility when studying system level effects. Here, we parameterise the size-scaled Rosenzweig-MacArthur differential equations to eliminate prey-mass dependency, facilitating an in depth analytic study of the equations which incorporates scaling parameters’ contributions to coexistence. We define the functional response term to match empirical findings, and examine situations where metabolic theory derivations and observation diverge. The dynamical properties of the Rosenzweig-MacArthur system, encompassing the distribution of size-abundance equilibria, the scaling of period and amplitude of population cycling, and relationships between predator and prey abundances, are consistent with empirical observation. Our parameterisation is an accurate minimal model across 15+ orders of mass magnitude."}],"issue":"2","volume":18,"language":[{"iso":"eng"}],"file":[{"file_name":"2023_PLOSOne_Mckerral.pdf","date_created":"2023-03-07T10:26:45Z","creator":"cchlebak","file_size":1257003,"date_updated":"2023-03-07T10:26:45Z","success":1,"checksum":"798ed5739a4117b03173e5d56e0534c9","file_id":"12712","relation":"main_file","access_level":"open_access","content_type":"application/pdf"}],"publication_status":"published","publication_identifier":{"eissn":["1932-6203"]},"status":"public","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"article_type":"original","type":"journal_article","_id":"12706","department":[{"_id":"KrCh"}],"file_date_updated":"2023-03-07T10:26:45Z","ddc":["000"],"date_updated":"2023-10-17T12:53:30Z","oa":1,"quality_controlled":"1","publisher":"Public Library of 
Science","acknowledgement":"This research was supported by an Australian Government Research Training Program\r\n(RTP) Scholarship to JCM (https://www.dese.gov.au), and LB is supported by the Centre de\r\nrecherche sur le vieillissement Fellowship Program. The funders had no role in study design, data collection and analysis, decision to publish, or preparation of the manuscript.","date_created":"2023-03-05T23:01:05Z","doi":"10.1371/journal.pone.0279838","date_published":"2023-02-27T00:00:00Z","page":"e0279838","publication":"PLoS One","day":"27","year":"2023","has_accepted_license":"1","isi":1,"title":"Empirical parameterisation and dynamical analysis of the allometric Rosenzweig-MacArthur equations","article_processing_charge":"No","external_id":{"isi":["000996122900022"],"pmid":["36848357"]},"author":[{"last_name":"Mckerral","full_name":"Mckerral, Jody C.","first_name":"Jody C."},{"first_name":"Maria","id":"4E21749C-F248-11E8-B48F-1D18A9856A87","last_name":"Kleshnina","full_name":"Kleshnina, Maria"},{"last_name":"Ejov","full_name":"Ejov, Vladimir","first_name":"Vladimir"},{"full_name":"Bartle, Louise","last_name":"Bartle","first_name":"Louise"},{"full_name":"Mitchell, James G.","last_name":"Mitchell","first_name":"James G."},{"full_name":"Filar, Jerzy A.","last_name":"Filar","first_name":"Jerzy A."}],"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","citation":{"ieee":"J. C. Mckerral, M. Kleshnina, V. Ejov, L. Bartle, J. G. Mitchell, and J. A. Filar, “Empirical parameterisation and dynamical analysis of the allometric Rosenzweig-MacArthur equations,” PLoS One, vol. 18, no. 2. Public Library of Science, p. e0279838, 2023.","short":"J.C. Mckerral, M. Kleshnina, V. Ejov, L. Bartle, J.G. Mitchell, J.A. Filar, PLoS One 18 (2023) e0279838.","apa":"Mckerral, J. C., Kleshnina, M., Ejov, V., Bartle, L., Mitchell, J. G., & Filar, J. A. (2023). Empirical parameterisation and dynamical analysis of the allometric Rosenzweig-MacArthur equations. PLoS One. 
Public Library of Science. https://doi.org/10.1371/journal.pone.0279838","ama":"Mckerral JC, Kleshnina M, Ejov V, Bartle L, Mitchell JG, Filar JA. Empirical parameterisation and dynamical analysis of the allometric Rosenzweig-MacArthur equations. PLoS One. 2023;18(2):e0279838. doi:10.1371/journal.pone.0279838","mla":"Mckerral, Jody C., et al. “Empirical Parameterisation and Dynamical Analysis of the Allometric Rosenzweig-MacArthur Equations.” PLoS One, vol. 18, no. 2, Public Library of Science, 2023, p. e0279838, doi:10.1371/journal.pone.0279838.","ista":"Mckerral JC, Kleshnina M, Ejov V, Bartle L, Mitchell JG, Filar JA. 2023. Empirical parameterisation and dynamical analysis of the allometric Rosenzweig-MacArthur equations. PLoS One. 18(2), e0279838.","chicago":"Mckerral, Jody C., Maria Kleshnina, Vladimir Ejov, Louise Bartle, James G. Mitchell, and Jerzy A. Filar. “Empirical Parameterisation and Dynamical Analysis of the Allometric Rosenzweig-MacArthur Equations.” PLoS One. Public Library of Science, 2023. 
https://doi.org/10.1371/journal.pone.0279838."}},{"project":[{"call_identifier":"H2020","_id":"2659CC84-B435-11E9-9278-68D0E5697425","grant_number":"793482","name":"Ultrastructural analysis of phosphoinositides in nerve terminals: distribution, dynamics and physiological roles in synaptic transmission"},{"call_identifier":"H2020","_id":"25CA28EA-B435-11E9-9278-68D0E5697425","grant_number":"694539","name":"In situ analysis of single channel subunit composition in neurons: physiological implication in synaptic plasticity and behaviour"}],"author":[{"last_name":"Eguchi","orcid":"0000-0002-6170-2546","full_name":"Eguchi, Kohgaku","id":"2B7846DC-F248-11E8-B48F-1D18A9856A87","first_name":"Kohgaku"},{"full_name":"Le Monnier, Elodie","last_name":"Le Monnier","first_name":"Elodie","id":"3B59276A-F248-11E8-B48F-1D18A9856A87"},{"full_name":"Shigemoto, Ryuichi","orcid":"0000-0001-8761-9444","last_name":"Shigemoto","id":"499F3ABC-F248-11E8-B48F-1D18A9856A87","first_name":"Ryuichi"}],"external_id":{"pmid":["37160366"],"isi":["001020132100005"]},"article_processing_charge":"No","title":"Nanoscale phosphoinositide distribution on cell membranes of mouse cerebellar neurons","citation":{"mla":"Eguchi, Kohgaku, et al. “Nanoscale Phosphoinositide Distribution on Cell Membranes of Mouse Cerebellar Neurons.” The Journal of Neuroscience, vol. 43, no. 23, Society for Neuroscience, 2023, pp. 4197–216, doi:10.1523/JNEUROSCI.1514-22.2023.","ama":"Eguchi K, Le Monnier E, Shigemoto R. Nanoscale phosphoinositide distribution on cell membranes of mouse cerebellar neurons. The Journal of Neuroscience. 2023;43(23):4197-4216. doi:10.1523/JNEUROSCI.1514-22.2023","apa":"Eguchi, K., Le Monnier, E., & Shigemoto, R. (2023). Nanoscale phosphoinositide distribution on cell membranes of mouse cerebellar neurons. The Journal of Neuroscience. Society for Neuroscience. https://doi.org/10.1523/JNEUROSCI.1514-22.2023","ieee":"K. Eguchi, E. Le Monnier, and R. 
Shigemoto, “Nanoscale phosphoinositide distribution on cell membranes of mouse cerebellar neurons,” The Journal of Neuroscience, vol. 43, no. 23. Society for Neuroscience, pp. 4197–4216, 2023.","short":"K. Eguchi, E. Le Monnier, R. Shigemoto, The Journal of Neuroscience 43 (2023) 4197–4216.","chicago":"Eguchi, Kohgaku, Elodie Le Monnier, and Ryuichi Shigemoto. “Nanoscale Phosphoinositide Distribution on Cell Membranes of Mouse Cerebellar Neurons.” The Journal of Neuroscience. Society for Neuroscience, 2023. https://doi.org/10.1523/JNEUROSCI.1514-22.2023.","ista":"Eguchi K, Le Monnier E, Shigemoto R. 2023. Nanoscale phosphoinositide distribution on cell membranes of mouse cerebellar neurons. The Journal of Neuroscience. 43(23), 4197–4216."},"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","publisher":"Society for Neuroscience","quality_controlled":"1","oa":1,"acknowledgement":"This work was supported by The Institute of Science and Technology (IST) Austria, the European Union's Horizon 2020 Research and Innovation Program under the Marie Skłodowska-Curie Grant Agreement No. 793482 (to K.E.) and by the European Research Council (ERC) Grant Agreement No. 694539 (to R.S.). 
We thank Nicoleta Condruz (IST Austria, Klosterneuburg, Austria) for technical assistance with sample preparation, the Electron Microscopy Facility of IST Austria (Klosterneuburg, Austria) for technical support with EM works, Natalia Baranova (University of Vienna, Vienna, Austria) and Martin Loose (IST Austria, Klosterneuburg, Austria) for advice on liposome preparation, and Yugo Fukazawa (University of Fukui, Fukui, Japan) for comments.","page":"4197-4216","doi":"10.1523/JNEUROSCI.1514-22.2023","date_published":"2023-06-07T00:00:00Z","date_created":"2023-07-09T22:01:12Z","isi":1,"has_accepted_license":"1","year":"2023","day":"07","publication":"The Journal of Neuroscience","article_type":"original","type":"journal_article","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"status":"public","_id":"13202","department":[{"_id":"RySh"}],"file_date_updated":"2023-07-10T09:04:58Z","date_updated":"2023-10-18T07:12:47Z","ddc":["570"],"scopus_import":"1","month":"06","intvolume":" 43","acknowledged_ssus":[{"_id":"EM-Fac"}],"abstract":[{"lang":"eng","text":"Phosphatidylinositol-4,5-bisphosphate (PI(4,5)P2) plays an essential role in neuronal activities through interaction with various proteins involved in signaling at membranes. However, the distribution pattern of PI(4,5)P2 and the association with these proteins on the neuronal cell membranes remain elusive. In this study, we established a method for visualizing PI(4,5)P2 by SDS-digested freeze-fracture replica labeling (SDS-FRL) to investigate the quantitative nanoscale distribution of PI(4,5)P2 in cryo-fixed brain. We demonstrate that PI(4,5)P2 forms tiny clusters with a mean size of ∼1000 nm² rather than randomly distributed in cerebellar neuronal membranes in male C57BL/6J mice. 
These clusters show preferential accumulation in specific membrane compartments of different cell types, in particular, in Purkinje cell (PC) spines and granule cell (GC) presynaptic active zones. Furthermore, we revealed extensive association of PI(4,5)P2 with CaV2.1 and GIRK3 across different membrane compartments, whereas its association with mGluR1α was compartment specific. These results suggest that our SDS-FRL method provides valuable insights into the physiological functions of PI(4,5)P2 in neurons."}],"oa_version":"Published Version","pmid":1,"volume":43,"issue":"23","ec_funded":1,"publication_identifier":{"eissn":["1529-2401"],"issn":["0270-6474"]},"publication_status":"published","file":[{"date_created":"2023-07-10T09:04:58Z","file_name":"2023_JN_Eguchi.pdf","date_updated":"2023-07-10T09:04:58Z","file_size":7794425,"creator":"alisjak","file_id":"13205","checksum":"70b2141870e0bf1c94fd343e18fdbc32","success":1,"content_type":"application/pdf","access_level":"open_access","relation":"main_file"}],"language":[{"iso":"eng"}]},{"abstract":[{"text":"We apply a variant of the square-sieve to produce an upper bound for the number of rational points of bounded height on a family of surfaces that admit a fibration over P1 whose general fibre is a hyperelliptic curve. 
The implied constant does not depend on the coefficients of the polynomial defining the surface.\r\n","lang":"eng"}],"oa_version":"Preprint","scopus_import":"1","main_file_link":[{"url":"https://doi.org/10.48550/arXiv.2007.14182","open_access":"1"}],"month":"02","intvolume":" 24","publication_identifier":{"eissn":["2036-2145"],"issn":["0391-173X"]},"publication_status":"published","language":[{"iso":"eng"}],"issue":"1","volume":24,"_id":"12916","article_type":"original","type":"journal_article","status":"public","date_updated":"2023-10-18T06:54:30Z","department":[{"_id":"TiBr"}],"quality_controlled":"1","publisher":"Scuola Normale Superiore - Edizioni della Normale","oa":1,"year":"2023","day":"16","publication":"Annali della Scuola Normale Superiore di Pisa - Classe di Scienze","page":"173-204","date_published":"2023-02-16T00:00:00Z","doi":"10.2422/2036-2145.202010_018","date_created":"2023-05-07T22:01:04Z","citation":{"chicago":"Bonolis, Dante, and Timothy D Browning. “Uniform Bounds for Rational Points on Hyperelliptic Fibrations.” Annali Della Scuola Normale Superiore Di Pisa - Classe Di Scienze. Scuola Normale Superiore - Edizioni della Normale, 2023. https://doi.org/10.2422/2036-2145.202010_018.","ista":"Bonolis D, Browning TD. 2023. Uniform bounds for rational points on hyperelliptic fibrations. Annali della Scuola Normale Superiore di Pisa - Classe di Scienze. 24(1), 173–204.","mla":"Bonolis, Dante, and Timothy D. Browning. “Uniform Bounds for Rational Points on Hyperelliptic Fibrations.” Annali Della Scuola Normale Superiore Di Pisa - Classe Di Scienze, vol. 24, no. 1, Scuola Normale Superiore - Edizioni della Normale, 2023, pp. 173–204, doi:10.2422/2036-2145.202010_018.","ama":"Bonolis D, Browning TD. Uniform bounds for rational points on hyperelliptic fibrations. Annali della Scuola Normale Superiore di Pisa - Classe di Scienze. 2023;24(1):173-204. doi:10.2422/2036-2145.202010_018","apa":"Bonolis, D., & Browning, T. D. (2023). 
Uniform bounds for rational points on hyperelliptic fibrations. Annali Della Scuola Normale Superiore Di Pisa - Classe Di Scienze. Scuola Normale Superiore - Edizioni della Normale. https://doi.org/10.2422/2036-2145.202010_018","ieee":"D. Bonolis and T. D. Browning, “Uniform bounds for rational points on hyperelliptic fibrations,” Annali della Scuola Normale Superiore di Pisa - Classe di Scienze, vol. 24, no. 1. Scuola Normale Superiore - Edizioni della Normale, pp. 173–204, 2023.","short":"D. Bonolis, T.D. Browning, Annali Della Scuola Normale Superiore Di Pisa - Classe Di Scienze 24 (2023) 173–204."},"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","author":[{"full_name":"Bonolis, Dante","last_name":"Bonolis","id":"6A459894-5FDD-11E9-AF35-BB24E6697425","first_name":"Dante"},{"id":"35827D50-F248-11E8-B48F-1D18A9856A87","first_name":"Timothy D","last_name":"Browning","orcid":"0000-0002-8314-0177","full_name":"Browning, Timothy D"}],"article_processing_charge":"No","external_id":{"arxiv":["2007.14182"]},"title":"Uniform bounds for rational points on hyperelliptic fibrations"},{"_id":"14422","type":"dissertation","tmp":{"name":"Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International (CC BY-NC-SA 4.0)","image":"/images/cc_by_nc_sa.png","legal_code_url":"https://creativecommons.org/licenses/by-nc-sa/4.0/legalcode","short":"CC BY-NC-SA (4.0)"},"status":"public","supervisor":[{"first_name":"Tim P","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","orcid":"0000-0003-3295-6181","full_name":"Vogels, Tim P","last_name":"Vogels"}],"date_updated":"2023-10-18T09:20:56Z","ddc":["610"],"department":[{"_id":"GradSch"},{"_id":"TiVo"}],"file_date_updated":"2023-10-18T07:56:08Z","abstract":[{"lang":"eng","text":"Animals exhibit a remarkable ability to learn and remember new behaviors, skills, and associations throughout their lifetime. 
These capabilities are made possible thanks to a variety of\r\nchanges in the brain throughout adulthood, regrouped under the term \"plasticity\". Some cells\r\nin the brain —neurons— and specifically changes in the connections between neurons, the\r\nsynapses, were shown to be crucial for the formation, selection, and consolidation of memories\r\nfrom past experiences. These ongoing changes of synapses across time are called synaptic\r\nplasticity. Understanding how a myriad of biochemical processes operating at individual\r\nsynapses can somehow work in concert to give rise to meaningful changes in behavior is a\r\nfascinating problem and an active area of research.\r\nHowever, the experimental search for the precise plasticity mechanisms at play in the brain\r\nis daunting, as it is difficult to control and observe synapses during learning. Theoretical\r\napproaches have thus been the default method to probe the plasticity-behavior connection. Such\r\nstudies attempt to extract unifying principles across synapses and model all observed synaptic\r\nchanges using plasticity rules: equations that govern the evolution of synaptic strengths across\r\ntime in neuronal network models. These rules can use many relevant quantities to determine\r\nthe magnitude of synaptic changes, such as the precise timings of pre- and postsynaptic\r\naction potentials, the recent neuronal activity levels, the state of neighboring synapses, etc.\r\nHowever, analytical studies rely heavily on human intuition and are forced to make simplifying\r\nassumptions about plasticity rules.\r\nIn this thesis, we aim to assist and augment human intuition in this search for plasticity rules.\r\nWe explore whether a numerical approach could automatically discover the plasticity rules\r\nthat elicit desired behaviors in large networks of interconnected neurons. 
This approach is\r\ndubbed meta-learning synaptic plasticity: learning plasticity rules which themselves will make\r\nneuronal networks learn how to solve a desired task. We first write all the potential plasticity\r\nmechanisms to consider using a single expression with adjustable parameters. We then optimize\r\nthese plasticity parameters using evolutionary strategies or Bayesian inference on tasks known\r\nto involve synaptic plasticity, such as familiarity detection and network stabilization.\r\nWe show that these automated approaches are powerful tools, able to complement established\r\nanalytical methods. By comprehensively screening plasticity rules at all synapse types in\r\nrealistic, spiking neuronal network models, we discover entire sets of degenerate plausible\r\nplasticity rules that reliably elicit memory-related behaviors. Our approaches allow for more\r\nrobust experimental predictions, by abstracting out the idiosyncrasies of individual plasticity\r\nrules, and provide fresh insights on synaptic plasticity in spiking network models.\r\n"}],"oa_version":"Published Version","alternative_title":["ISTA Thesis"],"month":"10","publication_identifier":{"issn":["2663 - 337X"]},"degree_awarded":"PhD","publication_status":"published","file":[{"embargo":"2024-10-12","checksum":"7f636555eae7803323df287672fd13ed","file_id":"14424","embargo_to":"open_access","content_type":"application/pdf","relation":"main_file","access_level":"closed","file_name":"Confavreux_Thesis_2A.pdf","date_created":"2023-10-12T14:53:50Z","file_size":30599717,"date_updated":"2023-10-12T14:54:52Z","creator":"cchlebak"},{"date_created":"2023-10-18T07:38:34Z","file_name":"Confavreux 
Thesis.zip","date_updated":"2023-10-18T07:56:08Z","file_size":68406739,"creator":"cchlebak","file_id":"14440","checksum":"725e85946db92290a4583a0de9779e1b","content_type":"application/x-zip-compressed","access_level":"closed","relation":"source_file"}],"language":[{"iso":"eng"}],"related_material":{"record":[{"relation":"part_of_dissertation","id":"9633","status":"public"}]},"ec_funded":1,"license":"https://creativecommons.org/licenses/by-nc-sa/4.0/","project":[{"call_identifier":"H2020","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","grant_number":"819603","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning."}],"citation":{"mla":"Confavreux, Basile J. Synapseek: Meta-Learning Synaptic Plasticity Rules. Institute of Science and Technology Austria, 2023, doi:10.15479/at:ista:14422.","apa":"Confavreux, B. J. (2023). Synapseek: Meta-learning synaptic plasticity rules. Institute of Science and Technology Austria. https://doi.org/10.15479/at:ista:14422","ama":"Confavreux BJ. Synapseek: Meta-learning synaptic plasticity rules. 2023. doi:10.15479/at:ista:14422","ieee":"B. J. Confavreux, “Synapseek: Meta-learning synaptic plasticity rules,” Institute of Science and Technology Austria, 2023.","short":"B.J. Confavreux, Synapseek: Meta-Learning Synaptic Plasticity Rules, Institute of Science and Technology Austria, 2023.","chicago":"Confavreux, Basile J. “Synapseek: Meta-Learning Synaptic Plasticity Rules.” Institute of Science and Technology Austria, 2023. https://doi.org/10.15479/at:ista:14422.","ista":"Confavreux BJ. 2023. Synapseek: Meta-learning synaptic plasticity rules. 
Institute of Science and Technology Austria."},"user_id":"8b945eb4-e2f2-11eb-945a-df72226e66a9","author":[{"id":"C7610134-B532-11EA-BD9F-F5753DDC885E","first_name":"Basile J","last_name":"Confavreux","full_name":"Confavreux, Basile J"}],"article_processing_charge":"No","title":"Synapseek: Meta-learning synaptic plasticity rules","publisher":"Institute of Science and Technology Austria","has_accepted_license":"1","year":"2023","day":"12","page":"148","doi":"10.15479/at:ista:14422","date_published":"2023-10-12T00:00:00Z","date_created":"2023-10-12T14:13:25Z"},{"_id":"14374","tmp":{"name":"Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International (CC BY-NC-SA 4.0)","image":"/images/cc_by_nc_sa.png","legal_code_url":"https://creativecommons.org/licenses/by-nc-sa/4.0/legalcode","short":"CC BY-NC-SA (4.0)"},"type":"dissertation","status":"public","date_updated":"2023-10-27T10:37:30Z","supervisor":[{"id":"4AFD0470-F248-11E8-B48F-1D18A9856A87","first_name":"Robert","full_name":"Seiringer, Robert","orcid":"0000-0002-6781-0521","last_name":"Seiringer"}],"ddc":["515","539"],"department":[{"_id":"GradSch"},{"_id":"RoSe"}],"file_date_updated":"2023-10-06T11:38:01Z","abstract":[{"lang":"eng","text":"Superconductivity has many important applications ranging from levitating trains over qubits to MRI scanners. The phenomenon is successfully modeled by Bardeen-Cooper-Schrieffer (BCS) theory. From a mathematical perspective, BCS theory has been studied extensively for systems without boundary. However, little is known in the presence of boundaries. With the help of numerical methods physicists observed that the critical temperature may increase in the presence of a boundary. The goal of this thesis is to understand the influence of boundaries on the critical temperature in BCS theory and to give a first rigorous justification of these observations. 
On the way, we also study two-body Schrödinger operators on domains with boundaries and prove additional results for superconductors without boundary.\r\n\r\nBCS theory is based on a non-linear functional, where the minimizer indicates whether the system is superconducting or in the normal, non-superconducting state. By considering the Hessian of the BCS functional at the normal state, one can analyze whether the normal state is possibly a minimum of the BCS functional and estimate the critical temperature. The Hessian turns out to be a linear operator resembling a Schrödinger operator for two interacting particles, but with more complicated kinetic energy. As a first step, we study the two-body Schrödinger operator in the presence of boundaries.\r\nFor Neumann boundary conditions, we prove that the addition of a boundary can create new eigenvalues, which correspond to the two particles forming a bound state close to the boundary.\r\n\r\nSecond, we need to understand superconductivity in the translation invariant setting. While in three dimensions this has been extensively studied, there is no mathematical literature for the one and two dimensional cases. In dimensions one and two, we compute the weak coupling asymptotics of the critical temperature and the energy gap in the translation invariant setting. We also prove that their ratio is independent of the microscopic details of the model in the weak coupling limit; this property is referred to as universality.\r\n\r\nIn the third part, we study the critical temperature of superconductors in the presence of boundaries. We start by considering the one-dimensional case of a half-line with contact interaction. Then, we generalize the results to generic interactions and half-spaces in one, two and three dimensions. 
Finally, we compare the critical temperature of a quarter space in two dimensions to the critical temperatures of a half-space and of the full space."}],"oa_version":"Published Version","alternative_title":["ISTA Thesis"],"month":"09","publication_status":"published","degree_awarded":"PhD","publication_identifier":{"issn":["2663 - 337X"]},"language":[{"iso":"eng"}],"file":[{"access_level":"open_access","relation":"main_file","content_type":"application/pdf","file_id":"14398","checksum":"ef039ffc3de2cb8dee5b14110938e9b6","creator":"broos","date_updated":"2023-10-06T11:35:56Z","file_size":2365702,"date_created":"2023-10-06T11:35:56Z","file_name":"phd-thesis-draft_pdfa_acrobat.pdf"},{"creator":"broos","date_updated":"2023-10-06T11:38:01Z","file_size":4691734,"date_created":"2023-10-06T11:38:01Z","file_name":"Version5.zip","access_level":"closed","relation":"source_file","content_type":"application/x-zip-compressed","file_id":"14399","checksum":"81dcac33daeefaf0111db52f41bb1fd0"}],"ec_funded":1,"related_material":{"record":[{"status":"public","id":"13207","relation":"part_of_dissertation"},{"relation":"part_of_dissertation","status":"public","id":"10850"}]},"project":[{"name":"Analysis of quantum many-body systems","grant_number":"694227","call_identifier":"H2020","_id":"25C6DC12-B435-11E9-9278-68D0E5697425"},{"_id":"bda63fe5-d553-11ed-ba76-a16e3d2f256b","grant_number":"I06427","name":"Mathematical Challenges in BCS Theory of Superconductivity"}],"citation":{"mla":"Roos, Barbara. Boundary Superconductivity in BCS Theory. Institute of Science and Technology Austria, 2023, doi:10.15479/at:ista:14374.","ieee":"B. Roos, “Boundary superconductivity in BCS theory,” Institute of Science and Technology Austria, 2023.","short":"B. Roos, Boundary Superconductivity in BCS Theory, Institute of Science and Technology Austria, 2023.","ama":"Roos B. Boundary superconductivity in BCS theory. 2023. doi:10.15479/at:ista:14374","apa":"Roos, B. (2023). 
Boundary superconductivity in BCS theory. Institute of Science and Technology Austria. https://doi.org/10.15479/at:ista:14374","chicago":"Roos, Barbara. “Boundary Superconductivity in BCS Theory.” Institute of Science and Technology Austria, 2023. https://doi.org/10.15479/at:ista:14374.","ista":"Roos B. 2023. Boundary superconductivity in BCS theory. Institute of Science and Technology Austria."},"user_id":"8b945eb4-e2f2-11eb-945a-df72226e66a9","article_processing_charge":"No","author":[{"last_name":"Roos","full_name":"Roos, Barbara","orcid":"0000-0002-9071-5880","id":"5DA90512-D80F-11E9-8994-2E2EE6697425","first_name":"Barbara"}],"title":"Boundary superconductivity in BCS theory","oa":1,"publisher":"Institute of Science and Technology Austria","year":"2023","has_accepted_license":"1","day":"30","page":"206","date_created":"2023-09-28T14:23:04Z","doi":"10.15479/at:ista:14374","date_published":"2023-09-30T00:00:00Z"},{"user_id":"4359f0d1-fa6c-11eb-b949-802e58b17ae8","citation":{"apa":"Hainzl, C., Roos, B., & Seiringer, R. (2023). Boundary superconductivity in the BCS model. Journal of Spectral Theory. EMS Press. https://doi.org/10.4171/JST/439","ama":"Hainzl C, Roos B, Seiringer R. Boundary superconductivity in the BCS model. Journal of Spectral Theory. 2023;12(4):1507–1540. doi:10.4171/JST/439","ieee":"C. Hainzl, B. Roos, and R. Seiringer, “Boundary superconductivity in the BCS model,” Journal of Spectral Theory, vol. 12, no. 4. EMS Press, pp. 1507–1540, 2023.","short":"C. Hainzl, B. Roos, R. Seiringer, Journal of Spectral Theory 12 (2023) 1507–1540.","mla":"Hainzl, Christian, et al. “Boundary Superconductivity in the BCS Model.” Journal of Spectral Theory, vol. 12, no. 4, EMS Press, 2023, pp. 1507–1540, doi:10.4171/JST/439.","ista":"Hainzl C, Roos B, Seiringer R. 2023. Boundary superconductivity in the BCS model. Journal of Spectral Theory. 12(4), 1507–1540.","chicago":"Hainzl, Christian, Barbara Roos, and Robert Seiringer. 
“Boundary Superconductivity in the BCS Model.” Journal of Spectral Theory. EMS Press, 2023. https://doi.org/10.4171/JST/439."},"title":"Boundary superconductivity in the BCS model","author":[{"first_name":"Christian","last_name":"Hainzl","full_name":"Hainzl, Christian"},{"orcid":"0000-0002-9071-5880","full_name":"Roos, Barbara","last_name":"Roos","id":"5DA90512-D80F-11E9-8994-2E2EE6697425","first_name":"Barbara"},{"full_name":"Seiringer, Robert","orcid":"0000-0002-6781-0521","last_name":"Seiringer","id":"4AFD0470-F248-11E8-B48F-1D18A9856A87","first_name":"Robert"}],"article_processing_charge":"No","external_id":{"arxiv":["2201.08090"],"isi":["000997933500008"]},"project":[{"call_identifier":"H2020","_id":"25C6DC12-B435-11E9-9278-68D0E5697425","name":"Analysis of quantum many-body systems","grant_number":"694227"}],"day":"18","publication":"Journal of Spectral Theory","isi":1,"has_accepted_license":"1","year":"2023","doi":"10.4171/JST/439","date_published":"2023-05-18T00:00:00Z","date_created":"2023-07-10T16:35:45Z","page":"1507–1540","acknowledgement":"We thank Egor Babaev for encouraging us to study this problem, and Rupert Frank for many fruitful discussions. Funding. Funding from the European Union’s Horizon 2020 research and innovation programme under the ERC grant agreement No. 
694227 (Barbara Roos and Robert Seiringer) is gratefully acknowledged.","publisher":"EMS Press","quality_controlled":"1","oa":1,"ddc":["530"],"date_updated":"2023-10-27T10:37:29Z","file_date_updated":"2023-07-11T08:19:15Z","department":[{"_id":"GradSch"},{"_id":"RoSe"}],"_id":"13207","status":"public","type":"journal_article","article_type":"original","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"file":[{"relation":"main_file","access_level":"open_access","content_type":"application/pdf","success":1,"checksum":"5501da33be010b5c81440438287584d5","file_id":"13208","creator":"alisjak","file_size":304619,"date_updated":"2023-07-11T08:19:15Z","file_name":"2023_EMS_Hainzl.pdf","date_created":"2023-07-11T08:19:15Z"}],"language":[{"iso":"eng"}],"publication_identifier":{"issn":["1664-039X"],"eissn":["1664-0403"]},"publication_status":"published","related_material":{"record":[{"relation":"dissertation_contains","status":"public","id":"14374"}]},"issue":"4","volume":12,"ec_funded":1,"oa_version":"Published Version","abstract":[{"text":"We consider the linear BCS equation, determining the BCS critical temperature, in the presence of a boundary, where Dirichlet boundary conditions are imposed. In the one-dimensional case with point interactions, we prove that the critical temperature is strictly larger than the bulk value, at least at weak coupling. In particular, the Cooper-pair wave function localizes near the boundary, an effect that cannot be modeled by effective Neumann boundary conditions on the order parameter as often imposed in Ginzburg–Landau theory. 
We also show that the relative shift in critical temperature vanishes if the coupling constant either goes to zero or to infinity.","lang":"eng"}],"month":"05","intvolume":" 12"},{"project":[{"call_identifier":"FP7","_id":"25B07788-B435-11E9-9278-68D0E5697425","grant_number":"250152","name":"Limits to selection in biology and in evolutionary computation"},{"_id":"bd6958e0-d553-11ed-ba76-86eba6a76c00","grant_number":"101055327","name":"Understanding the evolution of continuous genomes"}],"article_number":"iyad133","author":[{"last_name":"Barton","orcid":"0000-0002-8548-5240","full_name":"Barton, Nicholas H","first_name":"Nicholas H","id":"4880FE40-F248-11E8-B48F-1D18A9856A87"},{"first_name":"Alison M.","last_name":"Etheridge","full_name":"Etheridge, Alison M."},{"first_name":"Amandine","full_name":"Véber, Amandine","last_name":"Véber"}],"external_id":{"arxiv":["2211.03515"]},"article_processing_charge":"Yes (in subscription journal)","title":"The infinitesimal model with dominance","citation":{"chicago":"Barton, Nicholas H, Alison M. Etheridge, and Amandine Véber. “The Infinitesimal Model with Dominance.” Genetics. Oxford Academic, 2023. https://doi.org/10.1093/genetics/iyad133.","ista":"Barton NH, Etheridge AM, Véber A. 2023. The infinitesimal model with dominance. Genetics. 225(2), iyad133.","mla":"Barton, Nicholas H., et al. “The Infinitesimal Model with Dominance.” Genetics, vol. 225, no. 2, iyad133, Oxford Academic, 2023, doi:10.1093/genetics/iyad133.","ieee":"N. H. Barton, A. M. Etheridge, and A. Véber, “The infinitesimal model with dominance,” Genetics, vol. 225, no. 2. Oxford Academic, 2023.","short":"N.H. Barton, A.M. Etheridge, A. Véber, Genetics 225 (2023).","apa":"Barton, N. H., Etheridge, A. M., & Véber, A. (2023). The infinitesimal model with dominance. Genetics. Oxford Academic. https://doi.org/10.1093/genetics/iyad133","ama":"Barton NH, Etheridge AM, Véber A. The infinitesimal model with dominance. Genetics. 2023;225(2). 
doi:10.1093/genetics/iyad133"},"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","publisher":"Oxford Academic","quality_controlled":"1","oa":1,"acknowledgement":"NHB was supported in part by ERC Grants 250152 and 101055327. AV was partly supported by the chaire Modélisation Mathématique et Biodiversité of Veolia Environment—Ecole Polytechnique—Museum National d’Histoire Naturelle—Fondation X.","doi":"10.1093/genetics/iyad133","date_published":"2023-10-01T00:00:00Z","date_created":"2023-10-29T23:01:15Z","has_accepted_license":"1","year":"2023","day":"01","publication":"Genetics","type":"journal_article","article_type":"original","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"status":"public","_id":"14452","department":[{"_id":"NiBa"}],"file_date_updated":"2023-10-30T12:57:53Z","date_updated":"2023-10-30T13:04:11Z","ddc":["570"],"scopus_import":"1","month":"10","intvolume":" 225","abstract":[{"lang":"eng","text":"The classical infinitesimal model is a simple and robust model for the inheritance of quantitative traits. In this model, a quantitative trait is expressed as the sum of a genetic and an environmental component, and the genetic component of offspring traits within a family follows a normal distribution around the average of the parents’ trait values, and has a variance that is independent of the parental traits. In previous work, we showed that when trait values are determined by the sum of a large number of additive Mendelian factors, each of small effect, one can justify the infinitesimal model as a limit of Mendelian inheritance. In this paper, we show that this result extends to include dominance. We define the model in terms of classical quantities of quantitative genetics, before justifying it as a limit of Mendelian inheritance as the number, M, of underlying loci tends to infinity. 
As in the additive case, the multivariate normal distribution of trait values across the pedigree can be expressed in terms of variance components in an ancestral population and probabilities of identity by descent determined by the pedigree. Now, with just first-order dominance effects, we require two-, three-, and four-way identities. We also show that, even if we condition on parental trait values, the “shared” and “residual” components of trait values within each family will be asymptotically normally distributed as the number of loci tends to infinity, with an error of order 1/√M. We illustrate our results with some numerical examples."}],"oa_version":"Published Version","related_material":{"record":[{"id":"12949","status":"public","relation":"research_data"}]},"issue":"2","volume":225,"ec_funded":1,"publication_identifier":{"issn":["0016-6731"],"eissn":["1943-2631"]},"publication_status":"published","file":[{"success":1,"checksum":"3f65b1fbe813e2f4dbb5d2b5e891844a","file_id":"14469","content_type":"application/pdf","relation":"main_file","access_level":"open_access","file_name":"2023_Genetics_Barton.pdf","date_created":"2023-10-30T12:57:53Z","file_size":1439032,"date_updated":"2023-10-30T12:57:53Z","creator":"dernst"}],"language":[{"iso":"eng"}]},{"month":"05","publisher":"Institute of Science and Technology Austria","oa":1,"oa_version":"Published Version","abstract":[{"text":"The classical infinitesimal model is a simple and robust model for the inheritance of quantitative traits. In this model, a quantitative trait is expressed as the sum of a genetic and a non-genetic (environmental) component and the genetic component of offspring traits within a family follows a normal distribution around the average of the parents’ trait values, and has a variance that is independent of the trait values of the parents. 
Although the trait distribution across the whole population can be far from normal, the trait distributions within families are normally distributed with a variance-covariance matrix that is determined entirely by that in the ancestral population and the probabilities of identity determined by the pedigree. Moreover, conditioning on some of the trait values within the pedigree has predictable effects on the mean and variance within and between families. In previous work, Barton et al. (2017), we showed that when trait values are determined by the sum of a large number of Mendelian factors, each of small effect, one can justify the infinitesimal model as limit of Mendelian inheritance. It was also shown that under some forms of epistasis, trait values within a family are still normally distributed.","lang":"eng"}],"date_published":"2023-05-13T00:00:00Z","doi":"10.15479/AT:ISTA:12949","related_material":{"record":[{"relation":"used_in_publication","status":"public","id":"14452"}]},"contributor":[{"contributor_type":"researcher","first_name":"Amandine","last_name":"Veber"},{"last_name":"Etheridge","contributor_type":"researcher","first_name":"Alison"}],"date_created":"2023-05-13T09:49:09Z","day":"13","file":[{"date_created":"2023-05-13T09:36:33Z","file_name":"Neutral identities 16th Jan","date_updated":"2023-05-13T09:36:33Z","file_size":13662,"creator":"nbarton","file_id":"12950","checksum":"b0ce7d4b1ee7e7265430ceed36fc3336","success":1,"content_type":"application/octet-stream","access_level":"open_access","relation":"main_file"},{"relation":"main_file","access_level":"open_access","content_type":"application/octet-stream","success":1,"checksum":"ad5035ad4f7d3b150a252c79884f6a83","file_id":"12951","creator":"nbarton","file_size":181619928,"date_updated":"2023-05-13T09:38:17Z","file_name":"p, zA, zD, N=30 neutral III","date_created":"2023-05-13T09:38:17Z"},{"file_name":"p, zA, zD, N=30 neutral 
IV","date_created":"2023-05-13T09:41:59Z","file_size":605902074,"date_updated":"2023-05-13T09:41:59Z","creator":"nbarton","success":1,"checksum":"62182a1de796256edd6f4223704312ef","file_id":"12952","content_type":"application/octet-stream","relation":"main_file","access_level":"open_access"},{"file_name":"p, zA, zD, N=30 selected k=5","date_created":"2023-05-13T09:46:52Z","file_size":1018238746,"date_updated":"2023-05-13T09:46:52Z","creator":"nbarton","success":1,"file_id":"12953","checksum":"af775dda5c4f6859cb1e5a81ec40a667","content_type":"application/octet-stream","relation":"main_file","access_level":"open_access"},{"file_size":3197160,"date_updated":"2023-05-13T09:42:05Z","creator":"nbarton","file_name":"Pairwise F N=30 neutral II","date_created":"2023-05-13T09:42:05Z","content_type":"application/octet-stream","relation":"main_file","access_level":"open_access","success":1,"file_id":"12954","checksum":"af26f3394c387d3ada14b434cd68b1e5"},{"success":1,"checksum":"d5da7dc0e7282dd48222e26d12e34220","file_id":"12955","content_type":"application/octet-stream","relation":"main_file","access_level":"open_access","file_name":"Pedigrees N=30 neutral II","date_created":"2023-05-13T09:42:06Z","file_size":55492,"date_updated":"2023-05-13T09:42:06Z","creator":"nbarton"},{"success":1,"checksum":"00f386d80677590e29f6235d49cba58d","file_id":"12956","content_type":"application/octet-stream","relation":"main_file","access_level":"open_access","file_name":"selected reps N=30 selected k=1,2 300 reps III","date_created":"2023-05-13T09:46:06Z","file_size":474003467,"date_updated":"2023-05-13T09:46:06Z","creator":"nbarton"},{"creator":"nbarton","file_size":121209,"date_updated":"2023-05-13T09:46:08Z","file_name":"Algorithm for caclulating 
identities.nb","date_created":"2023-05-13T09:46:08Z","relation":"main_file","access_level":"open_access","content_type":"application/octet-stream","success":1,"checksum":"658cef3eaea6136a4d24da4f074191d7","file_id":"12957"},{"success":1,"file_id":"12958","checksum":"db9b6dddd7a596d974e25f5e78f5c45c","relation":"main_file","access_level":"open_access","content_type":"application/octet-stream","file_name":"Infinitesimal with dominance.nb","date_created":"2023-05-13T09:46:08Z","creator":"nbarton","file_size":1803898,"date_updated":"2023-05-13T09:46:08Z"},{"access_level":"open_access","relation":"main_file","content_type":"text/plain","checksum":"91f80a9fb58cae8eef2d8bf59fe30189","file_id":"12967","success":1,"creator":"nbarton","date_updated":"2023-05-16T04:09:08Z","file_size":990,"date_created":"2023-05-16T04:09:08Z","file_name":"ReadMe.txt"}],"has_accepted_license":"1","year":"2023","project":[{"grant_number":"101055327","name":"Understanding the evolution of continuous genomes","_id":"bd6958e0-d553-11ed-ba76-86eba6a76c00"}],"status":"public","keyword":["Quantitative genetics","infinitesimal model"],"type":"research_data","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"_id":"12949","department":[{"_id":"NiBa"}],"title":"The infinitesimal model with dominance","file_date_updated":"2023-05-16T04:09:08Z","author":[{"orcid":"0000-0002-8548-5240","full_name":"Barton, Nicholas H","last_name":"Barton","first_name":"Nicholas H","id":"4880FE40-F248-11E8-B48F-1D18A9856A87"}],"article_processing_charge":"No","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","ddc":["576"],"citation":{"chicago":"Barton, Nicholas H. “The Infinitesimal Model with Dominance.” Institute of Science and Technology Austria, 2023. https://doi.org/10.15479/AT:ISTA:12949.","ista":"Barton NH. 2023. 
The infinitesimal model with dominance, Institute of Science and Technology Austria, 10.15479/AT:ISTA:12949.","mla":"Barton, Nicholas H. The Infinitesimal Model with Dominance. Institute of Science and Technology Austria, 2023, doi:10.15479/AT:ISTA:12949.","short":"N.H. Barton, (2023).","ieee":"N. H. Barton, “The infinitesimal model with dominance.” Institute of Science and Technology Austria, 2023.","ama":"Barton NH. The infinitesimal model with dominance. 2023. doi:10.15479/AT:ISTA:12949","apa":"Barton, N. H. (2023). The infinitesimal model with dominance. Institute of Science and Technology Austria. https://doi.org/10.15479/AT:ISTA:12949"},"date_updated":"2023-10-30T13:04:11Z"},{"external_id":{"arxiv":["2302.02390"]},"article_processing_charge":"No","author":[{"full_name":"Markov, Ilia","last_name":"Markov","id":"D0CF4148-C985-11E9-8066-0BDEE5697425","first_name":"Ilia"},{"first_name":"Adrian","full_name":"Vladu, Adrian","last_name":"Vladu"},{"first_name":"Qi","full_name":"Guo, Qi","last_name":"Guo"},{"last_name":"Alistarh","full_name":"Alistarh, Dan-Adrian","orcid":"0000-0003-3650-940X","id":"4A899BFC-F248-11E8-B48F-1D18A9856A87","first_name":"Dan-Adrian"}],"title":"Quantized distributed training of large models with convergence guarantees","citation":{"ista":"Markov I, Vladu A, Guo Q, Alistarh D-A. 2023. Quantized distributed training of large models with convergence guarantees. Proceedings of the 40th International Conference on Machine Learning. ICML: International Conference on Machine Learning, PMLR, vol. 202, 24020–24044.","chicago":"Markov, Ilia, Adrian Vladu, Qi Guo, and Dan-Adrian Alistarh. “Quantized Distributed Training of Large Models with Convergence Guarantees.” In Proceedings of the 40th International Conference on Machine Learning, 202:24020–44. ML Research Press, 2023.","apa":"Markov, I., Vladu, A., Guo, Q., & Alistarh, D.-A. (2023). Quantized distributed training of large models with convergence guarantees. 
In Proceedings of the 40th International Conference on Machine Learning (Vol. 202, pp. 24020–24044). Honolulu, Hawaii, HI, United States: ML Research Press.","ama":"Markov I, Vladu A, Guo Q, Alistarh D-A. Quantized distributed training of large models with convergence guarantees. In: Proceedings of the 40th International Conference on Machine Learning. Vol 202. ML Research Press; 2023:24020-24044.","ieee":"I. Markov, A. Vladu, Q. Guo, and D.-A. Alistarh, “Quantized distributed training of large models with convergence guarantees,” in Proceedings of the 40th International Conference on Machine Learning, Honolulu, Hawaii, HI, United States, 2023, vol. 202, pp. 24020–24044.","short":"I. Markov, A. Vladu, Q. Guo, D.-A. Alistarh, in:, Proceedings of the 40th International Conference on Machine Learning, ML Research Press, 2023, pp. 24020–24044.","mla":"Markov, Ilia, et al. “Quantized Distributed Training of Large Models with Convergence Guarantees.” Proceedings of the 40th International Conference on Machine Learning, vol. 202, ML Research Press, 2023, pp. 24020–44."},"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","project":[{"call_identifier":"H2020","_id":"268A44D6-B435-11E9-9278-68D0E5697425","grant_number":"805223","name":"Elastic Coordination for Scalable Machine Learning"}],"page":"24020-24044","date_created":"2023-10-29T23:01:17Z","date_published":"2023-07-30T00:00:00Z","year":"2023","publication":"Proceedings of the 40th International Conference on Machine Learning","day":"30","oa":1,"publisher":"ML Research Press","quality_controlled":"1","acknowledgement":"The authors gratefully acknowledge funding from the European Research Council (ERC) under the European Union’s Horizon 2020 research and innovation programme (grant agreement No 805223 ScaleML), as well as experimental support from the IST Austria IT department, in particular Stefano Elefante, Andrei Hornoiu, and Alois Schloegl. 
AV acknowledges the support of the French Agence Nationale de la Recherche (ANR), under grant ANR-21-CE48-0016 (project COMCOPT), the support of Fondation Hadamard with a PRMO grant, and the support of CNRS with a CoopIntEER IEA grant (project ALFRED).","department":[{"_id":"DaAl"}],"date_updated":"2023-10-31T09:40:45Z","conference":{"name":"ICML: International Conference on Machine Learning","start_date":"2023-07-23","end_date":"2023-07-29","location":"Honolulu, Hawaii, HI, United States"},"type":"conference","status":"public","_id":"14461","ec_funded":1,"volume":202,"publication_status":"published","publication_identifier":{"eissn":["2640-3498"]},"language":[{"iso":"eng"}],"main_file_link":[{"open_access":"1","url":"https://doi.org/10.48550/arXiv.2302.02390"}],"alternative_title":["PMLR"],"scopus_import":"1","intvolume":" 202","month":"07","abstract":[{"text":"Communication-reduction techniques are a popular way to improve scalability in data-parallel training of deep neural networks (DNNs). The recent emergence of large language models such as GPT has created the need for new approaches to exploit data-parallelism. Among these, fully-sharded data parallel (FSDP) training is highly popular, yet it still encounters scalability bottlenecks. One reason is that applying compression techniques to FSDP is challenging: as the vast majority of the communication involves the model’s weights, direct compression alters convergence and leads to accuracy loss. We present QSDP, a variant of FSDP which supports both gradient and weight quantization with theoretical guarantees, is simple to implement and has essentially no overheads. To derive QSDP we prove that a natural modification of SGD achieves convergence even when we only maintain quantized weights, and thus the domain over which we train consists of quantized points and is, therefore, highly non-convex. We validate this approach by training GPT-family models with up to 1.3 billion parameters on a multi-node cluster. 
Experiments show that QSDP preserves model accuracy, while completely removing the communication bottlenecks of FSDP, providing end-to-end speedups of up to 2.2x.","lang":"eng"}],"acknowledged_ssus":[{"_id":"ScienComp"}],"oa_version":"Preprint"},{"ec_funded":1,"volume":202,"publication_status":"published","publication_identifier":{"eissn":["2640-3498"]},"language":[{"iso":"eng"}],"main_file_link":[{"open_access":"1","url":"https://proceedings.mlr.press/v202/fichtenberger23a/fichtenberger23a.pdf"}],"alternative_title":["PMLR"],"scopus_import":"1","intvolume":" 202","month":"07","abstract":[{"text":"We study fine-grained error bounds for differentially private algorithms for counting under continual observation. Our main insight is that the matrix mechanism when using lower-triangular matrices can be used in the continual observation model. More specifically, we give an explicit factorization for the counting matrix Mcount and upper bound the error explicitly. We also give a fine-grained analysis, specifying the exact constant in the upper bound. Our analysis is based on upper and lower bounds of the completely bounded norm (cb-norm) of Mcount\r\n. Along the way, we improve the best-known bound of 28 years by Mathias (SIAM Journal on Matrix Analysis and Applications, 1993) on the cb-norm of Mcount for a large range of the dimension of Mcount. Furthermore, we are the first to give concrete error bounds for various problems under continual observation such as binary counting, maintaining a histogram, releasing an approximately cut-preserving synthetic graph, many graph-based statistics, and substring and episode counting. Finally, we note that our result can be used to get a fine-grained error bound for non-interactive local learning and the first lower bounds on the additive error for (ϵ,δ)-differentially-private counting under continual observation. Subsequent to this work, Henzinger et al. 
(SODA, 2023) showed that our factorization also achieves fine-grained mean-squared error.","lang":"eng"}],"oa_version":"Published Version","department":[{"_id":"MoHe"}],"date_updated":"2023-10-31T09:54:05Z","conference":{"name":"ICML: International Conference on Machine Learning","start_date":"2023-07-23","location":"Honolulu, Hawaii, HI, United States","end_date":"2023-07-29"},"type":"conference","status":"public","_id":"14462","page":"10072-10092","date_created":"2023-10-29T23:01:17Z","date_published":"2023-07-30T00:00:00Z","year":"2023","publication":"Proceedings of the 40th International Conference on Machine Learning","day":"30","oa":1,"quality_controlled":"1","publisher":"ML Research Press","acknowledgement":"This project has received funding from the European Research Council (ERC) under the European Union’s Horizon 2020 research and innovation programme (Grant agreement No.\r\n101019564 “The Design of Modern Fully Dynamic Data Structures (MoDynStruct)” and from the Austrian Science Fund (FWF) project Z 422-N, and project “Fast Algorithms for a Reactive Network Layer (ReactNet)”, P 33775-N, with additional funding from the netidee SCIENCE Stiftung, 2020–2024. 2020–2024. JU’s research was funded by Decanal Research Grant. A part of this work was done when JU was visiting Indian Statistical Institute, Delhi. 
The authors would like to thank Rajat Bhatia, Aleksandar Nikolov, Shanta Laisharam, Vern Paulsen, Ryan Rogers, Abhradeep Thakurta, and Sarvagya Upadhyay for useful discussions.","article_processing_charge":"No","author":[{"first_name":"Hendrik","last_name":"Fichtenberger","full_name":"Fichtenberger, Hendrik"},{"last_name":"Henzinger","full_name":"Henzinger, Monika H","orcid":"0000-0002-5008-6530","id":"540c9bbd-f2de-11ec-812d-d04a5be85630","first_name":"Monika H"},{"last_name":"Upadhyay","full_name":"Upadhyay, Jalaj","first_name":"Jalaj"}],"title":"Constant matters: Fine-grained error bound on differentially private continual observation","citation":{"mla":"Fichtenberger, Hendrik, et al. “Constant Matters: Fine-Grained Error Bound on Differentially Private Continual Observation.” Proceedings of the 40th International Conference on Machine Learning, vol. 202, ML Research Press, 2023, pp. 10072–92.","ieee":"H. Fichtenberger, M. H. Henzinger, and J. Upadhyay, “Constant matters: Fine-grained error bound on differentially private continual observation,” in Proceedings of the 40th International Conference on Machine Learning, Honolulu, Hawaii, HI, United States, 2023, vol. 202, pp. 10072–10092.","short":"H. Fichtenberger, M.H. Henzinger, J. Upadhyay, in:, Proceedings of the 40th International Conference on Machine Learning, ML Research Press, 2023, pp. 10072–10092.","ama":"Fichtenberger H, Henzinger MH, Upadhyay J. Constant matters: Fine-grained error bound on differentially private continual observation. In: Proceedings of the 40th International Conference on Machine Learning. Vol 202. ML Research Press; 2023:10072-10092.","apa":"Fichtenberger, H., Henzinger, M. H., & Upadhyay, J. (2023). Constant matters: Fine-grained error bound on differentially private continual observation. In Proceedings of the 40th International Conference on Machine Learning (Vol. 202, pp. 10072–10092). 
Honolulu, Hawaii, HI, United States: ML Research Press.","chicago":"Fichtenberger, Hendrik, Monika H Henzinger, and Jalaj Upadhyay. “Constant Matters: Fine-Grained Error Bound on Differentially Private Continual Observation.” In Proceedings of the 40th International Conference on Machine Learning, 202:10072–92. ML Research Press, 2023.","ista":"Fichtenberger H, Henzinger MH, Upadhyay J. 2023. Constant matters: Fine-grained error bound on differentially private continual observation. Proceedings of the 40th International Conference on Machine Learning. ICML: International Conference on Machine Learning, PMLR, vol. 202, 10072–10092."},"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","project":[{"name":"The design and evaluation of modern fully dynamic data structures","grant_number":"101019564","call_identifier":"H2020","_id":"bd9ca328-d553-11ed-ba76-dc4f890cfe62"},{"_id":"34def286-11ca-11ed-8bc3-da5948e1613c","grant_number":"Z00422","name":"Wittgenstein Award - Monika Henzinger"},{"_id":"bd9e3a2e-d553-11ed-ba76-8aa684ce17fe","grant_number":"P33775 ","name":"Fast Algorithms for a Reactive Network Layer"}]},{"acknowledgement":"Aleksandr Shevchenko, Kevin Kogler and Marco Mondelli are supported by the 2019 Lopez-Loreta Prize. Hamed Hassani acknowledges the support by the NSF CIF award (1910056) and the NSF Institute for CORE Emerging Methods in Data Science (EnCORE).","oa":1,"quality_controlled":"1","publisher":"ML Research Press","publication":"Proceedings of the 40th International Conference on Machine Learning","day":"30","year":"2023","date_created":"2023-10-29T23:01:17Z","date_published":"2023-07-30T00:00:00Z","page":"31151-31209","project":[{"name":"Prix Lopez-Loretta 2019 - Marco Mondelli","_id":"059876FA-7A3F-11EA-A408-12923DDC885E"}],"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","citation":{"apa":"Shevchenko, A., Kögler, K., Hassani, H., & Mondelli, M. (2023). Fundamental limits of two-layer autoencoders, and achieving them with gradient methods. 
In Proceedings of the 40th International Conference on Machine Learning (Vol. 202, pp. 31151–31209). Honolulu, Hawaii, HI, United States: ML Research Press.","ama":"Shevchenko A, Kögler K, Hassani H, Mondelli M. Fundamental limits of two-layer autoencoders, and achieving them with gradient methods. In: Proceedings of the 40th International Conference on Machine Learning. Vol 202. ML Research Press; 2023:31151-31209.","short":"A. Shevchenko, K. Kögler, H. Hassani, M. Mondelli, in:, Proceedings of the 40th International Conference on Machine Learning, ML Research Press, 2023, pp. 31151–31209.","ieee":"A. Shevchenko, K. Kögler, H. Hassani, and M. Mondelli, “Fundamental limits of two-layer autoencoders, and achieving them with gradient methods,” in Proceedings of the 40th International Conference on Machine Learning, Honolulu, Hawaii, HI, United States, 2023, vol. 202, pp. 31151–31209.","mla":"Shevchenko, Aleksandr, et al. “Fundamental Limits of Two-Layer Autoencoders, and Achieving Them with Gradient Methods.” Proceedings of the 40th International Conference on Machine Learning, vol. 202, ML Research Press, 2023, pp. 31151–209.","ista":"Shevchenko A, Kögler K, Hassani H, Mondelli M. 2023. Fundamental limits of two-layer autoencoders, and achieving them with gradient methods. Proceedings of the 40th International Conference on Machine Learning. ICML: International Conference on Machine Learning, PMLR, vol. 202, 31151–31209.","chicago":"Shevchenko, Aleksandr, Kevin Kögler, Hamed Hassani, and Marco Mondelli. “Fundamental Limits of Two-Layer Autoencoders, and Achieving Them with Gradient Methods.” In Proceedings of the 40th International Conference on Machine Learning, 202:31151–209. 
ML Research Press, 2023."},"title":"Fundamental limits of two-layer autoencoders, and achieving them with gradient methods","article_processing_charge":"No","external_id":{"arxiv":["2212.13468"]},"author":[{"last_name":"Shevchenko","full_name":"Shevchenko, Aleksandr","first_name":"Aleksandr","id":"F2B06EC2-C99E-11E9-89F0-752EE6697425"},{"full_name":"Kögler, Kevin","last_name":"Kögler","first_name":"Kevin","id":"94ec913c-dc85-11ea-9058-e5051ab2428b"},{"first_name":"Hamed","last_name":"Hassani","full_name":"Hassani, Hamed"},{"first_name":"Marco","id":"27EB676C-8706-11E9-9510-7717E6697425","last_name":"Mondelli","full_name":"Mondelli, Marco","orcid":"0000-0002-3242-7020"}],"oa_version":"Preprint","abstract":[{"lang":"eng","text":"Autoencoders are a popular model in many branches of machine learning and lossy data compression. However, their fundamental limits, the performance of gradient methods and the features learnt during optimization remain poorly understood, even in the two-layer setting. In fact, earlier work has considered either linear autoencoders or specific training regimes (leading to vanishing or diverging compression rates). Our paper addresses this gap by focusing on non-linear two-layer autoencoders trained in the challenging proportional regime in which the input dimension scales linearly with the size of the representation. Our results characterize the minimizers of the population risk, and show that such minimizers are achieved by gradient methods; their structure is also unveiled, thus leading to a concise description of the features obtained via training. For the special case of a sign activation function, our analysis establishes the fundamental limits for the lossy compression of Gaussian sources via (shallow) autoencoders. 
Finally, while the results are proved for Gaussian data, numerical simulations on standard datasets display the universality of the theoretical predictions."}],"intvolume":" 202","month":"07","main_file_link":[{"url":"https://doi.org/10.48550/arXiv.2212.13468","open_access":"1"}],"scopus_import":"1","alternative_title":["PMLR"],"language":[{"iso":"eng"}],"publication_status":"published","publication_identifier":{"eissn":["2640-3498"]},"volume":202,"_id":"14459","status":"public","conference":{"name":"ICML: International Conference on Machine Learning","end_date":"2023-07-29","location":"Honolulu, Hawaii, HI, United States","start_date":"2023-07-23"},"type":"conference","date_updated":"2023-10-31T08:52:28Z","department":[{"_id":"MaMo"},{"_id":"DaAl"}]},{"oa":1,"publisher":"ML Research Press","quality_controlled":"1","acknowledgement":"We would like to thank Elias Frantar for his valuable assistance and support at the outset of this project, and the anonymous ICML and SNN reviewers for very constructive feedback. EI was supported in part by the FWF DK VGSCO, grant agreement number W1260-N35. DA acknowledges generous ERC support, via Starting Grant 805223 ScaleML. 
","page":"26215-26227","date_created":"2023-10-29T23:01:17Z","date_published":"2023-07-30T00:00:00Z","year":"2023","publication":"Proceedings of the 40th International Conference on Machine Learning","day":"30","project":[{"grant_number":"805223","name":"Elastic Coordination for Scalable Machine Learning","call_identifier":"H2020","_id":"268A44D6-B435-11E9-9278-68D0E5697425"}],"article_processing_charge":"No","external_id":{"arxiv":["2302.04852"]},"author":[{"last_name":"Nikdan","full_name":"Nikdan, Mahdi","id":"66374281-f394-11eb-9cf6-869147deecc0","first_name":"Mahdi"},{"last_name":"Pegolotti","full_name":"Pegolotti, Tommaso","first_name":"Tommaso"},{"last_name":"Iofinova","orcid":"0000-0002-7778-3221","full_name":"Iofinova, Eugenia B","first_name":"Eugenia B","id":"f9a17499-f6e0-11ea-865d-fdf9a3f77117"},{"last_name":"Kurtic","full_name":"Kurtic, Eldar","id":"47beb3a5-07b5-11eb-9b87-b108ec578218","first_name":"Eldar"},{"id":"4A899BFC-F248-11E8-B48F-1D18A9856A87","first_name":"Dan-Adrian","orcid":"0000-0003-3650-940X","full_name":"Alistarh, Dan-Adrian","last_name":"Alistarh"}],"title":"SparseProp: Efficient sparse backpropagation for faster training of neural networks at the edge","citation":{"chicago":"Nikdan, Mahdi, Tommaso Pegolotti, Eugenia B Iofinova, Eldar Kurtic, and Dan-Adrian Alistarh. “SparseProp: Efficient Sparse Backpropagation for Faster Training of Neural Networks at the Edge.” In Proceedings of the 40th International Conference on Machine Learning, 202:26215–27. ML Research Press, 2023.","ista":"Nikdan M, Pegolotti T, Iofinova EB, Kurtic E, Alistarh D-A. 2023. SparseProp: Efficient sparse backpropagation for faster training of neural networks at the edge. Proceedings of the 40th International Conference on Machine Learning. ICML: International Conference on Machine Learning, PMLR, vol. 202, 26215–26227.","mla":"Nikdan, Mahdi, et al. 
“SparseProp: Efficient Sparse Backpropagation for Faster Training of Neural Networks at the Edge.” Proceedings of the 40th International Conference on Machine Learning, vol. 202, ML Research Press, 2023, pp. 26215–27.","ama":"Nikdan M, Pegolotti T, Iofinova EB, Kurtic E, Alistarh D-A. SparseProp: Efficient sparse backpropagation for faster training of neural networks at the edge. In: Proceedings of the 40th International Conference on Machine Learning. Vol 202. ML Research Press; 2023:26215-26227.","apa":"Nikdan, M., Pegolotti, T., Iofinova, E. B., Kurtic, E., & Alistarh, D.-A. (2023). SparseProp: Efficient sparse backpropagation for faster training of neural networks at the edge. In Proceedings of the 40th International Conference on Machine Learning (Vol. 202, pp. 26215–26227). Honolulu, Hawaii, HI, United States: ML Research Press.","ieee":"M. Nikdan, T. Pegolotti, E. B. Iofinova, E. Kurtic, and D.-A. Alistarh, “SparseProp: Efficient sparse backpropagation for faster training of neural networks at the edge,” in Proceedings of the 40th International Conference on Machine Learning, Honolulu, Hawaii, HI, United States, 2023, vol. 202, pp. 26215–26227.","short":"M. Nikdan, T. Pegolotti, E.B. Iofinova, E. Kurtic, D.-A. Alistarh, in:, Proceedings of the 40th International Conference on Machine Learning, ML Research Press, 2023, pp. 26215–26227."},"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","main_file_link":[{"url":"https://doi.org/10.48550/arXiv.2302.04852","open_access":"1"}],"alternative_title":["PMLR"],"scopus_import":"1","intvolume":" 202","month":"07","abstract":[{"text":"We provide an efficient implementation of the backpropagation algorithm, specialized to the case where the weights of the neural network being trained are sparse. Our algorithm is general, as it applies to arbitrary (unstructured) sparsity and common layer types (e.g., convolutional or linear). 
We provide a fast vectorized implementation on commodity CPUs, and show that it can yield speedups in end-to-end runtime experiments, both in transfer learning using already-sparsified networks, and in training sparse networks from scratch. Thus, our results provide the first support for sparse training on commodity hardware.","lang":"eng"}],"oa_version":"Preprint","ec_funded":1,"volume":202,"publication_status":"published","publication_identifier":{"eissn":["2640-3498"]},"language":[{"iso":"eng"}],"conference":{"start_date":"2023-07-23","end_date":"2023-07-29","location":"Honolulu, Hawaii, HI, United States","name":"ICML: International Conference on Machine Learning"},"type":"conference","status":"public","_id":"14460","department":[{"_id":"DaAl"}],"date_updated":"2023-10-31T09:33:51Z"},{"oa":1,"quality_controlled":"1","publisher":"Springer Nature","year":"2023","publication":"8th International Conference on Cryptology and Information Security in Latin America","day":"01","page":"215-228","date_created":"2023-10-29T23:01:16Z","doi":"10.1007/978-3-031-44469-2_11","date_published":"2023-10-01T00:00:00Z","citation":{"ista":"Hoffmann C, Simkin M. 2023. Stronger lower bounds for leakage-resilient secret sharing. 8th International Conference on Cryptology and Information Security in Latin America. LATINCRYPT: Conference on Cryptology and Information Security in Latin America, LNCS, vol. 14168, 215–228.","chicago":"Hoffmann, Charlotte, and Mark Simkin. “Stronger Lower Bounds for Leakage-Resilient Secret Sharing.” In 8th International Conference on Cryptology and Information Security in Latin America, 14168:215–28. Springer Nature, 2023. https://doi.org/10.1007/978-3-031-44469-2_11.","ieee":"C. Hoffmann and M. Simkin, “Stronger lower bounds for leakage-resilient secret sharing,” in 8th International Conference on Cryptology and Information Security in Latin America, Quito, Ecuador, 2023, vol. 14168, pp. 215–228.","short":"C. Hoffmann, M. 
Simkin, in:, 8th International Conference on Cryptology and Information Security in Latin America, Springer Nature, 2023, pp. 215–228.","apa":"Hoffmann, C., & Simkin, M. (2023). Stronger lower bounds for leakage-resilient secret sharing. In 8th International Conference on Cryptology and Information Security in Latin America (Vol. 14168, pp. 215–228). Quito, Ecuador: Springer Nature. https://doi.org/10.1007/978-3-031-44469-2_11","ama":"Hoffmann C, Simkin M. Stronger lower bounds for leakage-resilient secret sharing. In: 8th International Conference on Cryptology and Information Security in Latin America. Vol 14168. Springer Nature; 2023:215-228. doi:10.1007/978-3-031-44469-2_11","mla":"Hoffmann, Charlotte, and Mark Simkin. “Stronger Lower Bounds for Leakage-Resilient Secret Sharing.” 8th International Conference on Cryptology and Information Security in Latin America, vol. 14168, Springer Nature, 2023, pp. 215–28, doi:10.1007/978-3-031-44469-2_11."},"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","article_processing_charge":"No","author":[{"first_name":"Charlotte","id":"0f78d746-dc7d-11ea-9b2f-83f92091afe7","full_name":"Hoffmann, Charlotte","orcid":"0000-0003-2027-5549","last_name":"Hoffmann"},{"first_name":"Mark","last_name":"Simkin","full_name":"Simkin, Mark"}],"title":"Stronger lower bounds for leakage-resilient secret sharing","abstract":[{"text":"Threshold secret sharing allows a dealer to split a secret s into n shares, such that any t shares allow for reconstructing s, but no t-1 shares reveal any information about s. Leakage-resilient secret sharing requires that the secret remains hidden, even when an adversary additionally obtains a limited amount of leakage from every share. Benhamouda et al. (CRYPTO’18) proved that Shamir’s secret sharing scheme is one bit leakage-resilient for reconstruction threshold t≥0.85n and conjectured that the same holds for t = c.n for any constant 0≤c≤1. 
Nielsen and Simkin (EUROCRYPT’20) showed that this is the best one can hope for by proving that Shamir’s scheme is not secure against one-bit leakage when t≤c.n/log(n).\r\nIn this work, we strengthen the lower bound of Nielsen and Simkin. We consider noisy leakage-resilience, where a random subset of leakages is replaced by uniformly random noise. We prove a lower bound for Shamir’s secret sharing, similar to that of Nielsen and Simkin, which holds even when a constant fraction of leakages is replaced by random noise. To this end, we first prove a lower bound on the share size of any noisy-leakage-resilient sharing scheme. We then use this lower bound to show that there exist universal constants c1, c2, such that for sufficiently large n it holds that Shamir’s secret sharing scheme is not noisy-leakage-resilient for t≤c1.n/log(n), even when a c2 fraction of leakages are replaced by random noise.\r\n\r\n\r\n\r\n","lang":"eng"}],"oa_version":"Preprint","main_file_link":[{"open_access":"1","url":"https://eprint.iacr.org/2023/1017"}],"scopus_import":"1","alternative_title":["LNCS"],"intvolume":" 14168","month":"10","publication_status":"published","publication_identifier":{"issn":["0302-9743"],"isbn":["9783031444685"],"eissn":["1611-3349"]},"language":[{"iso":"eng"}],"volume":14168,"_id":"14457","conference":{"start_date":"2023-10-03","end_date":"2023-10-06","location":"Quito, Ecuador","name":"LATINCRYPT: Conference on Cryptology and Information Security in Latin America"},"type":"conference","status":"public","date_updated":"2023-10-31T11:43:12Z","department":[{"_id":"KrPi"}]},{"department":[{"_id":"DaAl"}],"date_updated":"2023-10-31T09:59:42Z","type":"conference","conference":{"end_date":"2023-07-29","location":"Honolulu, Hawaii, HI, United States","start_date":"2023-07-23","name":"ICML: International Conference on Machine 
Learning"},"status":"public","_id":"14458","volume":202,"ec_funded":1,"publication_identifier":{"eissn":["2640-3498"]},"publication_status":"published","language":[{"iso":"eng"}],"alternative_title":["PMLR"],"scopus_import":"1","main_file_link":[{"url":"https://doi.org/10.48550/arXiv.2301.00774","open_access":"1"}],"month":"07","intvolume":" 202","abstract":[{"text":"We show for the first time that large-scale generative pretrained transformer (GPT) family models can be pruned to at least 50% sparsity in one-shot, without any retraining, at minimal loss of accuracy. This is achieved via a new pruning method called SparseGPT, specifically designed to work efficiently and accurately on massive GPT-family models. We can execute SparseGPT on the largest available open-source models, OPT-175B and BLOOM-176B, in under 4.5 hours, and can reach 60% unstructured sparsity with negligible increase in perplexity: remarkably, more than 100 billion weights from these models can be ignored at inference time. SparseGPT generalizes to semi-structured (2:4 and 4:8) patterns, and is compatible with weight quantization approaches. The code is available at: https://github.com/IST-DASLab/sparsegpt.","lang":"eng"}],"acknowledged_ssus":[{"_id":"ScienComp"}],"oa_version":"Preprint","author":[{"id":"09a8f98d-ec99-11ea-ae11-c063a7b7fe5f","first_name":"Elias","last_name":"Frantar","full_name":"Frantar, Elias"},{"last_name":"Alistarh","full_name":"Alistarh, Dan-Adrian","orcid":"0000-0003-3650-940X","id":"4A899BFC-F248-11E8-B48F-1D18A9856A87","first_name":"Dan-Adrian"}],"article_processing_charge":"No","external_id":{"arxiv":["2301.00774"]},"title":"SparseGPT: Massive language models can be accurately pruned in one-shot","citation":{"mla":"Frantar, Elias, and Dan-Adrian Alistarh. “SparseGPT: Massive Language Models Can Be Accurately Pruned in One-Shot.” Proceedings of the 40th International Conference on Machine Learning, vol. 202, ML Research Press, 2023, pp. 
10323–37.","apa":"Frantar, E., & Alistarh, D.-A. (2023). SparseGPT: Massive language models can be accurately pruned in one-shot. In Proceedings of the 40th International Conference on Machine Learning (Vol. 202, pp. 10323–10337). Honolulu, Hawaii, HI, United States: ML Research Press.","ama":"Frantar E, Alistarh D-A. SparseGPT: Massive language models can be accurately pruned in one-shot. In: Proceedings of the 40th International Conference on Machine Learning. Vol 202. ML Research Press; 2023:10323-10337.","short":"E. Frantar, D.-A. Alistarh, in:, Proceedings of the 40th International Conference on Machine Learning, ML Research Press, 2023, pp. 10323–10337.","ieee":"E. Frantar and D.-A. Alistarh, “SparseGPT: Massive language models can be accurately pruned in one-shot,” in Proceedings of the 40th International Conference on Machine Learning, Honolulu, Hawaii, HI, United States, 2023, vol. 202, pp. 10323–10337.","chicago":"Frantar, Elias, and Dan-Adrian Alistarh. “SparseGPT: Massive Language Models Can Be Accurately Pruned in One-Shot.” In Proceedings of the 40th International Conference on Machine Learning, 202:10323–37. ML Research Press, 2023.","ista":"Frantar E, Alistarh D-A. 2023. SparseGPT: Massive language models can be accurately pruned in one-shot. Proceedings of the 40th International Conference on Machine Learning. ICML: International Conference on Machine Learning, PMLR, vol. 
202, 10323–10337."},"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","project":[{"name":"Elastic Coordination for Scalable Machine Learning","grant_number":"805223","_id":"268A44D6-B435-11E9-9278-68D0E5697425","call_identifier":"H2020"}],"page":"10323-10337","date_published":"2023-07-30T00:00:00Z","date_created":"2023-10-29T23:01:16Z","year":"2023","day":"30","publication":"Proceedings of the 40th International Conference on Machine Learning","publisher":"ML Research Press","quality_controlled":"1","oa":1,"acknowledgement":"The authors gratefully acknowledge funding from the European Research Council (ERC) under the European Union’s Horizon 2020 programme (grant agreement No. 805223 ScaleML), as well as experimental support from Eldar Kurtic, and from the IST Austria IT department, in particular Stefano Elefante, Andrei Hornoiu, and Alois Schloegl."},{"title":"Multi-objective reward generalization: improving performance of Deep Reinforcement Learning for applications in single-asset trading","author":[{"full_name":"Cornalba, Federico","orcid":"0000-0002-6269-5149","last_name":"Cornalba","first_name":"Federico","id":"2CEB641C-A400-11E9-A717-D712E6697425"},{"first_name":"Constantin","last_name":"Disselkamp","full_name":"Disselkamp, Constantin"},{"first_name":"Davide","full_name":"Scassola, Davide","last_name":"Scassola"},{"full_name":"Helf, Christopher","last_name":"Helf","first_name":"Christopher"}],"external_id":{"arxiv":["2203.04579"]},"article_processing_charge":"Yes (via OA deal)","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","citation":{"mla":"Cornalba, Federico, et al. “Multi-Objective Reward Generalization: Improving Performance of Deep Reinforcement Learning for Applications in Single-Asset Trading.” Neural Computing and Applications, Springer Nature, 2023, doi:10.1007/s00521-023-09033-7.","ama":"Cornalba F, Disselkamp C, Scassola D, Helf C. 
Multi-objective reward generalization: improving performance of Deep Reinforcement Learning for applications in single-asset trading. Neural Computing and Applications. 2023. doi:10.1007/s00521-023-09033-7","apa":"Cornalba, F., Disselkamp, C., Scassola, D., & Helf, C. (2023). Multi-objective reward generalization: improving performance of Deep Reinforcement Learning for applications in single-asset trading. Neural Computing and Applications. Springer Nature. https://doi.org/10.1007/s00521-023-09033-7","short":"F. Cornalba, C. Disselkamp, D. Scassola, C. Helf, Neural Computing and Applications (2023).","ieee":"F. Cornalba, C. Disselkamp, D. Scassola, and C. Helf, “Multi-objective reward generalization: improving performance of Deep Reinforcement Learning for applications in single-asset trading,” Neural Computing and Applications. Springer Nature, 2023.","chicago":"Cornalba, Federico, Constantin Disselkamp, Davide Scassola, and Christopher Helf. “Multi-Objective Reward Generalization: Improving Performance of Deep Reinforcement Learning for Applications in Single-Asset Trading.” Neural Computing and Applications. Springer Nature, 2023. https://doi.org/10.1007/s00521-023-09033-7.","ista":"Cornalba F, Disselkamp C, Scassola D, Helf C. 2023. Multi-objective reward generalization: improving performance of Deep Reinforcement Learning for applications in single-asset trading. 
Neural Computing and Applications."},"project":[{"name":"Taming Complexity in Partial Differential Systems","grant_number":"F6504","_id":"fc31cba2-9c52-11eb-aca3-ff467d239cd2"},{"name":"ISTplus - Postdoctoral Fellowships","grant_number":"754411","call_identifier":"H2020","_id":"260C2330-B435-11E9-9278-68D0E5697425"}],"date_published":"2023-10-05T00:00:00Z","doi":"10.1007/s00521-023-09033-7","date_created":"2023-10-22T22:01:16Z","day":"05","publication":"Neural Computing and Applications","year":"2023","publisher":"Springer Nature","quality_controlled":"1","oa":1,"acknowledgement":"Open access funding provided by Università degli Studi di Trieste within the CRUI-CARE Agreement. Funding was provided by Austrian Science Fund (Grant No. F65), Horizon 2020 (Grant No. 754411) and Österreichische Forschungsförderungsgesellschaft.","department":[{"_id":"JuFi"}],"date_updated":"2023-10-31T10:58:28Z","status":"public","type":"journal_article","article_type":"original","_id":"14451","ec_funded":1,"language":[{"iso":"eng"}],"publication_identifier":{"eissn":["1433-3058"],"issn":["0941-0643"]},"publication_status":"epub_ahead","month":"10","scopus_import":"1","main_file_link":[{"open_access":"1","url":"https://doi.org/10.1007/s00521-023-09033-7"}],"oa_version":"Published Version","abstract":[{"lang":"eng","text":"We investigate the potential of Multi-Objective, Deep Reinforcement Learning for stock and cryptocurrency single-asset trading: in particular, we consider a Multi-Objective algorithm which generalizes the reward functions and discount factor (i.e., these components are not specified a priori, but incorporated in the learning process). Firstly, using several important assets (BTCUSD, ETHUSDT, XRPUSDT, AAPL, SPY, NIFTY50), we verify the reward generalization property of the proposed Multi-Objective algorithm, and provide preliminary statistical evidence showing increased predictive stability over the corresponding Single-Objective strategy. 
Secondly, we show that the Multi-Objective algorithm has a clear edge over the corresponding Single-Objective strategy when the reward mechanism is sparse (i.e., when non-null feedback is infrequent over time). Finally, we discuss the generalization properties with respect to the discount factor. The entirety of our code is provided in open-source format."}]},{"publisher":"Springer Nature","quality_controlled":"1","acknowledgement":"MR-V and RS are supported by Fondecyt Grant No. 1220536 and Millennium Science Initiative Program NCN19_170D of ANID, Chile. P.d.C. was supported by Scholarships Nos. 2021/10139-2 and 2022/13872-5 and ICTP-SAIFR Grant No. 2021/14335-0, all granted by São Paulo Research Foundation (FAPESP), Brazil.","date_created":"2023-10-22T22:01:13Z","date_published":"2023-10-01T00:00:00Z","doi":"10.1140/epje/s10189-023-00354-y","year":"2023","publication":"The European Physical Journal E","day":"01","article_number":"95","article_processing_charge":"No","external_id":{"pmid":["37819444"]},"author":[{"last_name":"Rojas Vega","full_name":"Rojas Vega, Mauricio Nicolas","id":"441e7207-f91f-11ec-b67c-9e6fe3d8fd6d","first_name":"Mauricio Nicolas"},{"first_name":"Pablo","last_name":"De Castro","full_name":"De Castro, Pablo"},{"full_name":"Soto, Rodrigo","last_name":"Soto","first_name":"Rodrigo"}],"title":"Mixtures of self-propelled particles interacting with asymmetric obstacles","citation":{"chicago":"Rojas Vega, Mauricio Nicolas, Pablo De Castro, and Rodrigo Soto. “Mixtures of Self-Propelled Particles Interacting with Asymmetric Obstacles.” The European Physical Journal E. Springer Nature, 2023. https://doi.org/10.1140/epje/s10189-023-00354-y.","ista":"Rojas Vega MN, De Castro P, Soto R. 2023. Mixtures of self-propelled particles interacting with asymmetric obstacles. The European Physical Journal E. 46(10), 95.","mla":"Rojas Vega, Mauricio Nicolas, et al. 
“Mixtures of Self-Propelled Particles Interacting with Asymmetric Obstacles.” The European Physical Journal E, vol. 46, no. 10, 95, Springer Nature, 2023, doi:10.1140/epje/s10189-023-00354-y.","ieee":"M. N. Rojas Vega, P. De Castro, and R. Soto, “Mixtures of self-propelled particles interacting with asymmetric obstacles,” The European Physical Journal E, vol. 46, no. 10. Springer Nature, 2023.","short":"M.N. Rojas Vega, P. De Castro, R. Soto, The European Physical Journal E 46 (2023).","ama":"Rojas Vega MN, De Castro P, Soto R. Mixtures of self-propelled particles interacting with asymmetric obstacles. The European Physical Journal E. 2023;46(10). doi:10.1140/epje/s10189-023-00354-y","apa":"Rojas Vega, M. N., De Castro, P., & Soto, R. (2023). Mixtures of self-propelled particles interacting with asymmetric obstacles. The European Physical Journal E. Springer Nature. https://doi.org/10.1140/epje/s10189-023-00354-y"},"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","scopus_import":"1","intvolume":" 46","month":"10","abstract":[{"text":"In the presence of an obstacle, active particles condensate into a surface “wetting” layer due to persistent motion. If the obstacle is asymmetric, a rectification current arises in addition to wetting. Asymmetric geometries are therefore commonly used to concentrate microorganisms like bacteria and sperms. However, most studies neglect the fact that biological active matter is diverse, composed of individuals with distinct self-propulsions. Using simulations, we study a mixture of “fast” and “slow” active Brownian disks in two dimensions interacting with large half-disk obstacles. With this prototypical obstacle geometry, we analyze how the stationary collective behavior depends on the degree of self-propulsion “diversity,” defined as proportional to the difference between the self-propulsion speeds, while keeping the average self-propulsion speed fixed. A wetting layer rich in fast particles arises. 
The rectification current is amplified by speed diversity due to a superlinear dependence of rectification on self-propulsion speed, which arises from cooperative effects. Thus, the total rectification current cannot be obtained from an effective one-component active fluid with the same average self-propulsion speed, highlighting the importance of considering diversity in active matter.","lang":"eng"}],"pmid":1,"oa_version":"None","issue":"10","volume":46,"publication_status":"published","publication_identifier":{"eissn":["1292-895X"],"issn":["1292-8941"]},"language":[{"iso":"eng"}],"article_type":"original","type":"journal_article","status":"public","_id":"14442","department":[{"_id":"AnSa"}],"date_updated":"2023-10-31T11:16:41Z"},{"citation":{"ista":"Kwan MA, Sah A, Sawhney M, Simkin M. 2023. Substructures in Latin squares. Israel Journal of Mathematics. 256(2), 363–416.","chicago":"Kwan, Matthew Alan, Ashwin Sah, Mehtaab Sawhney, and Michael Simkin. “Substructures in Latin Squares.” Israel Journal of Mathematics. Springer Nature, 2023. https://doi.org/10.1007/s11856-023-2513-9.","apa":"Kwan, M. A., Sah, A., Sawhney, M., & Simkin, M. (2023). Substructures in Latin squares. Israel Journal of Mathematics. Springer Nature. https://doi.org/10.1007/s11856-023-2513-9","ama":"Kwan MA, Sah A, Sawhney M, Simkin M. Substructures in Latin squares. Israel Journal of Mathematics. 2023;256(2):363-416. doi:10.1007/s11856-023-2513-9","ieee":"M. A. Kwan, A. Sah, M. Sawhney, and M. Simkin, “Substructures in Latin squares,” Israel Journal of Mathematics, vol. 256, no. 2. Springer Nature, pp. 363–416, 2023.","short":"M.A. Kwan, A. Sah, M. Sawhney, M. Simkin, Israel Journal of Mathematics 256 (2023) 363–416.","mla":"Kwan, Matthew Alan, et al. “Substructures in Latin Squares.” Israel Journal of Mathematics, vol. 256, no. 2, Springer Nature, 2023, pp. 
363–416, doi:10.1007/s11856-023-2513-9."},"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","article_processing_charge":"Yes (in subscription journal)","external_id":{"arxiv":["2202.05088"]},"author":[{"first_name":"Matthew Alan","id":"5fca0887-a1db-11eb-95d1-ca9d5e0453b3","last_name":"Kwan","full_name":"Kwan, Matthew Alan","orcid":"0000-0002-4003-7567"},{"first_name":"Ashwin","last_name":"Sah","full_name":"Sah, Ashwin"},{"last_name":"Sawhney","full_name":"Sawhney, Mehtaab","first_name":"Mehtaab"},{"first_name":"Michael","last_name":"Simkin","full_name":"Simkin, Michael"}],"title":"Substructures in Latin squares","acknowledgement":"Sah and Sawhney were supported by NSF Graduate Research Fellowship Program DGE-1745302. Sah was supported by the PD Soros Fellowship. Simkin was supported by the Center of Mathematical Sciences and Applications at Harvard University.","oa":1,"publisher":"Springer Nature","quality_controlled":"1","year":"2023","publication":"Israel Journal of Mathematics","day":"01","page":"363-416","date_created":"2023-10-22T22:01:14Z","doi":"10.1007/s11856-023-2513-9","date_published":"2023-09-01T00:00:00Z","_id":"14444","article_type":"original","type":"journal_article","status":"public","date_updated":"2023-10-31T11:27:30Z","department":[{"_id":"MaKw"}],"abstract":[{"text":"We prove several results about substructures in Latin squares. First, we explain how to adapt our recent work on high-girth Steiner triple systems to the setting of Latin squares, resolving a conjecture of Linial that there exist Latin squares with arbitrarily high girth. As a consequence, we see that the number of order- n Latin squares with no intercalate (i.e., no 2×2 Latin subsquare) is at least (e^{−9/4}n−o(n))^{n^2}. Equivalently, P[N=0]≥e^{−n^2/4−o(n^2)}=e^{−(1+o(1))EN}\r\n , where N is the number of intercalates in a uniformly random order- n Latin square. 
\r\nIn fact, extending recent work of Kwan, Sah, and Sawhney, we resolve the general large-deviation problem for intercalates in random Latin squares, up to constant factors in the exponent: for any constant 0<δ≤1 we have P[N≤(1−δ)EN]=exp(−Θ(n^2)) and for any constant δ>0 we have P[N≥(1+δ)EN]=exp(−Θ(n^{4/3} log n)). \r\nFinally, as an application of some new general tools for studying substructures in random Latin squares, we show that in almost all order- n Latin squares, the number of cuboctahedra (i.e., the number of pairs of possibly degenerate 2×2 submatrices with the same arrangement of symbols) is of order n^4, which is the minimum possible. As observed by Gowers and Long, this number can be interpreted as measuring ``how associative'' the quasigroup associated with the Latin square is.","lang":"eng"}],"oa_version":"Preprint","main_file_link":[{"url":"https://doi.org/10.48550/arXiv.2202.05088","open_access":"1"}],"scopus_import":"1","intvolume":" 256","month":"09","publication_status":"published","publication_identifier":{"issn":["0021-2172"],"eissn":["1565-8511"]},"language":[{"iso":"eng"}],"volume":256,"issue":"2"},{"date_created":"2023-10-29T23:01:15Z","doi":"10.1007/978-3-031-44267-4_15","date_published":"2023-10-01T00:00:00Z","page":"291-311","publication":"23rd International Conference on Runtime Verification","day":"01","year":"2023","oa":1,"publisher":"Springer Nature","quality_controlled":"1","acknowledgement":"This work is supported by the European Research Council under Grant No.: ERC-2020-AdG 101020093.","title":"Monitoring algorithmic fairness under partial observations","external_id":{"arxiv":["2308.00341"]},"article_processing_charge":"No","author":[{"id":"40876CD8-F248-11E8-B48F-1D18A9856A87","first_name":"Thomas A","orcid":"0000-0002-2985-7724","full_name":"Henzinger, Thomas A","last_name":"Henzinger"},{"orcid":"0000-0001-8974-2542","full_name":"Kueffner, 
Konstantin","last_name":"Kueffner","id":"8121a2d0-dc85-11ea-9058-af578f3b4515","first_name":"Konstantin"},{"orcid":"0000-0001-9864-7475","full_name":"Mallik, Kaushik","last_name":"Mallik","first_name":"Kaushik","id":"0834ff3c-6d72-11ec-94e0-b5b0a4fb8598"}],"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","citation":{"mla":"Henzinger, Thomas A., et al. “Monitoring Algorithmic Fairness under Partial Observations.” 23rd International Conference on Runtime Verification, vol. 14245, Springer Nature, 2023, pp. 291–311, doi:10.1007/978-3-031-44267-4_15.","apa":"Henzinger, T. A., Kueffner, K., & Mallik, K. (2023). Monitoring algorithmic fairness under partial observations. In 23rd International Conference on Runtime Verification (Vol. 14245, pp. 291–311). Thessaloniki, Greece: Springer Nature. https://doi.org/10.1007/978-3-031-44267-4_15","ama":"Henzinger TA, Kueffner K, Mallik K. Monitoring algorithmic fairness under partial observations. In: 23rd International Conference on Runtime Verification. Vol 14245. Springer Nature; 2023:291-311. doi:10.1007/978-3-031-44267-4_15","short":"T.A. Henzinger, K. Kueffner, K. Mallik, in:, 23rd International Conference on Runtime Verification, Springer Nature, 2023, pp. 291–311.","ieee":"T. A. Henzinger, K. Kueffner, and K. Mallik, “Monitoring algorithmic fairness under partial observations,” in 23rd International Conference on Runtime Verification, Thessaloniki, Greece, 2023, vol. 14245, pp. 291–311.","chicago":"Henzinger, Thomas A, Konstantin Kueffner, and Kaushik Mallik. “Monitoring Algorithmic Fairness under Partial Observations.” In 23rd International Conference on Runtime Verification, 14245:291–311. Springer Nature, 2023. https://doi.org/10.1007/978-3-031-44267-4_15.","ista":"Henzinger TA, Kueffner K, Mallik K. 2023. Monitoring algorithmic fairness under partial observations. 23rd International Conference on Runtime Verification. RV: Conference on Runtime Verification, LNCS, vol. 
14245, 291–311."},"project":[{"grant_number":"101020093","name":"Vigilant Algorithmic Monitoring of Software","call_identifier":"H2020","_id":"62781420-2b32-11ec-9570-8d9b63373d4d"}],"ec_funded":1,"volume":14245,"language":[{"iso":"eng"}],"publication_status":"published","publication_identifier":{"issn":["0302-9743"],"eissn":["1611-3349"],"isbn":["9783031442667"]},"intvolume":" 14245","month":"10","main_file_link":[{"url":"https://doi.org/10.48550/arXiv.2308.00341","open_access":"1"}],"scopus_import":"1","alternative_title":["LNCS"],"oa_version":"Preprint","abstract":[{"lang":"eng","text":"As AI and machine-learned software are used increasingly for making decisions that affect humans, it is imperative that they remain fair and unbiased in their decisions. To complement design-time bias mitigation measures, runtime verification techniques have been introduced recently to monitor the algorithmic fairness of deployed systems. Previous monitoring techniques assume full observability of the states of the (unknown) monitored system. Moreover, they can monitor only fairness properties that are specified as arithmetic expressions over the probabilities of different events. In this work, we extend fairness monitoring to systems modeled as partially observed Markov chains (POMC), and to specifications containing arithmetic expressions over the expected values of numerical functions on event sequences. The only assumptions we make are that the underlying POMC is aperiodic and starts in the stationary distribution, with a bound on its mixing time being known. These assumptions enable us to estimate a given property for the entire distribution of possible executions of the monitored POMC, by observing only a single execution. Our monitors observe a long run of the system and, after each new observation, output updated PAC-estimates of how fair or biased the system is. 
The monitors are computationally lightweight and, using a prototype implementation, we demonstrate their effectiveness on several real-world examples."}],"department":[{"_id":"ToHe"}],"date_updated":"2023-10-31T11:48:20Z","status":"public","conference":{"name":"RV: Conference on Runtime Verification","start_date":"2023-10-03","location":"Thessaloniki, Greece","end_date":"2023-10-06"},"type":"conference","_id":"14454"},{"date_created":"2023-10-22T22:01:15Z","date_published":"2023-08-01T00:00:00Z","doi":"10.2478/msr-2023-0023","page":"175-183","publication":"Measurement Science Review","day":"01","year":"2023","has_accepted_license":"1","oa":1,"publisher":"Sciendo","quality_controlled":"1","acknowledgement":"The work was supported by the Scientific Grant Agency of the Ministry of Education of the Slovak Republic and the Slovak Academy of Sciences, projects APVV-21-0216, VEGA2-0096-21 and VEGA 2-0023-22.","title":"Against the flow of time with multi-output models","article_processing_charge":"Yes","author":[{"first_name":"Jozef","full_name":"Jakubík, Jozef","last_name":"Jakubík"},{"last_name":"Bui Thi Mai","full_name":"Bui Thi Mai, Phuong","first_name":"Phuong","id":"3EC6EE64-F248-11E8-B48F-1D18A9856A87"},{"first_name":"Martina","full_name":"Chvosteková, Martina","last_name":"Chvosteková"},{"full_name":"Krakovská, Anna","last_name":"Krakovská","first_name":"Anna"}],"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","citation":{"short":"J. Jakubík, M. Phuong, M. Chvosteková, A. Krakovská, Measurement Science Review 23 (2023) 175–183.","ieee":"J. Jakubík, M. Phuong, M. Chvosteková, and A. Krakovská, “Against the flow of time with multi-output models,” Measurement Science Review, vol. 23, no. 4. Sciendo, pp. 175–183, 2023.","apa":"Jakubík, J., Phuong, M., Chvosteková, M., & Krakovská, A. (2023). Against the flow of time with multi-output models. Measurement Science Review. Sciendo. 
https://doi.org/10.2478/msr-2023-0023","ama":"Jakubík J, Phuong M, Chvosteková M, Krakovská A. Against the flow of time with multi-output models. Measurement Science Review. 2023;23(4):175-183. doi:10.2478/msr-2023-0023","mla":"Jakubík, Jozef, et al. “Against the Flow of Time with Multi-Output Models.” Measurement Science Review, vol. 23, no. 4, Sciendo, 2023, pp. 175–83, doi:10.2478/msr-2023-0023.","ista":"Jakubík J, Phuong M, Chvosteková M, Krakovská A. 2023. Against the flow of time with multi-output models. Measurement Science Review. 23(4), 175–183.","chicago":"Jakubík, Jozef, Mary Phuong, Martina Chvosteková, and Anna Krakovská. “Against the Flow of Time with Multi-Output Models.” Measurement Science Review. Sciendo, 2023. https://doi.org/10.2478/msr-2023-0023."},"issue":"4","volume":23,"language":[{"iso":"eng"}],"file":[{"success":1,"file_id":"14476","checksum":"b069cc10fa6a7c96b2bc9f728165f9e6","relation":"main_file","access_level":"open_access","content_type":"application/pdf","file_name":"2023_MeasurementScienceRev_Jakubik.pdf","date_created":"2023-10-31T12:07:23Z","creator":"dernst","file_size":2639783,"date_updated":"2023-10-31T12:07:23Z"}],"publication_status":"published","publication_identifier":{"eissn":["1335-8871"]},"intvolume":" 23","month":"08","scopus_import":"1","oa_version":"Published Version","abstract":[{"lang":"eng","text":"Recent work has paid close attention to the first principle of Granger causality, according to which cause precedes effect. In this context, the question may arise whether the detected direction of causality also reverses after the time reversal of unidirectionally coupled data. Recently, it has been shown that for unidirectionally causally connected autoregressive (AR) processes X → Y, after time reversal of data, the opposite causal direction Y → X is indeed detected, although typically as part of the bidirectional X↔ Y link. 
As we argue here, the answer is different when the measured data are not from AR processes but from linked deterministic systems. When the goal is the usual forward data analysis, cross-mapping-like approaches correctly detect X → Y, while Granger causality-like approaches, which should not be used for deterministic time series, detect causal independence X → Y. The results of backward causal analysis depend on the predictability of the reversed data. Unlike AR processes, observables from deterministic dynamical systems, even complex nonlinear ones, can be predicted well forward, while backward predictions can be difficult (notably when the time reversal of a function leads to one-to-many relations). To address this problem, we propose an approach based on models that provide multiple candidate predictions for the target, combined with a loss function that considers only the best candidate. The resulting good forward and backward predictability supports the view that unidirectionally causally linked deterministic dynamical systems X → Y can be expected to detect the same link both before and after time reversal."}],"file_date_updated":"2023-10-31T12:07:23Z","department":[{"_id":"ChLa"}],"ddc":["510"],"date_updated":"2023-10-31T12:12:47Z","status":"public","tmp":{"short":"CC BY-NC-ND (4.0)","name":"Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International (CC BY-NC-ND 4.0)","legal_code_url":"https://creativecommons.org/licenses/by-nc-nd/4.0/legalcode","image":"/images/cc_by_nc_nd.png"},"type":"journal_article","article_type":"original","_id":"14446"},{"article_processing_charge":"No","external_id":{"pmid":["37610741"]},"author":[{"first_name":"Gunter","last_name":"Schumann","full_name":"Schumann, Gunter"},{"full_name":"Andreassen, Ole A.","last_name":"Andreassen","first_name":"Ole A."},{"first_name":"Tobias","last_name":"Banaschewski","full_name":"Banaschewski, Tobias"},{"first_name":"Vince D.","last_name":"Calhoun","full_name":"Calhoun, Vince 
D."},{"first_name":"Nicholas","last_name":"Clinton","full_name":"Clinton, Nicholas"},{"first_name":"Sylvane","full_name":"Desrivieres, Sylvane","last_name":"Desrivieres"},{"first_name":"Ragnhild Eek","last_name":"Brandlistuen","full_name":"Brandlistuen, Ragnhild Eek"},{"first_name":"Jianfeng","last_name":"Feng","full_name":"Feng, Jianfeng"},{"first_name":"Soeren","last_name":"Hese","full_name":"Hese, Soeren"},{"full_name":"Hitchen, Esther","last_name":"Hitchen","first_name":"Esther"},{"full_name":"Hoffmann, Per","last_name":"Hoffmann","first_name":"Per"},{"first_name":"Tianye","last_name":"Jia","full_name":"Jia, Tianye"},{"first_name":"Viktor","last_name":"Jirsa","full_name":"Jirsa, Viktor"},{"full_name":"Marquand, Andre F.","last_name":"Marquand","first_name":"Andre F."},{"last_name":"Nees","full_name":"Nees, Frauke","first_name":"Frauke"},{"first_name":"Markus M.","last_name":"Nöthen","full_name":"Nöthen, Markus M."},{"full_name":"Novarino, Gaia","orcid":"0000-0002-7673-7178","last_name":"Novarino","id":"3E57A680-F248-11E8-B48F-1D18A9856A87","first_name":"Gaia"},{"first_name":"Elli","full_name":"Polemiti, Elli","last_name":"Polemiti"},{"first_name":"Markus","last_name":"Ralser","full_name":"Ralser, Markus"},{"first_name":"Michael","last_name":"Rapp","full_name":"Rapp, Michael"},{"first_name":"Kerstin","full_name":"Schepanski, Kerstin","last_name":"Schepanski"},{"first_name":"Tamara","full_name":"Schikowski, Tamara","last_name":"Schikowski"},{"full_name":"Slater, Mel","last_name":"Slater","first_name":"Mel"},{"full_name":"Sommer, Peter","last_name":"Sommer","first_name":"Peter"},{"first_name":"Bernd Carsten","last_name":"Stahl","full_name":"Stahl, Bernd Carsten"},{"last_name":"Thompson","full_name":"Thompson, Paul M.","first_name":"Paul M."},{"last_name":"Twardziok","full_name":"Twardziok, Sven","first_name":"Sven"},{"last_name":"Van Der Meer","full_name":"Van Der Meer, Dennis","first_name":"Dennis"},{"full_name":"Walter, 
Henrik","last_name":"Walter","first_name":"Henrik"},{"last_name":"Westlye","full_name":"Westlye, Lars","first_name":"Lars"}],"title":"Addressing global environmental challenges to mental health using population neuroscience: A review","citation":{"mla":"Schumann, Gunter, et al. “Addressing Global Environmental Challenges to Mental Health Using Population Neuroscience: A Review.” JAMA Psychiatry, vol. 80, no. 10, American Medical Association, 2023, pp. 1066–74, doi:10.1001/jamapsychiatry.2023.2996.","apa":"Schumann, G., Andreassen, O. A., Banaschewski, T., Calhoun, V. D., Clinton, N., Desrivieres, S., … Westlye, L. (2023). Addressing global environmental challenges to mental health using population neuroscience: A review. JAMA Psychiatry. American Medical Association. https://doi.org/10.1001/jamapsychiatry.2023.2996","ama":"Schumann G, Andreassen OA, Banaschewski T, et al. Addressing global environmental challenges to mental health using population neuroscience: A review. JAMA Psychiatry. 2023;80(10):1066-1074. doi:10.1001/jamapsychiatry.2023.2996","short":"G. Schumann, O.A. Andreassen, T. Banaschewski, V.D. Calhoun, N. Clinton, S. Desrivieres, R.E. Brandlistuen, J. Feng, S. Hese, E. Hitchen, P. Hoffmann, T. Jia, V. Jirsa, A.F. Marquand, F. Nees, M.M. Nöthen, G. Novarino, E. Polemiti, M. Ralser, M. Rapp, K. Schepanski, T. Schikowski, M. Slater, P. Sommer, B.C. Stahl, P.M. Thompson, S. Twardziok, D. Van Der Meer, H. Walter, L. Westlye, JAMA Psychiatry 80 (2023) 1066–1074.","ieee":"G. Schumann et al., “Addressing global environmental challenges to mental health using population neuroscience: A review,” JAMA Psychiatry, vol. 80, no. 10. American Medical Association, pp. 1066–1074, 2023.","chicago":"Schumann, Gunter, Ole A. Andreassen, Tobias Banaschewski, Vince D. Calhoun, Nicholas Clinton, Sylvane Desrivieres, Ragnhild Eek Brandlistuen, et al. “Addressing Global Environmental Challenges to Mental Health Using Population Neuroscience: A Review.” JAMA Psychiatry. 
American Medical Association, 2023. https://doi.org/10.1001/jamapsychiatry.2023.2996.","ista":"Schumann G, Andreassen OA, Banaschewski T, Calhoun VD, Clinton N, Desrivieres S, Brandlistuen RE, Feng J, Hese S, Hitchen E, Hoffmann P, Jia T, Jirsa V, Marquand AF, Nees F, Nöthen MM, Novarino G, Polemiti E, Ralser M, Rapp M, Schepanski K, Schikowski T, Slater M, Sommer P, Stahl BC, Thompson PM, Twardziok S, Van Der Meer D, Walter H, Westlye L. 2023. Addressing global environmental challenges to mental health using population neuroscience: A review. JAMA Psychiatry. 80(10), 1066–1074."},"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","quality_controlled":"1","publisher":"American Medical Association","page":"1066-1074","date_created":"2023-10-22T22:01:14Z","doi":"10.1001/jamapsychiatry.2023.2996","date_published":"2023-10-01T00:00:00Z","year":"2023","publication":"JAMA Psychiatry","day":"01","article_type":"review","type":"journal_article","status":"public","_id":"14443","department":[{"_id":"GaNo"}],"date_updated":"2023-10-31T12:17:20Z","scopus_import":"1","intvolume":" 80","month":"10","abstract":[{"text":"Importance Climate change, pollution, urbanization, socioeconomic inequality, and psychosocial effects of the COVID-19 pandemic have caused massive changes in environmental conditions that affect brain health during the life span, both on a population level as well as on the level of the individual. How these environmental factors influence the brain, behavior, and mental illness is not well known.\r\nObservations A research strategy enabling population neuroscience to contribute to identify brain mechanisms underlying environment-related mental illness by leveraging innovative enrichment tools for data federation, geospatial observation, climate and pollution measures, digital health, and novel data integration techniques is described. 
This strategy can inform innovative treatments that target causal cognitive and molecular mechanisms of mental illness related to the environment. An example is presented of the environMENTAL Project that is leveraging federated cohort data of over 1.5 million European citizens and patients enriched with deep phenotyping data from large-scale behavioral neuroimaging cohorts to identify brain mechanisms related to environmental adversity underlying symptoms of depression, anxiety, stress, and substance misuse.\r\nConclusions and Relevance This research will lead to the development of objective biomarkers and evidence-based interventions that will significantly improve outcomes of environment-related mental illness.","lang":"eng"}],"oa_version":"None","pmid":1,"volume":80,"issue":"10","publication_status":"published","publication_identifier":{"eissn":["2168-6238"]},"language":[{"iso":"eng"}]}]