[{"oa_version":"Submitted Version","user_id":"c635000d-4b10-11ee-a964-aac5a93f6ac1","_id":"150","intvolume":" 560","title":"Inositol phosphates are assembly co-factors for HIV-1","status":"public","issue":"7719","abstract":[{"lang":"eng","text":"A short, 14-amino-acid segment called SP1, located in the Gag structural protein1, has a critical role during the formation of the HIV-1 virus particle. During virus assembly, the SP1 peptide and seven preceding residues fold into a six-helix bundle, which holds together the Gag hexamer and facilitates the formation of a curved immature hexagonal lattice underneath the viral membrane2,3. Upon completion of assembly and budding, proteolytic cleavage of Gag leads to virus maturation, in which the immature lattice is broken down; the liberated CA domain of Gag then re-assembles into the mature conical capsid that encloses the viral genome and associated enzymes. Folding and proteolysis of the six-helix bundle are crucial rate-limiting steps of both Gag assembly and disassembly, and the six-helix bundle is an established target of HIV-1 inhibitors4,5. Here, using a combination of structural and functional analyses, we show that inositol hexakisphosphate (InsP6, also known as IP6) facilitates the formation of the six-helix bundle and assembly of the immature HIV-1 Gag lattice. IP6 makes ionic contacts with two rings of lysine residues at the centre of the Gag hexamer. Proteolytic cleavage then unmasks an alternative binding site, where IP6 interaction promotes the assembly of the mature capsid lattice. These studies identify IP6 as a naturally occurring small molecule that promotes both assembly and maturation of HIV-1."}],"type":"journal_article","date_published":"2018-08-29T00:00:00Z","citation":{"short":"R. Dick, K.K. Zadrozny, C. Xu, F.K. Schur, T.D. Lyddon, C.L. Ricana, J.M. Wagner, J.R. Perilla, P.B.K. Ganser, M.C. Johnson, O. Pornillos, V. Vogt, Nature 560 (2018) 509–512.","mla":"Dick, Robert, et al. 
“Inositol Phosphates Are Assembly Co-Factors for HIV-1.” Nature, vol. 560, no. 7719, Nature Publishing Group, 2018, pp. 509–512, doi:10.1038/s41586-018-0396-4.","chicago":"Dick, Robert, Kaneil K Zadrozny, Chaoyi Xu, Florian KM Schur, Terri D Lyddon, Clifton L Ricana, Jonathan M Wagner, et al. “Inositol Phosphates Are Assembly Co-Factors for HIV-1.” Nature. Nature Publishing Group, 2018. https://doi.org/10.1038/s41586-018-0396-4.","ama":"Dick R, Zadrozny KK, Xu C, et al. Inositol phosphates are assembly co-factors for HIV-1. Nature. 2018;560(7719):509–512. doi:10.1038/s41586-018-0396-4","apa":"Dick, R., Zadrozny, K. K., Xu, C., Schur, F. K., Lyddon, T. D., Ricana, C. L., … Vogt, V. (2018). Inositol phosphates are assembly co-factors for HIV-1. Nature. Nature Publishing Group. https://doi.org/10.1038/s41586-018-0396-4","ieee":"R. Dick et al., “Inositol phosphates are assembly co-factors for HIV-1,” Nature, vol. 560, no. 7719. Nature Publishing Group, pp. 509–512, 2018.","ista":"Dick R, Zadrozny KK, Xu C, Schur FK, Lyddon TD, Ricana CL, Wagner JM, Perilla JR, Ganser PBK, Johnson MC, Pornillos O, Vogt V. 2018. Inositol phosphates are assembly co-factors for HIV-1. Nature. 
560(7719), 509–512."},"publication":"Nature","page":"509–512","article_type":"original","article_processing_charge":"No","day":"29","scopus_import":"1","related_material":{"link":[{"relation":"erratum","url":"https://doi.org/10.1038/s41586-018-0505-4"}]},"author":[{"full_name":"Dick, Robert","first_name":"Robert","last_name":"Dick"},{"full_name":"Zadrozny, Kaneil K","last_name":"Zadrozny","first_name":"Kaneil K"},{"last_name":"Xu","first_name":"Chaoyi","full_name":"Xu, Chaoyi"},{"id":"48AD8942-F248-11E8-B48F-1D18A9856A87","orcid":"0000-0003-4790-8078","first_name":"Florian","last_name":"Schur","full_name":"Schur, Florian"},{"first_name":"Terri D","last_name":"Lyddon","full_name":"Lyddon, Terri D"},{"full_name":"Ricana, Clifton L","first_name":"Clifton L","last_name":"Ricana"},{"full_name":"Wagner, Jonathan M","first_name":"Jonathan M","last_name":"Wagner"},{"full_name":"Perilla, Juan R","first_name":"Juan R","last_name":"Perilla"},{"full_name":"Ganser, Pornillos Barbie K","first_name":"Pornillos Barbie K","last_name":"Ganser"},{"full_name":"Johnson, Marc C","first_name":"Marc C","last_name":"Johnson"},{"first_name":"Owen","last_name":"Pornillos","full_name":"Pornillos, Owen"},{"last_name":"Vogt","first_name":"Volker","full_name":"Vogt, Volker"}],"volume":560,"date_created":"2018-12-11T11:44:53Z","date_updated":"2023-09-12T07:44:37Z","pmid":1,"year":"2018","publisher":"Nature Publishing Group","department":[{"_id":"FlSc"}],"publication_status":"published","doi":"10.1038/s41586-018-0396-4","language":[{"iso":"eng"}],"main_file_link":[{"open_access":"1","url":"https://www.ncbi.nlm.nih.gov/pmc/articles/PMC6242333/"}],"external_id":{"pmid":["30158708"],"isi":["000442483400046"]},"oa":1,"isi":1,"quality_controlled":"1","publication_identifier":{"eissn":["1476-4687"]},"month":"08"},{"user_id":"c635000d-4b10-11ee-a964-aac5a93f6ac1","_id":"303","intvolume":" 38","title":"Introduction to tropical series and wave dynamic on them","status":"public","oa_version":"Submitted 
Version","type":"journal_article","issue":"6","abstract":[{"lang":"eng","text":"The theory of tropical series, that we develop here, firstly appeared in the study of the growth of pluriharmonic functions. Motivated by waves in sandpile models we introduce a dynamic on the set of tropical series, and it is experimentally observed that this dynamic obeys a power law. So, this paper serves as a compilation of results we need for other articles and also introduces several objects interesting by themselves."}],"citation":{"chicago":"Kalinin, Nikita, and Mikhail Shkolnikov. “Introduction to Tropical Series and Wave Dynamic on Them.” Discrete and Continuous Dynamical Systems- Series A. AIMS, 2018. https://doi.org/10.3934/dcds.2018120.","short":"N. Kalinin, M. Shkolnikov, Discrete and Continuous Dynamical Systems- Series A 38 (2018) 2827–2849.","mla":"Kalinin, Nikita, and Mikhail Shkolnikov. “Introduction to Tropical Series and Wave Dynamic on Them.” Discrete and Continuous Dynamical Systems- Series A, vol. 38, no. 6, AIMS, 2018, pp. 2827–49, doi:10.3934/dcds.2018120.","apa":"Kalinin, N., & Shkolnikov, M. (2018). Introduction to tropical series and wave dynamic on them. Discrete and Continuous Dynamical Systems- Series A. AIMS. https://doi.org/10.3934/dcds.2018120","ieee":"N. Kalinin and M. Shkolnikov, “Introduction to tropical series and wave dynamic on them,” Discrete and Continuous Dynamical Systems- Series A, vol. 38, no. 6. AIMS, pp. 2827–2849, 2018.","ista":"Kalinin N, Shkolnikov M. 2018. Introduction to tropical series and wave dynamic on them. Discrete and Continuous Dynamical Systems- Series A. 38(6), 2827–2849.","ama":"Kalinin N, Shkolnikov M. Introduction to tropical series and wave dynamic on them. Discrete and Continuous Dynamical Systems- Series A. 2018;38(6):2827-2849. 
doi:10.3934/dcds.2018120"},"publication":"Discrete and Continuous Dynamical Systems- Series A","page":"2827 - 2849","date_published":"2018-06-01T00:00:00Z","scopus_import":"1","article_processing_charge":"No","day":"01","acknowledgement":"The first author, Nikita Kalinin, is funded by SNCF PostDoc.Mobility grant 168647. Support from the Basic Research Program of the National Research University Higher School of Economics is gratefully acknowledged. The second author, Mikhail Shkolnikov, is supported in part by the grant 159240 of the Swiss National Science Foundation as well as by the National Center of Competence in Research SwissMAP of the Swiss National Science Foundation.","year":"2018","publisher":"AIMS","department":[{"_id":"TaHa"}],"publication_status":"published","author":[{"first_name":"Nikita","last_name":"Kalinin","full_name":"Kalinin, Nikita"},{"orcid":"0000-0002-4310-178X","id":"35084A62-F248-11E8-B48F-1D18A9856A87","last_name":"Shkolnikov","first_name":"Mikhail","full_name":"Shkolnikov, Mikhail"}],"volume":38,"date_updated":"2023-09-12T07:45:37Z","date_created":"2018-12-11T11:45:43Z","publist_id":"7576","oa":1,"external_id":{"arxiv":["1706.03062"],"isi":["000438818400007"]},"main_file_link":[{"open_access":"1","url":"https://arxiv.org/abs/1706.03062"}],"quality_controlled":"1","isi":1,"doi":"10.3934/dcds.2018120","language":[{"iso":"eng"}],"month":"06"},{"month":"06","publication_identifier":{"isbn":["9781510884472"],"eissn":["1049-5258"]},"language":[{"iso":"eng"}],"conference":{"name":"NeurIPS: Neural Information Processing Systems","location":"Montreal, Canada","start_date":"2018-12-03","end_date":"2018-12-08"},"quality_controlled":"1","main_file_link":[{"open_access":"1","url":"https://arxiv.org/abs/1806.02185"}],"oa":1,"external_id":{"arxiv":["1806.02185"]},"extern":"1","date_created":"2023-08-22T14:15:40Z","date_updated":"2023-09-13T07:38:24Z","volume":31,"author":[{"full_name":"Locatello, 
Francesco","id":"26cfd52f-2483-11ee-8040-88983bcc06d4","orcid":"0000-0002-4850-0683","first_name":"Francesco","last_name":"Locatello"},{"first_name":"Gideon","last_name":"Dresdner","full_name":"Dresdner, Gideon"},{"full_name":"Khanna, Rajiv","last_name":"Khanna","first_name":"Rajiv"},{"last_name":"Valera","first_name":"Isabel","full_name":"Valera, Isabel"},{"full_name":"Rätsch, Gunnar","last_name":"Rätsch","first_name":"Gunnar"}],"publication_status":"published","publisher":"Neural Information Processing Systems Foundation","department":[{"_id":"FrLo"}],"year":"2018","day":"06","article_processing_charge":"No","scopus_import":"1","date_published":"2018-06-06T00:00:00Z","publication":"Advances in Neural Information Processing Systems","citation":{"apa":"Locatello, F., Dresdner, G., Khanna, R., Valera, I., & Rätsch, G. (2018). Boosting black box variational inference. In Advances in Neural Information Processing Systems (Vol. 31). Montreal, Canada: Neural Information Processing Systems Foundation.","ieee":"F. Locatello, G. Dresdner, R. Khanna, I. Valera, and G. Rätsch, “Boosting black box variational inference,” in Advances in Neural Information Processing Systems, Montreal, Canada, 2018, vol. 31.","ista":"Locatello F, Dresdner G, Khanna R, Valera I, Rätsch G. 2018. Boosting black box variational inference. Advances in Neural Information Processing Systems. NeurIPS: Neural Information Processing Systems vol. 31.","ama":"Locatello F, Dresdner G, Khanna R, Valera I, Rätsch G. Boosting black box variational inference. In: Advances in Neural Information Processing Systems. Vol 31. Neural Information Processing Systems Foundation; 2018.","chicago":"Locatello, Francesco, Gideon Dresdner, Rajiv Khanna, Isabel Valera, and Gunnar Rätsch. “Boosting Black Box Variational Inference.” In Advances in Neural Information Processing Systems, Vol. 31. Neural Information Processing Systems Foundation, 2018.","short":"F. Locatello, G. Dresdner, R. Khanna, I. Valera, G. 
Rätsch, in:, Advances in Neural Information Processing Systems, Neural Information Processing Systems Foundation, 2018.","mla":"Locatello, Francesco, et al. “Boosting Black Box Variational Inference.” Advances in Neural Information Processing Systems, vol. 31, Neural Information Processing Systems Foundation, 2018."},"abstract":[{"lang":"eng","text":"Approximating a probability density in a tractable manner is a central task\r\nin Bayesian statistics. Variational Inference (VI) is a popular technique that\r\nachieves tractability by choosing a relatively simple variational family.\r\nBorrowing ideas from the classic boosting framework, recent approaches attempt\r\nto \\emph{boost} VI by replacing the selection of a single density with a\r\ngreedily constructed mixture of densities. In order to guarantee convergence,\r\nprevious works impose stringent assumptions that require significant effort for\r\npractitioners. Specifically, they require a custom implementation of the greedy\r\nstep (called the LMO) for every probabilistic model with respect to an\r\nunnatural variational family of truncated distributions. Our work fixes these\r\nissues with novel theoretical and algorithmic insights. On the theoretical\r\nside, we show that boosting VI satisfies a relaxed smoothness assumption which\r\nis sufficient for the convergence of the functional Frank-Wolfe (FW) algorithm.\r\nFurthermore, we rephrase the LMO problem and propose to maximize the Residual\r\nELBO (RELBO) which replaces the standard ELBO optimization in VI. These\r\ntheoretical enhancements allow for black box implementation of the boosting\r\nsubroutine. 
Finally, we present a stopping criterion drawn from the duality gap\r\nin the classic FW analyses and exhaustive experiments to illustrate the\r\nusefulness of our theoretical and algorithmic contributions."}],"type":"conference","oa_version":"Preprint","title":"Boosting black box variational inference","status":"public","intvolume":" 31","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","_id":"14202"},{"conference":{"name":"AISTATS: Conference on Artificial Intelligence and Statistics","end_date":"2018-04-11","location":"Playa Blanca, Lanzarote","start_date":"2018-04-09"},"language":[{"iso":"eng"}],"main_file_link":[{"open_access":"1","url":"https://arxiv.org/abs/1708.01733"}],"oa":1,"external_id":{"arxiv":["1708.01733"]},"quality_controlled":"1","month":"04","author":[{"last_name":"Locatello","first_name":"Francesco","orcid":"0000-0002-4850-0683","id":"26cfd52f-2483-11ee-8040-88983bcc06d4","full_name":"Locatello, Francesco"},{"full_name":"Khanna, Rajiv","last_name":"Khanna","first_name":"Rajiv"},{"full_name":"Ghosh, Joydeep","last_name":"Ghosh","first_name":"Joydeep"},{"full_name":"Rätsch, Gunnar","last_name":"Rätsch","first_name":"Gunnar"}],"volume":84,"date_created":"2023-08-22T14:15:20Z","date_updated":"2023-09-13T07:52:40Z","year":"2018","publisher":"ML Research Press","department":[{"_id":"FrLo"}],"publication_status":"published","extern":"1","date_published":"2018-04-15T00:00:00Z","citation":{"chicago":"Locatello, Francesco, Rajiv Khanna, Joydeep Ghosh, and Gunnar Rätsch. “Boosting Variational Inference: An Optimization Perspective.” In Proceedings of the 21st International Conference on Artificial Intelligence and Statistics, 84:464–72. ML Research Press, 2018.","short":"F. Locatello, R. Khanna, J. Ghosh, G. Rätsch, in:, Proceedings of the 21st International Conference on Artificial Intelligence and Statistics, ML Research Press, 2018, pp. 464–472.","mla":"Locatello, Francesco, et al. 
“Boosting Variational Inference: An Optimization Perspective.” Proceedings of the 21st International Conference on Artificial Intelligence and Statistics, vol. 84, ML Research Press, 2018, pp. 464–72.","ieee":"F. Locatello, R. Khanna, J. Ghosh, and G. Rätsch, “Boosting variational inference: An optimization perspective,” in Proceedings of the 21st International Conference on Artificial Intelligence and Statistics, Playa Blanca, Lanzarote, 2018, vol. 84, pp. 464–472.","apa":"Locatello, F., Khanna, R., Ghosh, J., & Rätsch, G. (2018). Boosting variational inference: An optimization perspective. In Proceedings of the 21st International Conference on Artificial Intelligence and Statistics (Vol. 84, pp. 464–472). Playa Blanca, Lanzarote: ML Research Press.","ista":"Locatello F, Khanna R, Ghosh J, Rätsch G. 2018. Boosting variational inference: An optimization perspective. Proceedings of the 21st International Conference on Artificial Intelligence and Statistics. AISTATS: Conference on Artificial Intelligence and Statistics, PMLR, vol. 84, 464–472.","ama":"Locatello F, Khanna R, Ghosh J, Rätsch G. Boosting variational inference: An optimization perspective. In: Proceedings of the 21st International Conference on Artificial Intelligence and Statistics. Vol 84. ML Research Press; 2018:464-472."},"publication":"Proceedings of the 21st International Conference on Artificial Intelligence and Statistics","page":"464-472","article_processing_charge":"No","day":"15","scopus_import":"1","oa_version":"Preprint","_id":"14201","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","intvolume":" 84","status":"public","title":"Boosting variational inference: An optimization perspective","abstract":[{"lang":"eng","text":"Variational inference is a popular technique to approximate a possibly\r\nintractable Bayesian posterior with a more tractable one. 
Recently, boosting\r\nvariational inference has been proposed as a new paradigm to approximate the\r\nposterior by a mixture of densities by greedily adding components to the\r\nmixture. However, as is the case with many other variational inference\r\nalgorithms, its theoretical properties have not been studied. In the present\r\nwork, we study the convergence properties of this approach from a modern\r\noptimization viewpoint by establishing connections to the classic Frank-Wolfe\r\nalgorithm. Our analyses yields novel theoretical insights regarding the\r\nsufficient conditions for convergence, explicit rates, and algorithmic\r\nsimplifications. Since a lot of focus in previous works for variational\r\ninference has been on tractability, our work is especially important as a much\r\nneeded attempt to bridge the gap between probabilistic models and their\r\ncorresponding theoretical properties."}],"type":"conference","alternative_title":["PMLR"]},{"type":"conference","extern":"1","abstract":[{"lang":"eng","text":"High-dimensional time series are common in many domains. Since human\r\ncognition is not optimized to work well in high-dimensional spaces, these areas\r\ncould benefit from interpretable low-dimensional representations. However, most\r\nrepresentation learning algorithms for time series data are difficult to\r\ninterpret. This is due to non-intuitive mappings from data features to salient\r\nproperties of the representation and non-smoothness over time. To address this\r\nproblem, we propose a new representation learning framework building on ideas\r\nfrom interpretable discrete dimensionality reduction and deep generative\r\nmodeling. This framework allows us to learn discrete representations of time\r\nseries, which give rise to smooth and interpretable embeddings with superior\r\nclustering performance. 
We introduce a new way to overcome the\r\nnon-differentiability in discrete representation learning and present a\r\ngradient-based version of the traditional self-organizing map algorithm that is\r\nmore performant than the original. Furthermore, to allow for a probabilistic\r\ninterpretation of our method, we integrate a Markov model in the representation\r\nspace. This model uncovers the temporal transition structure, improves\r\nclustering performance even further and provides additional explanatory\r\ninsights as well as a natural representation of uncertainty. We evaluate our\r\nmodel in terms of clustering performance and interpretability on static\r\n(Fashion-)MNIST data, a time series of linearly interpolated (Fashion-)MNIST\r\nimages, a chaotic Lorenz attractor system with two macro states, as well as on\r\na challenging real world medical time series application on the eICU data set.\r\nOur learned representations compare favorably with competitor methods and\r\nfacilitate downstream tasks on the real world data."}],"department":[{"_id":"FrLo"}],"publication_status":"published","title":"SOM-VAE: Interpretable discrete representation learning on time series","status":"public","year":"2018","_id":"14198","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","oa_version":"Preprint","date_updated":"2023-09-13T06:35:12Z","date_created":"2023-08-22T14:12:48Z","author":[{"last_name":"Fortuin","first_name":"Vincent","full_name":"Fortuin, Vincent"},{"last_name":"Hüser","first_name":"Matthias","full_name":"Hüser, Matthias"},{"full_name":"Locatello, Francesco","orcid":"0000-0002-4850-0683","id":"26cfd52f-2483-11ee-8040-88983bcc06d4","last_name":"Locatello","first_name":"Francesco"},{"last_name":"Strathmann","first_name":"Heiko","full_name":"Strathmann, Heiko"},{"full_name":"Rätsch, 
Gunnar","first_name":"Gunnar","last_name":"Rätsch"}],"article_processing_charge":"No","day":"06","month":"06","quality_controlled":"1","main_file_link":[{"url":"https://arxiv.org/abs/1806.02199","open_access":"1"}],"oa":1,"citation":{"short":"V. Fortuin, M. Hüser, F. Locatello, H. Strathmann, G. Rätsch, in:, International Conference on Learning Representations, 2018.","mla":"Fortuin, Vincent, et al. “SOM-VAE: Interpretable Discrete Representation Learning on Time Series.” International Conference on Learning Representations, 2018.","chicago":"Fortuin, Vincent, Matthias Hüser, Francesco Locatello, Heiko Strathmann, and Gunnar Rätsch. “SOM-VAE: Interpretable Discrete Representation Learning on Time Series.” In International Conference on Learning Representations, 2018.","ama":"Fortuin V, Hüser M, Locatello F, Strathmann H, Rätsch G. SOM-VAE: Interpretable discrete representation learning on time series. In: International Conference on Learning Representations. ; 2018.","ieee":"V. Fortuin, M. Hüser, F. Locatello, H. Strathmann, and G. Rätsch, “SOM-VAE: Interpretable discrete representation learning on time series,” in International Conference on Learning Representations, New Orleans, LA, United States, 2018.","apa":"Fortuin, V., Hüser, M., Locatello, F., Strathmann, H., & Rätsch, G. (2018). SOM-VAE: Interpretable discrete representation learning on time series. In International Conference on Learning Representations. New Orleans, LA, United States.","ista":"Fortuin V, Hüser M, Locatello F, Strathmann H, Rätsch G. 2018. SOM-VAE: Interpretable discrete representation learning on time series. International Conference on Learning Representations. 
ICLR: International Conference on Learning Representations."},"external_id":{"arxiv":["1806.02199"]},"publication":"International Conference on Learning Representations","language":[{"iso":"eng"}],"date_published":"2018-06-06T00:00:00Z","conference":{"location":"New Orleans, LA, United States","start_date":"2019-05-06","end_date":"2019-05-09","name":"ICLR: International Conference on Learning Representations"}},{"_id":"14203","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","intvolume":" 80","status":"public","title":"A conditional gradient framework for composite convex minimization with applications to semidefinite programming","oa_version":"Preprint","type":"conference","alternative_title":["PMLR"],"abstract":[{"lang":"eng","text":"We propose a conditional gradient framework for a composite convex minimization template with broad applications. Our approach combines smoothing and homotopy techniques under the CGM framework, and provably achieves the optimal O(1/k−−√) convergence rate. We demonstrate that the same rate holds if the linear subproblems are solved approximately with additive or multiplicative error. In contrast with the relevant work, we are able to characterize the convergence when the non-smooth term is an indicator function. Specific applications of our framework include the non-smooth minimization, semidefinite programming, and minimization with linear inclusion constraints over a compact domain. Numerical evidence demonstrates the benefits of our framework."}],"citation":{"mla":"Yurtsever, Alp, et al. “A Conditional Gradient Framework for Composite Convex Minimization with Applications to Semidefinite Programming.” Proceedings of the 35th International Conference on Machine Learning, vol. 80, ML Research Press, 2018, pp. 5727–36.","short":"A. Yurtsever, O. Fercoq, F. Locatello, V. Cevher, in:, Proceedings of the 35th International Conference on Machine Learning, ML Research Press, 2018, pp. 
5727–5736.","chicago":"Yurtsever, Alp, Olivier Fercoq, Francesco Locatello, and Volkan Cevher. “A Conditional Gradient Framework for Composite Convex Minimization with Applications to Semidefinite Programming.” In Proceedings of the 35th International Conference on Machine Learning, 80:5727–36. ML Research Press, 2018.","ama":"Yurtsever A, Fercoq O, Locatello F, Cevher V. A conditional gradient framework for composite convex minimization with applications to semidefinite programming. In: Proceedings of the 35th International Conference on Machine Learning. Vol 80. ML Research Press; 2018:5727-5736.","ista":"Yurtsever A, Fercoq O, Locatello F, Cevher V. 2018. A conditional gradient framework for composite convex minimization with applications to semidefinite programming. Proceedings of the 35th International Conference on Machine Learning. ICML: International Conference on Machine Learning, PMLR, vol. 80, 5727–5736.","apa":"Yurtsever, A., Fercoq, O., Locatello, F., & Cevher, V. (2018). A conditional gradient framework for composite convex minimization with applications to semidefinite programming. In Proceedings of the 35th International Conference on Machine Learning (Vol. 80, pp. 5727–5736). Stockholm, Sweden: ML Research Press.","ieee":"A. Yurtsever, O. Fercoq, F. Locatello, and V. Cevher, “A conditional gradient framework for composite convex minimization with applications to semidefinite programming,” in Proceedings of the 35th International Conference on Machine Learning, Stockholm, Sweden, 2018, vol. 80, pp. 
5727–5736."},"publication":"Proceedings of the 35th International Conference on Machine Learning","page":"5727-5736","date_published":"2018-07-15T00:00:00Z","article_processing_charge":"No","day":"15","year":"2018","department":[{"_id":"FrLo"}],"publisher":"ML Research Press","publication_status":"published","author":[{"full_name":"Yurtsever, Alp","first_name":"Alp","last_name":"Yurtsever"},{"last_name":"Fercoq","first_name":"Olivier","full_name":"Fercoq, Olivier"},{"first_name":"Francesco","last_name":"Locatello","id":"26cfd52f-2483-11ee-8040-88983bcc06d4","orcid":"0000-0002-4850-0683","full_name":"Locatello, Francesco"},{"first_name":"Volkan","last_name":"Cevher","full_name":"Cevher, Volkan"}],"volume":80,"date_created":"2023-08-22T14:16:01Z","date_updated":"2023-09-13T08:13:39Z","extern":"1","main_file_link":[{"open_access":"1","url":"https://arxiv.org/abs/1804.08544"}],"external_id":{"arxiv":["1804.08544"]},"oa":1,"quality_controlled":"1","conference":{"name":"ICML: International Conference on Machine Learning","start_date":"2018-07-10","location":"Stockholm, Sweden","end_date":"2018-07-15"},"language":[{"iso":"eng"}],"month":"07"},{"oa":1,"external_id":{"isi":["000440014100020"]},"main_file_link":[{"open_access":"1","url":"https://www.biorxiv.org/content/early/2017/11/30/227082"}],"isi":1,"quality_controlled":"1","doi":"10.1534/genetics.118.301018","language":[{"iso":"eng"}],"month":"08","year":"2018","department":[{"_id":"NiBa"}],"publisher":"Genetics Society of America","publication_status":"published","author":[{"full_name":"Sachdeva, Himani","first_name":"Himani","last_name":"Sachdeva","id":"42377A0A-F248-11E8-B48F-1D18A9856A87"},{"full_name":"Barton, Nicholas H","last_name":"Barton","first_name":"Nicholas H","orcid":"0000-0002-8548-5240","id":"4880FE40-F248-11E8-B48F-1D18A9856A87"}],"volume":209,"date_updated":"2023-09-13T08:22:32Z","date_created":"2018-12-11T11:45:36Z","publist_id":"7617","citation":{"ista":"Sachdeva H, Barton NH. 2018. 
Introgression of a block of genome under infinitesimal selection. Genetics. 209(4), 1279–1303.","ieee":"H. Sachdeva and N. H. Barton, “Introgression of a block of genome under infinitesimal selection,” Genetics, vol. 209, no. 4. Genetics Society of America, pp. 1279–1303, 2018.","apa":"Sachdeva, H., & Barton, N. H. (2018). Introgression of a block of genome under infinitesimal selection. Genetics. Genetics Society of America. https://doi.org/10.1534/genetics.118.301018","ama":"Sachdeva H, Barton NH. Introgression of a block of genome under infinitesimal selection. Genetics. 2018;209(4):1279-1303. doi:10.1534/genetics.118.301018","chicago":"Sachdeva, Himani, and Nicholas H Barton. “Introgression of a Block of Genome under Infinitesimal Selection.” Genetics. Genetics Society of America, 2018. https://doi.org/10.1534/genetics.118.301018.","mla":"Sachdeva, Himani, and Nicholas H. Barton. “Introgression of a Block of Genome under Infinitesimal Selection.” Genetics, vol. 209, no. 4, Genetics Society of America, 2018, pp. 1279–303, doi:10.1534/genetics.118.301018.","short":"H. Sachdeva, N.H. Barton, Genetics 209 (2018) 1279–1303."},"publication":"Genetics","page":"1279 - 1303","date_published":"2018-08-01T00:00:00Z","scopus_import":"1","article_processing_charge":"No","day":"01","user_id":"c635000d-4b10-11ee-a964-aac5a93f6ac1","_id":"282","intvolume":" 209","title":"Introgression of a block of genome under infinitesimal selection","status":"public","oa_version":"Submitted Version","type":"journal_article","issue":"4","abstract":[{"lang":"eng","text":"Adaptive introgression is common in nature and can be driven by selection acting on multiple, linked genes. We explore the effects of polygenic selection on introgression under the infinitesimal model with linkage. This model assumes that the introgressing block has an effectively infinite number of genes, each with an infinitesimal effect on the trait under selection. 
The block is assumed to introgress under directional selection within a native population that is genetically homogeneous. We use individual-based simulations and a branching process approximation to compute various statistics of the introgressing block, and explore how these depend on parameters such as the map length and initial trait value associated with the introgressing block, the genetic variability along the block, and the strength of selection. Our results show that the introgression dynamics of a block under infinitesimal selection is qualitatively different from the dynamics of neutral introgression. We also find that in the long run, surviving descendant blocks are likely to have intermediate lengths, and clarify how the length is shaped by the interplay between linkage and infinitesimal selection. Our results suggest that it may be difficult to distinguish introgression of single loci from that of genomic blocks with multiple, tightly linked and weakly selected loci."}]},{"publist_id":"7946","volume":2018,"date_updated":"2023-09-13T08:23:18Z","date_created":"2018-12-11T11:44:40Z","author":[{"first_name":"Maciej","last_name":"Obremski","full_name":"Obremski, Maciej"},{"id":"EC09FA6A-02D0-11E9-8223-86B7C91467DD","first_name":"Maciej","last_name":"Skorski","full_name":"Skorski, Maciej"}],"department":[{"_id":"KrPi"}],"publisher":"IEEE","publication_status":"published","year":"2018","month":"08","language":[{"iso":"eng"}],"doi":"10.1109/ISIT.2018.8437654","conference":{"name":"ISIT: International Symposium on Information Theory","end_date":"2018-06-22","start_date":"2018-06-17","location":"Vail, CO, USA"},"isi":1,"quality_controlled":"1","oa":1,"external_id":{"isi":["000448139300368"]},"main_file_link":[{"open_access":"1","url":"https://eprint.iacr.org/2017/507"}],"abstract":[{"lang":"eng","text":"Universal hashing found a lot of applications in computer science. 
In cryptography the most important fact about universal families is the so called Leftover Hash Lemma, proved by Impagliazzo, Levin and Luby. In the language of modern cryptography it states that almost universal families are good extractors. In this work we provide a somewhat surprising characterization in the opposite direction. Namely, every extractor with sufficiently good parameters yields a universal family on a noticeable fraction of its inputs. Our proof technique is based on tools from extremal graph theory applied to the 'collision graph' induced by the extractor, and may be of independent interest. We discuss possible applications to the theory of randomness extractors and non-malleable codes."}],"alternative_title":["ISIT Proceedings"],"type":"conference","oa_version":"Submitted Version","intvolume":" 2018","title":"Inverted leftover hash lemma","status":"public","user_id":"c635000d-4b10-11ee-a964-aac5a93f6ac1","_id":"108","article_processing_charge":"No","day":"16","scopus_import":"1","date_published":"2018-08-16T00:00:00Z","citation":{"ama":"Obremski M, Skórski M. Inverted leftover hash lemma. In: Vol 2018. IEEE; 2018. doi:10.1109/ISIT.2018.8437654","ieee":"M. Obremski and M. Skórski, “Inverted leftover hash lemma,” presented at the ISIT: International Symposium on Information Theory, Vail, CO, USA, 2018, vol. 2018.","apa":"Obremski, M., & Skórski, M. (2018). Inverted leftover hash lemma (Vol. 2018). Presented at the ISIT: International Symposium on Information Theory, Vail, CO, USA: IEEE. https://doi.org/10.1109/ISIT.2018.8437654","ista":"Obremski M, Skórski M. 2018. Inverted leftover hash lemma. ISIT: International Symposium on Information Theory, ISIT Proceedings, vol. 2018.","short":"M. Obremski, M. Skórski, in:, IEEE, 2018.","mla":"Obremski, Maciej, and Maciej Skórski. Inverted Leftover Hash Lemma. Vol. 2018, IEEE, 2018, doi:10.1109/ISIT.2018.8437654.","chicago":"Obremski, Maciej, and Maciej Skórski. “Inverted Leftover Hash Lemma,” Vol. 
2018. IEEE, 2018. https://doi.org/10.1109/ISIT.2018.8437654."}},{"publisher":"ML Research Press","department":[{"_id":"FrLo"}],"publication_status":"published","year":"2018","volume":80,"date_created":"2023-08-22T14:16:25Z","date_updated":"2023-09-13T08:19:05Z","author":[{"full_name":"Locatello, Francesco","id":"26cfd52f-2483-11ee-8040-88983bcc06d4","orcid":"0000-0002-4850-0683","first_name":"Francesco","last_name":"Locatello"},{"full_name":"Raj, Anant","last_name":"Raj","first_name":"Anant"},{"full_name":"Karimireddy, Sai Praneeth","last_name":"Karimireddy","first_name":"Sai Praneeth"},{"full_name":"Rätsch, Gunnar","last_name":"Rätsch","first_name":"Gunnar"},{"last_name":"Schölkopf","first_name":"Bernhard","full_name":"Schölkopf, Bernhard"},{"full_name":"Stich, Sebastian U.","last_name":"Stich","first_name":"Sebastian U."},{"full_name":"Jaggi, Martin","last_name":"Jaggi","first_name":"Martin"}],"extern":"1","quality_controlled":"1","oa":1,"main_file_link":[{"url":"https://arxiv.org/abs/1803.09539","open_access":"1"}],"external_id":{"arxiv":["1803.09539"]},"language":[{"iso":"eng"}],"month":"07","intvolume":" 80","status":"public","title":"On matching pursuit and coordinate descent","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","_id":"14204","oa_version":"Preprint","alternative_title":["PMLR"],"type":"conference","abstract":[{"lang":"eng","text":"Two popular examples of first-order optimization methods over linear spaces are coordinate descent and matching pursuit algorithms, with their randomized variants. While the former targets the optimization by moving along coordinates, the latter considers a generalized notion of directions. Exploiting the connection between the two algorithms, we present a unified analysis of both, providing affine invariant sublinear O(1/t) rates on smooth objectives and linear convergence on strongly convex objectives. 
As a byproduct of our affine invariant analysis of matching pursuit, our rates for steepest coordinate descent are the tightest known. Furthermore, we show the first accelerated convergence rate O(1/t2) for matching pursuit and steepest coordinate descent on convex objectives."}],"page":"3198-3207","citation":{"mla":"Locatello, Francesco, et al. “On Matching Pursuit and Coordinate Descent.” Proceedings of the 35th International Conference on Machine Learning, vol. 80, ML Research Press, 2018, pp. 3198–207.","short":"F. Locatello, A. Raj, S.P. Karimireddy, G. Rätsch, B. Schölkopf, S.U. Stich, M. Jaggi, in:, Proceedings of the 35th International Conference on Machine Learning, ML Research Press, 2018, pp. 3198–3207.","chicago":"Locatello, Francesco, Anant Raj, Sai Praneeth Karimireddy, Gunnar Rätsch, Bernhard Schölkopf, Sebastian U. Stich, and Martin Jaggi. “On Matching Pursuit and Coordinate Descent.” In Proceedings of the 35th International Conference on Machine Learning, 80:3198–3207. ML Research Press, 2018.","ama":"Locatello F, Raj A, Karimireddy SP, et al. On matching pursuit and coordinate descent. In: Proceedings of the 35th International Conference on Machine Learning. Vol 80. ML Research Press; 2018:3198-3207.","ista":"Locatello F, Raj A, Karimireddy SP, Rätsch G, Schölkopf B, Stich SU, Jaggi M. 2018. On matching pursuit and coordinate descent. Proceedings of the 35th International Conference on Machine Learning. , PMLR, vol. 80, 3198–3207.","apa":"Locatello, F., Raj, A., Karimireddy, S. P., Rätsch, G., Schölkopf, B., Stich, S. U., & Jaggi, M. (2018). On matching pursuit and coordinate descent. In Proceedings of the 35th International Conference on Machine Learning (Vol. 80, pp. 3198–3207). ML Research Press.","ieee":"F. Locatello et al., “On matching pursuit and coordinate descent,” in Proceedings of the 35th International Conference on Machine Learning, 2018, vol. 80, pp. 
3198–3207."},"publication":"Proceedings of the 35th International Conference on Machine Learning","date_published":"2018-07-01T00:00:00Z","scopus_import":"1","article_processing_charge":"No","day":"01"},{"month":"07","project":[{"grant_number":"Z211","_id":"25F42A32-B435-11E9-9278-68D0E5697425","name":"The Wittgenstein Prize","call_identifier":"FWF"},{"grant_number":"S 11407_N23","_id":"25832EC2-B435-11E9-9278-68D0E5697425","call_identifier":"FWF","name":"Rigorous Systems Engineering"}],"quality_controlled":"1","isi":1,"oa":1,"tmp":{"name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","short":"CC BY (4.0)","image":"/images/cc_by.png"},"external_id":{"isi":["000491481600005"]},"language":[{"iso":"eng"}],"doi":"10.1007/978-3-319-96145-3_5","conference":{"name":"CAV: Computer Aided Verification","end_date":"2018-07-17","start_date":"2018-07-14","location":"Oxford, UK"},"publist_id":"7761","file_date_updated":"2020-07-14T12:45:04Z","department":[{"_id":"ToHe"}],"publisher":"Springer","publication_status":"published","year":"2018","volume":10981,"date_updated":"2023-09-13T08:45:09Z","date_created":"2018-12-11T11:44:57Z","related_material":{"record":[{"relation":"dissertation_contains","status":"public","id":"8332"}]},"author":[{"full_name":"Kragl, Bernhard","first_name":"Bernhard","last_name":"Kragl","id":"320FC952-F248-11E8-B48F-1D18A9856A87","orcid":"0000-0001-7745-9117"},{"full_name":"Qadeer, Shaz","last_name":"Qadeer","first_name":"Shaz"}],"scopus_import":"1","article_processing_charge":"No","has_accepted_license":"1","day":"18","page":"79 - 102","citation":{"mla":"Kragl, Bernhard, and Shaz Qadeer. Layered Concurrent Programs. Vol. 10981, Springer, 2018, pp. 79–102, doi:10.1007/978-3-319-96145-3_5.","short":"B. Kragl, S. Qadeer, in:, Springer, 2018, pp. 79–102.","chicago":"Kragl, Bernhard, and Shaz Qadeer. “Layered Concurrent Programs,” 10981:79–102. 
Springer, 2018. https://doi.org/10.1007/978-3-319-96145-3_5.","ama":"Kragl B, Qadeer S. Layered Concurrent Programs. In: Vol 10981. Springer; 2018:79-102. doi:10.1007/978-3-319-96145-3_5","ista":"Kragl B, Qadeer S. 2018. Layered Concurrent Programs. CAV: Computer Aided Verification, LNCS, vol. 10981, 79–102.","apa":"Kragl, B., & Qadeer, S. (2018). Layered Concurrent Programs (Vol. 10981, pp. 79–102). Presented at the CAV: Computer Aided Verification, Oxford, UK: Springer. https://doi.org/10.1007/978-3-319-96145-3_5","ieee":"B. Kragl and S. Qadeer, “Layered Concurrent Programs,” presented at the CAV: Computer Aided Verification, Oxford, UK, 2018, vol. 10981, pp. 79–102."},"date_published":"2018-07-18T00:00:00Z","alternative_title":["LNCS"],"type":"conference","abstract":[{"lang":"eng","text":"We present layered concurrent programs, a compact and expressive notation for specifying refinement proofs of concurrent programs. A layered concurrent program specifies a sequence of connected concurrent programs, from most concrete to most abstract, such that common parts of different programs are written exactly once. These programs are expressed in the ordinary syntax of imperative concurrent programs using gated atomic actions, sequencing, choice, and (recursive) procedure calls. Each concurrent program is automatically extracted from the layered program. We reduce refinement to the safety of a sequence of concurrent checker programs, one each to justify the connection between every two consecutive concurrent programs. These checker programs are also automatically extracted from the layered program. 
Layered concurrent programs have been implemented in the CIVL verifier which has been successfully used for the verification of several complex concurrent programs."}],"intvolume":" 10981","title":"Layered Concurrent Programs","ddc":["000"],"status":"public","_id":"160","user_id":"c635000d-4b10-11ee-a964-aac5a93f6ac1","oa_version":"Published Version","file":[{"checksum":"c64fff560fe5a7532ec10626ad1c215e","date_created":"2018-12-17T12:52:12Z","date_updated":"2020-07-14T12:45:04Z","file_id":"5705","relation":"main_file","creator":"dernst","content_type":"application/pdf","file_size":1603844,"access_level":"open_access","file_name":"2018_LNCS_Kragl.pdf"}]}]