@article{2010,
abstract = {Many algorithms for inferring causality rely heavily on the faithfulness assumption. The main justification for imposing this assumption is that the set of unfaithful distributions has Lebesgue measure zero, since it can be seen as a collection of hypersurfaces in a hypercube. However, due to sampling error the faithfulness condition alone is not sufficient for statistical estimation, and strong-faithfulness has been proposed and assumed to achieve uniform or high-dimensional consistency. In contrast to the plain faithfulness assumption, the set of distributions that is not strong-faithful has nonzero Lebesgue measure and in fact, can be surprisingly large as we show in this paper. We study the strong-faithfulness condition from a geometric and combinatorial point of view and give upper and lower bounds on the Lebesgue measure of strong-faithful distributions for various classes of directed acyclic graphs. Our results imply fundamental limitations for the PC-algorithm and potentially also for other algorithms based on partial correlation testing in the Gaussian case.},
author = {Uhler, Caroline and Raskutti, Garvesh and Bühlmann, Peter and Yu, Bin},
journal = {The Annals of Statistics},
number = {2},
pages = {436--463},
publisher = {Institute of Mathematical Statistics},
title = {{Geometry of the faithfulness assumption in causal inference}},
doi = {10.1214/12-AOS1080},
volume = {41},
year = {2013},
}
@inproceedings{2181,
abstract = {There is a trade-off between performance and correctness in implementing concurrent data structures. Better performance may be achieved at the expense of relaxing correctness, by redefining the semantics of data structures. We address such a redefinition of data structure semantics and present a systematic and formal framework for obtaining new data structures by quantitatively relaxing existing ones. We view a data structure as a sequential specification S containing all "legal" sequences over an alphabet of method calls. Relaxing the data structure corresponds to defining a distance from any sequence over the alphabet to the sequential specification: the k-relaxed sequential specification contains all sequences over the alphabet within distance k from the original specification. In contrast to other existing work, our relaxations are semantic (distance in terms of data structure states). As an instantiation of our framework, we present two simple yet generic relaxation schemes, called out-of-order and stuttering relaxation, along with several ways of computing distances. We show that the out-of-order relaxation, when further instantiated to stacks, queues, and priority queues, amounts to tolerating bounded out-of-order behavior, which cannot be captured by a purely syntactic relaxation (distance in terms of sequence manipulation, e.g. edit distance). We give concurrent implementations of relaxed data structures and demonstrate that bounded relaxations provide the means for trading correctness for performance in a controlled way. The relaxations are monotonic which further highlights the trade-off: increasing k increases the number of permitted sequences, which as we demonstrate can lead to better performance. Finally, since a relaxed stack or queue also implements a pool, we actually have new concurrent pool implementations that outperform the state-of-the-art ones.},
author = {Henzinger, Thomas A and Kirsch, Christoph and Payer, Hannes and Sezgin, Ali and Sokolova, Ana},
booktitle = {Proceedings of the 40th annual ACM SIGPLAN-SIGACT symposium on Principles of programming languages},
isbn = {978-1-4503-1832-7},
location = {Rome, Italy},
pages = {317--328},
publisher = {ACM},
title = {{Quantitative relaxation of concurrent data structures}},
doi = {10.1145/2429069.2429109},
year = {2013},
}
@inproceedings{2182,
abstract = {We propose a general framework for abstraction with respect to quantitative properties, such as worst-case execution time, or power consumption. Our framework provides a systematic way for counter-example guided abstraction refinement for quantitative properties. The salient aspect of the framework is that it allows anytime verification, that is, verification algorithms that can be stopped at any time (for example, due to exhaustion of memory), and report approximations that improve monotonically when the algorithms are given more time. We instantiate the framework with a number of quantitative abstractions and refinement schemes, which differ in terms of how much quantitative information they keep from the original system. We introduce both state-based and trace-based quantitative abstractions, and we describe conditions that define classes of quantitative properties for which the abstractions provide over-approximations. We give algorithms for evaluating the quantitative properties on the abstract systems. We present algorithms for counter-example based refinements for quantitative properties for both state-based and segment-based abstractions. We perform a case study on worst-case execution time of executables to evaluate the anytime verification aspect and the quantitative abstractions we proposed.},
author = {Cerny, Pavol and Henzinger, Thomas A and Radhakrishna, Arjun},
booktitle = {Proceedings of the 40th annual ACM SIGPLAN-SIGACT symposium on Principles of programming languages},
location = {Rome, Italy},
pages = {115--128},
publisher = {ACM},
title = {{Quantitative abstraction refinement}},
doi = {10.1145/2429069.2429085},
year = {2013},
}
@inproceedings{2209,
abstract = {A straight skeleton is a well-known geometric structure, and several algorithms exist to construct the straight skeleton for a given polygon or planar straight-line graph. In this paper, we ask the reverse question: Given the straight skeleton (in form of a planar straight-line graph, with some rays to infinity), can we reconstruct a planar straight-line graph for which this was the straight skeleton? We show how to reduce this problem to the problem of finding a line that intersects a set of convex polygons. We can find these convex polygons and all such lines in $O(n \log n)$ time in the Real RAM computer model, where $n$ denotes the number of edges of the input graph. We also explain how our approach can be used for recognizing Voronoi diagrams of points, thereby completing a partial solution provided by Ash and Bolker in 1985.},
author = {Biedl, Therese and Held, Martin and Huber, Stefan},
booktitle = {2013 10th International Symposium on Voronoi Diagrams in Science and Engineering},
location = {St. Petersburg, Russia},
pages = {37--46},
publisher = {IEEE},
title = {{Recognizing straight skeletons and Voronoi diagrams and reconstructing their input}},
doi = {10.1109/ISVD.2013.11},
year = {2013},
}
@inproceedings{2210,
abstract = {A straight skeleton is a well-known geometric structure, and several algorithms exist to construct the straight skeleton for a given polygon. In this paper, we ask the reverse question: Given the straight skeleton (in form of a tree with a drawing in the plane, but with the exact position of the leaves unspecified), can we reconstruct the polygon? We show that in most cases there exists at most one polygon; in the remaining case there is an infinite number of polygons determined by one angle that can range in an interval. We can find this (set of) polygon(s) in linear time in the Real RAM computer model.},
author = {Biedl, Therese and Held, Martin and Huber, Stefan},
booktitle = {29th European Workshop on Computational Geometry},
location = {Braunschweig, Germany},
pages = {95 -- 98},
publisher = {TU Braunschweig},
title = {{Reconstructing polygons from embedded straight skeletons}},
year = {2013},
}
@inproceedings{2237,
abstract = {We describe new extensions of the Vampire theorem prover for computing tree interpolants. These extensions generalize Craig interpolation in Vampire, and can also be used to derive sequence interpolants. We evaluated our implementation on a large number of examples over the theory of linear integer arithmetic and integer-indexed arrays, with and without quantifiers. When compared to other methods, our experiments show that some examples could only be solved by our implementation.},
author = {Blanc, Régis and Gupta, Ashutosh and Kovács, Laura and Kragl, Bernhard},
booktitle = {Logic for Programming, Artificial Intelligence, and Reasoning},
location = {Stellenbosch, South Africa},
pages = {173--181},
publisher = {Springer},
title = {{Tree interpolation in Vampire}},
doi = {10.1007/978-3-642-45221-5_13},
volume = {8312},
year = {2013},
}
@inproceedings{2238,
abstract = {We study the problem of achieving a given value in Markov decision processes (MDPs) with several independent discounted reward objectives. We consider a generalised version of discounted reward objectives, in which the amount of discounting depends on the states visited and on the objective. This definition extends the usual definition of discounted reward, and allows to capture the systems in which the value of different commodities diminish at different and variable rates.
We establish results for two prominent subclasses of the problem, namely state-discount models where the discount factors are only dependent on the state of the MDP (and independent of the objective), and reward-discount models where they are only dependent on the objective (but not on the state of the MDP). For the state-discount models we use a straightforward reduction to expected total reward and show that the problem whether a value is achievable can be solved in polynomial time. For the reward-discount model we show that memory and randomisation of the strategies are required, but nevertheless that the problem is decidable and it is sufficient to consider strategies which after a certain number of steps behave in a memoryless way.
For the general case, we show that when restricted to graphs (i.e. MDPs with no randomisation), pure strategies and discount factors of the form 1/n where n is an integer, the problem is in PSPACE and finite memory suffices for achieving a given value. We also show that when the discount factors are not of the form 1/n, the memory required by a strategy can be infinite.
},
author = {Chatterjee, Krishnendu and Forejt, Vojtěch and Wojtczak, Dominik},
booktitle = {Logic for Programming, Artificial Intelligence, and Reasoning},
location = {Stellenbosch, South Africa},
pages = {228--242},
publisher = {Springer},
title = {{Multi-objective discounted reward verification in graphs and MDPs}},
doi = {10.1007/978-3-642-45221-5_17},
volume = {8312},
year = {2013},
}
@inproceedings{2243,
abstract = {We show that modal logic over universally first-order definable classes of transitive frames is decidable. More precisely, let K be an arbitrary class of transitive Kripke frames definable by a universal first-order sentence. We show that the global and finite global satisfiability problems of modal logic over K are decidable in NP, regardless of choice of K. We also show that the local satisfiability and the finite local satisfiability problems of modal logic over K are decidable in NEXPTIME.},
author = {Michaliszyn, Jakub and Otop, Jan},
booktitle = {Computer Science Logic 2013 (CSL 2013)},
location = {Torino, Italy},
pages = {563--577},
publisher = {Schloss Dagstuhl - Leibniz-Zentrum für Informatik},
title = {{Elementary modal logics over transitive structures}},
doi = {10.4230/LIPIcs.CSL.2013.563},
volume = {23},
year = {2013},
}
@inproceedings{2244,
abstract = {We consider two systems (α1,...,αm) and (β1,...,βn) of curves drawn on a compact two-dimensional surface ℳ with boundary. Each αi and each βj is either an arc meeting the boundary of ℳ at its two endpoints, or a closed curve. The αi are pairwise disjoint except for possibly sharing endpoints, and similarly for the βj. We want to "untangle" the βj from the αi by a self-homeomorphism of ℳ; more precisely, we seek an homeomorphism φ: ℳ → ℳ fixing the boundary of ℳ pointwise such that the total number of crossings of the αi with the φ(βj) is as small as possible. This problem is motivated by an application in the algorithmic theory of embeddings and 3-manifolds. We prove that if ℳ is planar, i.e., a sphere with h ≥ 0 boundary components ("holes"), then O(mn) crossings can be achieved (independently of h), which is asymptotically tight, as an easy lower bound shows. In general, for an arbitrary (orientable or nonorientable) surface ℳ with h holes and of (orientable or nonorientable) genus g ≥ 0, we obtain an O((m + n)^4) upper bound, again independent of h and g.},
author = {Matoušek, Jiří and Sedgwick, Eric and Tancer, Martin and Wagner, Uli},
booktitle = {Graph Drawing},
location = {Bordeaux, France},
pages = {472--483},
publisher = {Springer},
title = {{Untangling two systems of noncrossing curves}},
doi = {10.1007/978-3-319-03841-4_41},
volume = {8242},
year = {2013},
}
@article{2247,
abstract = {Cooperative behavior, where one individual incurs a cost to help another, is a wide spread phenomenon. Here we study direct reciprocity in the context of the alternating Prisoner's Dilemma. We consider all strategies that can be implemented by one and two-state automata. We calculate the payoff matrix of all pairwise encounters in the presence of noise. We explore deterministic selection dynamics with and without mutation. Using different error rates and payoff values, we observe convergence to a small number of distinct equilibria. Two of them are uncooperative strict Nash equilibria representing always-defect (ALLD) and Grim. The third equilibrium is mixed and represents a cooperative alliance of several strategies, dominated by a strategy which we call Forgiver. Forgiver cooperates whenever the opponent has cooperated; it defects once when the opponent has defected, but subsequently Forgiver attempts to re-establish cooperation even if the opponent has defected again. Forgiver is not an evolutionarily stable strategy, but the alliance, which it rules, is asymptotically stable. For a wide range of parameter values the most commonly observed outcome is convergence to the mixed equilibrium, dominated by Forgiver. Our results show that although forgiving might incur a short-term loss it can lead to a long-term gain. Forgiveness facilitates stable cooperation in the presence of exploitation and noise.},
author = {Zagorsky, Benjamin and Reiter, Johannes and Chatterjee, Krishnendu and Nowak, Martin},
journal = {PLoS One},
number = {12},
publisher = {Public Library of Science},
title = {{Forgiver triumphs in alternating prisoner's dilemma}},
doi = {10.1371/journal.pone.0080814},
volume = {8},
year = {2013},
}
@article{2256,
abstract = {Linked (Open) Data - bibliographic data on the Semantic Web. Report of the Working Group on Linked Data to the plenary assembly of the Austrian Library Network (translation of the title). Linked Data stands for a certain approach to publishing data on the Web. The underlying idea is to harmonise heterogeneous data sources of different origin in order to improve their accessibility and interoperability, effectively making them queryable as a big distributed database. This report summarises relevant developments in Europe as well as the Linked Data Working Group’s strategic and technical considerations regarding the publishing of the Austrian Library Network’s (OBV’s) bibliographic datasets. It concludes with the mutual agreement that the implementation of Linked Data principles within the OBV can only be taken into consideration accompanied by a discussion about the provision of the datasets under a free license.},
author = {Danowski, Patrick and Goldfarb, Doron and Schaffner, Verena and Seidler, Wolfram},
journal = {VÖB Mitteilungen},
number = {3/4},
pages = {559--587},
publisher = {Verein Österreichischer Bibliothekarinnen und Bibliothekare},
title = {{Linked (Open) Data - Bibliographische Daten im Semantic Web}},
volume = {66},
year = {2013},
}
@inproceedings{2258,
abstract = {In a digital signature scheme with message recovery, rather than transmitting the message m and its signature σ, a single enhanced signature τ is transmitted. The verifier is able to recover m from τ and at the same time verify its authenticity. The two most important parameters of such a scheme are its security and overhead |τ| − |m|. A simple argument shows that for any scheme with “n bits security” |τ| − |m| ≥ n, i.e., the overhead is lower bounded by the security parameter n. Currently, the best known constructions in the random oracle model are far from this lower bound requiring an overhead of n + log q_h, where q_h is the number of queries to the random oracle. In this paper we give a construction which basically matches the n bit lower bound. We propose a simple digital signature scheme with n + o(log q_h) bits overhead, where q_h denotes the number of random oracle queries.
Our construction works in two steps. First, we propose a signature scheme with message recovery having optimal overhead in a new ideal model, the random invertible function model. Second, we show that a four-round Feistel network with random oracles as round functions is tightly “public-indifferentiable” from a random invertible function. At the core of our indifferentiability proof is an almost tight upper bound for the expected number of edges of the densest “small” subgraph of a random Cayley graph, which may be of independent interest.
},
author = {Kiltz, Eike and Pietrzak, Krzysztof Z and Szegedy, Mario},
booktitle = {Advances in Cryptology - CRYPTO 2013},
location = {Santa Barbara, CA, United States},
pages = {571--588},
publisher = {Springer},
title = {{Digital signatures with minimal overhead from indifferentiable random invertible functions}},
doi = {10.1007/978-3-642-40041-4_31},
volume = {8042},
year = {2013},
}
@inproceedings{2259,
abstract = {The learning with rounding (LWR) problem, introduced by Banerjee, Peikert and Rosen at EUROCRYPT ’12, is a variant of learning with errors (LWE), where one replaces random errors with deterministic rounding. The LWR problem was shown to be as hard as LWE for a setting of parameters where the modulus and modulus-to-error ratio are super-polynomial. In this work we resolve the main open problem and give a new reduction that works for a larger range of parameters, allowing for a polynomial modulus and modulus-to-error ratio. In particular, a smaller modulus gives us greater efficiency, and a smaller modulus-to-error ratio gives us greater security, which now follows from the worst-case hardness of GapSVP with polynomial (rather than super-polynomial) approximation factors.
As a tool in the reduction, we show that there is a “lossy mode” for the LWR problem, in which LWR samples only reveal partial information about the secret. This property gives us several interesting new applications, including a proof that LWR remains secure with weakly random secrets of sufficient min-entropy, and very simple constructions of deterministic encryption, lossy trapdoor functions and reusable extractors.
Our approach is inspired by a technique of Goldwasser et al. from ICS ’10, which implicitly showed the existence of a “lossy mode” for LWE. By refining this technique, we also improve on the parameters of that work to only requiring a polynomial (instead of super-polynomial) modulus and modulus-to-error ratio.
},
author = {Alwen, Joel F and Krenn, Stephan and Pietrzak, Krzysztof Z and Wichs, Daniel},
booktitle = {Advances in Cryptology - CRYPTO 2013},
location = {Santa Barbara, CA, United States},
pages = {57--74},
publisher = {Springer},
title = {{Learning with rounding, revisited: New reduction properties and applications}},
doi = {10.1007/978-3-642-40041-4_4},
volume = {8042},
year = {2013},
}
@article{476,
abstract = {Maternal exposure to infection occurring mid-gestation produces a three-fold increase in the risk of schizophrenia in the offspring. The critical initiating factor appears to be the maternal immune activation (MIA) that follows infection. This process can be induced in rodents by exposure of pregnant dams to the viral mimic Poly I:C, which triggers an immune response that results in structural, functional, behavioral, and electrophysiological phenotypes in the adult offspring that model those seen in schizophrenia. We used this model to explore the role of synchronization in brain neural networks, a process thought to be dysfunctional in schizophrenia and previously associated with positive, negative, and cognitive symptoms of schizophrenia. Exposure of pregnant dams to Poly I:C on GD15 produced an impairment in long-range neural synchrony in adult offspring between two regions implicated in schizophrenia pathology; the hippocampus and the medial prefrontal cortex (mPFC). This reduction in synchrony was ameliorated by acute doses of the antipsychotic clozapine. MIA animals have previously been shown to have impaired pre-pulse inhibition (PPI), a gold-standard measure of schizophrenia-like deficits in animal models. Our data showed that deficits in synchrony were positively correlated with the impairments in PPI. Subsequent analysis of LFP activity during the PPI response also showed that reduced coupling between the mPFC and the hippocampus following processing of the pre-pulse was associated with reduced PPI. The ability of the MIA intervention to model neurodevelopmental aspects of schizophrenia pathology provides a useful platform from which to investigate the ontogeny of aberrant synchronous processes. Further, the way in which the model expresses translatable deficits such as aberrant synchrony and reduced PPI will allow researchers to explore novel intervention strategies targeted to these changes. },
author = {Dickerson, Desiree and Bilkey, David},
journal = {Frontiers in Behavioral Neuroscience},
publisher = {Frontiers Research Foundation},
title = {{Aberrant neural synchrony in the maternal immune activation model: Using translatable measures to explore targeted interventions}},
doi = {10.3389/fnbeh.2013.00217},
volume = {7},
year = {2013},
}
@article{499,
abstract = {Exposure of an isogenic bacterial population to a cidal antibiotic typically fails to eliminate a small fraction of refractory cells. Historically, fractional killing has been attributed to infrequently dividing or nondividing "persisters." Using microfluidic cultures and time-lapse microscopy, we found that Mycobacterium smegmatis persists by dividing in the presence of the drug isoniazid (INH). Although persistence in these studies was characterized by stable numbers of cells, this apparent stability was actually a dynamic state of balanced division and death. Single cells expressed catalase-peroxidase (KatG), which activates INH, in stochastic pulses that were negatively correlated with cell survival. These behaviors may reflect epigenetic effects, because KatG pulsing and death were correlated between sibling cells. Selection of lineages characterized by infrequent KatG pulsing could allow nonresponsive adaptation during prolonged drug exposure.},
author = {Wakamoto, Yuichi and Dhar, Neeraj and Chait, Remy P and Schneider, Katrin and Signorino-Gelo, François and Leibler, Stanislas and McKinney, John},
journal = {Science},
number = {6115},
pages = {91--95},
publisher = {American Association for the Advancement of Science},
title = {{Dynamic persistence of antibiotic-stressed mycobacteria}},
doi = {10.1126/science.1229858},
volume = {339},
year = {2013},
}
@article{500,
abstract = {Background: Reassortment between the RNA segments encoding haemagglutinin (HA) and neuraminidase (NA), the major antigenic influenza proteins, produces viruses with novel HA and NA subtype combinations and has preceded the emergence of pandemic strains. It has been suggested that productive viral infection requires a balance in the level of functional activity of HA and NA, arising from their closely interacting roles in the viral life cycle, and that this functional balance could be mediated by genetic changes in the HA and NA. Here, we investigate how the selective pressure varies for H7 avian influenza HA on different NA subtype backgrounds. Results: By extending Bayesian stochastic mutational mapping methods to calculate the ratio of the rate of non-synonymous change to the rate of synonymous change (dN/dS), we found the average dN/dS across the avian influenza H7 HA1 region to be significantly greater on an N2 NA subtype background than on an N1, N3 or N7 background. Observed differences in evolutionary rates of H7 HA on different NA subtype backgrounds could not be attributed to underlying differences between avian host species or virus pathogenicity. Examination of dN/dS values for each subtype on a site-by-site basis indicated that the elevated dN/dS on the N2 NA background was a result of increased selection, rather than a relaxation of selective constraint. Conclusions: Our results are consistent with the hypothesis that reassortment exposes influenza HA to significant changes in selective pressure through genetic interactions with NA. Such epistatic effects might be explicitly accounted for in future models of influenza evolution.},
author = {Ward, Melissa and Lycett, Samantha and Avila, Dorita and Bollback, Jonathan P and Leigh Brown, Andrew},
journal = {BMC Evolutionary Biology},
number = {1},
publisher = {BioMed Central},
title = {{Evolutionary interactions between haemagglutinin and neuraminidase in avian influenza}},
doi = {10.1186/1471-2148-13-222},
volume = {13},
year = {2013},
}
@article{501,
abstract = {All known species of extant tapirs are allopatric: 1 in southeastern Asia and 3 in Central and South America. The fossil record for tapirs, however, is much wider in geographical range, including Europe, Asia, and North and South America, going back to the late Oligocene, making the present distribution a relict of the original one. We here describe a new species of living Tapirus from the Amazon rain forest, the 1st since T. bairdii Gill, 1865, and the 1st new Perissodactyla in more than 100 years, from both morphological and molecular characters. It is shorter in stature than T. terrestris (Linnaeus, 1758) and has distinctive skull morphology, and it is basal to the clade formed by T. terrestris and T. pinchaque (Roulin, 1829). This highlights the unrecognized biodiversity in western Amazonia, where the biota faces increasing threats. Local peoples have long recognized our new species, suggesting a key role for traditional knowledge in understanding the biodiversity of the region.},
author = {Cozzuol, Mario and Clozato, Camila and Holanda, Elizete and Rodrigues, Flávio and Nienow, Samuel and De Thoisy, Benoit and Fernandes Redondo, Rodrigo A and Santos, Fabrício},
journal = {Journal of Mammalogy},
number = {6},
pages = {1331--1345},
publisher = {Oxford University Press},
title = {{A new species of tapir from the Amazon}},
doi = {10.1644/12-MAMM-A-169.1},
volume = {94},
year = {2013},
}
@article{502,
abstract = {Blind signatures allow users to obtain signatures on messages hidden from the signer; moreover, the signer cannot link the resulting message/signature pair to the signing session. This paper presents blind signature schemes, in which the number of interactions between the user and the signer is minimal and whose blind signatures are short. Our schemes are defined over bilinear groups and are proved secure in the common-reference-string model without random oracles and under standard assumptions: CDH and the decision-linear assumption. (We also give variants over asymmetric groups based on similar assumptions.) The blind signatures are Waters signatures, which consist of 2 group elements. Moreover, we instantiate partially blind signatures, where the message consists of a part hidden from the signer and a commonly known public part, and schemes achieving perfect blindness. We propose new variants of blind signatures, such as signer-friendly partially blind signatures, where the public part can be chosen by the signer without prior agreement, 3-party blind signatures, as well as blind signatures on multiple aggregated messages provided by independent sources. We also extend Waters signatures to non-binary alphabets by proving a new result on the underlying hash function. },
author = {Blazy, Olivier and Fuchsbauer, Georg and Pointcheval, David and Vergnaud, Damien},
journal = {Journal of Computer Security},
number = {5},
pages = {627--661},
publisher = {IOS Press},
title = {{Short blind signatures}},
doi = {10.3233/JCS-130477},
volume = {21},
year = {2013},
}
@article{505,
abstract = {Alkyd resins are polyesters containing unsaturated fatty acids that are used as binding agents in paints and coatings. Chemical drying of these polyesters is based on heavy metal catalyzed cross-linking of the unsaturated fatty acid moieties. Among the heavy-metal catalysts, cobalt complexes are the most effective, yet they have been proven to be carcinogenic. Therefore, strategies to replace the cobalt-based catalyst by environmentally friendlier and less toxic alternatives are under development. Here, we demonstrate for the first time that a laccase-mediator system can effectively replace the heavy-metal catalyst and cross-link alkyd resins. Interestingly, the biocatalytic reaction does not only work in aqueous media, but also in a solid film, where enzyme diffusion is limited. Within the catalytic cycle, the mediator oxidizes the alkyd resin and is regenerated by the laccase, which is uniformly distributed within the drying film as evidenced by confocal laser scanning microscopy. During gradual build-up of molecular weight, there is a concomitant decrease of the oxygen content in the film. A new optical sensor to follow oxygen consumption during the cross-linking reaction was developed and validated with state of the art techniques. A remarkable feature is the low sample amount required, which allows faster screening of new catalysts.},
author = {Greimel, Katrin and Perz, Veronika and Koren, Klaus and Feola, Roland and Temel, Armin and Sohar, Christian and Herrero Acero, Enrique and Klimant, Ingo and Guebitz, Georg},
journal = {Green Chemistry},
number = {2},
pages = {381--388},
publisher = {Royal Society of Chemistry},
title = {{Banning toxic heavy-metal catalysts from paints: Enzymatic cross-linking of alkyd resins}},
doi = {10.1039/c2gc36666e},
volume = {15},
year = {2013},
}
@article{507,
abstract = {Fertilization in flowering plants requires the temporal and spatial coordination of many developmental processes, including pollen production, anther dehiscence, ovule production, and pollen tube elongation. However, it remains elusive as to how this coordination occurs during reproduction. Here, we present evidence that endocytosis, involving heterotetrameric adaptor protein complex 2 (AP-2), plays a crucial role in fertilization. An Arabidopsis thaliana mutant ap2m displays multiple defects in pollen production and viability, as well as elongation of staminal filaments and pollen tubes, all of which are pivotal processes needed for fertilization. Of these abnormalities, the defects in elongation of staminal filaments and pollen tubes were partially rescued by exogenous auxin. Moreover, DR5rev:GFP (for green fluorescent protein) expression was greatly reduced in filaments and anthers in ap2m mutant plants. At the cellular level, ap2m mutants displayed defects in both endocytosis of N-(3-triethylammonium-propyl)-4- (4-diethylaminophenylhexatrienyl) pyridinium dibromide, a lypophilic dye used as an endocytosis marker, and polar localization of auxin-efflux carrier PIN FORMED2 (PIN2) in the stamen filaments. Moreover, these defects were phenocopied by treatment with Tyrphostin A23, an inhibitor of endocytosis. Based on these results, we propose that AP-2-dependent endocytosis plays a crucial role in coordinating the multiple developmental aspects of male reproductive organs by modulating cellular auxin level through the regulation of the amount and polarity of PINs.},
author = {Kim, Soo and Xu, Zheng and Song, Kyungyoung and Kim, Dae and Kang, Hyangju and Reichardt, Ilka and Sohn, Eun and Friml, Jiří and Juergens, Gerd and Hwang, Inhwan},
journal = {Plant Cell},
number = {8},
pages = {2970--2985},
publisher = {American Society of Plant Biologists},
title = {{Adaptor protein complex 2-mediated endocytosis is crucial for male reproductive organ development in Arabidopsis}},
doi = {10.1105/tpc.113.114264},
volume = {25},
year = {2013},
}