@article{2516,
  author        = {Lampert, Christoph and Nickisch, Hannes and Harmeling, Stefan},
  title         = {Attribute-Based Classification for Zero-Shot Learning of Object Categories},
  journal       = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
  volume        = {36},
  number        = {3},
  pages         = {453--465},
  publisher     = {IEEE},
  year          = {2013},
  doi           = {10.1109/TPAMI.2013.140},
  internal-note = {NOTE(review): TPAMI vol. 36, no. 3 is the March 2014 issue; year 2013 may be the online-first date -- verify against the publisher record},
  abstract      = {We study the problem of object recognition for categories for which we have no training examples, a task also called zero-data or zero-shot learning. This situation has hardly been studied in computer vision research, even though it occurs frequently: the world contains tens of thousands of different object classes and for only few of them image collections have been formed and suitably annotated. To tackle the problem we introduce attribute-based classification: objects are identified based on a high-level description that is phrased in terms of semantic attributes, such as the object's color or shape. Because the identification of each such property transcends the specific learning task at hand, the attribute classifiers can be pre-learned independently, e.g. from existing image datasets unrelated to the current task. Afterwards, new classes can be detected based on their attribute representation, without the need for a new training phase. In this paper we also introduce a new dataset, Animals with Attributes, of over 30,000 images of 50 animal classes, annotated with 85 semantic attributes. Extensive experiments on this and two more datasets show that attribute-based classification indeed is able to categorize images without access to any training images of the target classes.},
}
@inproceedings{2517,
  author    = {Almagor, Shaull and Boker, Udi and Kupferman, Orna},
  title     = {Formalizing and Reasoning about Quality},
  booktitle = {Automata, Languages, and Programming ({ICALP} 2013, Part {II})},
  series    = {Lecture Notes in Computer Science},
  volume    = {7966},
  pages     = {15--27},
  publisher = {Springer},
  location  = {Riga, Latvia},
  year      = {2013},
  doi       = {10.1007/978-3-642-39212-2_3},
  abstract  = {Traditional formal methods are based on a Boolean satisfaction notion: a reactive system satisfies, or not, a given specification. We generalize formal methods to also address the quality of systems. As an adequate specification formalism we introduce the linear temporal logic LTL[F]. The satisfaction value of an LTL[F] formula is a number between 0 and 1, describing the quality of the satisfaction. The logic generalizes traditional LTL by augmenting it with a (parameterized) set F of arbitrary functions over the interval [0,1]. For example, F may contain the maximum or minimum between the satisfaction values of subformulas, their product, and their average. The classical decision problems in formal methods, such as satisfiability, model checking, and synthesis, are generalized to search and optimization problems in the quantitative setting. For example, model checking asks for the quality in which a specification is satisfied, and synthesis returns a system satisfying the specification with the highest quality. Reasoning about quality gives rise to other natural questions, like the distance between specifications. We formalize these basic questions and study them for LTL[F]. By extending the automata-theoretic approach for LTL to a setting that takes quality into an account, we are able to solve the above problems and show that reasoning about LTL[F] has roughly the same complexity as reasoning about traditional LTL.},
}
@inproceedings{2518,
  author    = {Kolmogorov, Vladimir},
  title     = {The Power of Linear Programming for Finite-Valued {CSPs}: A Constructive Characterization},
  booktitle = {Automata, Languages, and Programming ({ICALP} 2013, Part {I})},
  series    = {Lecture Notes in Computer Science},
  volume    = {7965},
  pages     = {625--636},
  publisher = {Springer},
  location  = {Riga, Latvia},
  year      = {2013},
  doi       = {10.1007/978-3-642-39206-1_53},
  abstract  = {A class of valued constraint satisfaction problems (VCSPs) is characterised by a valued constraint language, a fixed set of cost functions on a finite domain. An instance of the problem is specified by a sum of cost functions from the language with the goal to minimise the sum. We study which classes of finite-valued languages can be solved exactly by the basic linear programming relaxation (BLP). Thapper and Živný showed [20] that if BLP solves the language then the language admits a binary commutative fractional polymorphism. We prove that the converse is also true. This leads to a necessary and a sufficient condition which can be checked in polynomial time for a given language. In contrast, the previous necessary and sufficient condition due to [20] involved infinitely many inequalities. More recently, Thapper and Živný [21] showed (using, in particular, a technique introduced in this paper) that core languages that do not satisfy our condition are NP-hard. Taken together, these results imply that a finite-valued language can either be solved using Linear Programming or is NP-hard.},
}
@inproceedings{2520,
  author    = {Quadrianto, Novi and Sharmanska, Viktoriia and Knowles, David and Ghahramani, Zoubin},
  title     = {The Supervised {IBP}: Neighbourhood Preserving Infinite Latent Feature Models},
  booktitle = {Proceedings of the 29th Conference on Uncertainty in Artificial Intelligence},
  pages     = {527--536},
  publisher = {AUAI Press},
  location  = {Bellevue, WA, United States},
  isbn      = {9780974903996},
  year      = {2013},
  abstract  = {We propose a probabilistic model to infer supervised latent variables in the Hamming space from observed data. Our model allows simultaneous inference of the number of binary latent variables, and their values. The latent variables preserve neighbourhood structure of the data in a sense that objects in the same semantic concept have similar latent values, and objects in different concepts have dissimilar latent values. We formulate the supervised infinite latent variable problem based on an intuitive principle of pulling objects together if they are of the same type, and pushing them apart if they are not. We then combine this principle with a flexible Indian Buffet Process prior on the latent variables. We show that the inferred supervised latent variables can be directly used to perform a nearest neighbour search for the purpose of retrieval. We introduce a new application of dynamically extending hash codes, and show how to effectively couple the structure of the hash codes with continuously growing structure of the neighbourhood preserving infinite latent feature space.},
}
@article{2698,
  author    = {Erdős, László and Fournais, Søren and Solovej, Jan},
  title     = {Stability and Semiclassics in Self-Generated Fields},
  journal   = {Journal of the European Mathematical Society},
  volume    = {15},
  number    = {6},
  pages     = {2093--2113},
  publisher = {European Mathematical Society},
  year      = {2013},
  doi       = {10.4171/JEMS/416},
  abstract  = {We consider non-interacting particles subject to a fixed external potential V and a self-generated magnetic field B. The total energy includes the field energy β∫B2 and we minimize over all particle states and magnetic fields. In the case of spin-1/2 particles this minimization leads to the coupled Maxwell-Pauli system. The parameter β tunes the coupling strength between the field and the particles and it effectively determines the strength of the field. We investigate the stability and the semiclassical asymptotics, h→0, of the total ground state energy E(β,h,V). The relevant parameter measuring the field strength in the semiclassical limit is κ=βh. We are not able to give the exact leading order semiclassical asymptotics uniformly in κ or even for fixed κ. We do however give upper and lower bounds on E with almost matching dependence on κ. In the simultaneous limit h→0 and κ→∞ we show that the standard non-magnetic Weyl asymptotics holds. The same result also holds for the spinless case, i.e. where the Pauli operator is replaced by the Schrödinger operator.},
}