@article{8196, abstract = {This paper aims to obtain a strong convergence result for a Douglas–Rachford splitting method with an inertial extrapolation step for finding a zero of the sum of two set-valued maximal monotone operators, without any further assumption of uniform monotonicity on any of the involved maximal monotone operators. Furthermore, the proposed method is easy to implement, and its inertial factor is a natural choice. Our method of proof is of independent interest. Finally, some numerical implementations are given to confirm the theoretical analysis.}, author = {Shehu, Yekini and Dong, Qiao-Li and Liu, Lu-Lu and Yao, Jen-Chih}, issn = {1573-2924}, journal = {Optimization and Engineering}, pages = {2627--2653}, publisher = {Springer Nature}, title = {{New strong convergence method for the sum of two maximal monotone operators}}, doi = {10.1007/s11081-020-09544-5}, volume = {22}, year = {2021}, } @article{7925, abstract = {In this paper, we introduce a relaxed CQ method with an alternated inertial step for solving split feasibility problems. We establish convergence of the sequence generated by our method under suitable assumptions. Numerical results from sparse signal recovery and image deblurring are reported to show the efficiency of our method.}, author = {Shehu, Yekini and Gibali, Aviv}, issn = {1862-4480}, journal = {Optimization Letters}, pages = {2109--2126}, publisher = {Springer Nature}, title = {{New inertial relaxed method for solving split feasibilities}}, doi = {10.1007/s11590-020-01603-1}, volume = {15}, year = {2021}, } @article{6593, abstract = {We consider the monotone variational inequality problem in a Hilbert space and describe a projection-type method with inertial terms that has the following properties: (a) The method generates a strongly convergent iteration sequence; (b) The method requires, at each iteration, only one projection onto the feasible set and two evaluations of the operator; (c) The method is designed for variational inequalities whose underlying operator is monotone and uniformly continuous; (d) The method includes an inertial term. The latter is also shown to speed up the convergence in our numerical results. A comparison with some related methods is given and indicates that the new method is promising.}, author = {Shehu, Yekini and Li, Xiao-Huan and Dong, Qiao-Li}, issn = {1572-9265}, journal = {Numerical Algorithms}, pages = {365--388}, publisher = {Springer Nature}, title = {{An efficient projection-type method for monotone variational inequalities in Hilbert spaces}}, doi = {10.1007/s11075-019-00758-y}, volume = {84}, year = {2020}, } @article{8077, abstract = {Projection methods with a vanilla inertial extrapolation step for variational inequalities have recently attracted considerable interest because of the improved convergence speed contributed by the inertial extrapolation step. However, these inertial projection methods lose the Fejér monotonicity of the iterates with respect to the solution that is enjoyed by their corresponding non-inertial projection methods. Because of this lack of Fejér monotonicity, projection methods with a vanilla inertial extrapolation step do not always converge faster than their corresponding non-inertial projection methods.
Also, it has recently been proved that projection methods with a vanilla inertial extrapolation step may provide convergence rates that are worse than those of the classical projected gradient methods for strongly convex functions. In this paper, we introduce projection methods with an alternated inertial extrapolation step for solving variational inequalities. We show that the sequence of iterates generated by our methods converges weakly to a solution of the variational inequality under appropriate conditions. The Fejér monotonicity of the even subsequence is recovered in these methods, and a linear rate of convergence is obtained. Numerical comparisons with some other inertial projection methods show that our method is more efficient and outperforms several of them.}, author = {Shehu, Yekini and Iyiola, Olaniyi S.}, issn = {0168-9274}, journal = {Applied Numerical Mathematics}, pages = {315--337}, publisher = {Elsevier}, title = {{Projection methods with alternating inertial steps for variational inequalities: Weak and linear convergence}}, doi = {10.1016/j.apnum.2020.06.009}, volume = {157}, year = {2020}, } @article{7161, abstract = {In this paper, we introduce an inertial projection-type method with different updating strategies for solving quasi-variational inequalities with strongly monotone and Lipschitz continuous operators in real Hilbert spaces. Under standard assumptions, we establish different strong convergence results for the proposed algorithm. Preliminary numerical experiments demonstrate the potential applicability of our scheme compared with some related methods in the literature.}, author = {Shehu, Yekini and Gibali, Aviv and Sagratella, Simone}, issn = {1573-2878}, journal = {Journal of Optimization Theory and Applications}, pages = {877--894}, publisher = {Springer Nature}, title = {{Inertial projection-type methods for solving quasi-variational inequalities in real Hilbert spaces}}, doi = {10.1007/s10957-019-01616-6}, volume = {184}, year = {2020}, } @inproceedings{6725, abstract = {A Valued Constraint Satisfaction Problem (VCSP) provides a common framework that can express a wide range of discrete optimization problems. A VCSP instance is given by a finite set of variables, a finite domain of labels, and an objective function to be minimized. This function is represented as a sum of terms where each term depends on a subset of the variables. To obtain different classes of optimization problems, one can restrict all terms to come from a fixed set Γ of cost functions, called a language. Recent breakthrough results have established a complete complexity classification of such classes with respect to the language Γ: if all cost functions in Γ satisfy a certain algebraic condition then all Γ-instances can be solved in polynomial time, otherwise the problem is NP-hard. Unfortunately, testing this condition for a given language Γ is known to be NP-hard. We thus study exponential algorithms for this meta-problem. We show that the tractability condition of a finite-valued language Γ can be tested in O(3^(|D|/3) ⋅ poly(size(Γ))) time, where D is the domain of Γ and poly(⋅) is some fixed polynomial. We also obtain a matching lower bound under the Strong Exponential Time Hypothesis (SETH).
More precisely, we prove that for any constant δ<1 there is no O(3^(δ|D|/3)) algorithm, assuming that SETH holds.}, author = {Kolmogorov, Vladimir}, booktitle = {46th International Colloquium on Automata, Languages and Programming}, isbn = {978-3-95977-109-2}, issn = {1868-8969}, location = {Patras, Greece}, pages = {77:1--77:12}, publisher = {Schloss Dagstuhl - Leibniz-Zentrum für Informatik}, title = {{Testing the complexity of a valued CSP language}}, doi = {10.4230/LIPICS.ICALP.2019.77}, volume = {132}, year = {2019}, } @article{6596, abstract = {It is well known that many problems in image recovery, signal processing, and machine learning can be modeled as finding zeros of the sum of maximal monotone and Lipschitz continuous monotone operators. Many papers have studied forward-backward splitting methods for finding zeros of the sum of two monotone operators in Hilbert spaces. Most of the splitting methods in the literature have been proposed for the sum of maximal monotone and inverse-strongly monotone operators in Hilbert spaces. In this paper, we consider splitting methods for finding zeros of the sum of maximal monotone operators and Lipschitz continuous monotone operators in Banach spaces. We obtain weak and strong convergence results for the zeros of the sum of maximal monotone and Lipschitz continuous monotone operators in Banach spaces. Many problems already studied in the literature can be considered as special cases of the results in this paper.}, author = {Shehu, Yekini}, issn = {1420-9012}, journal = {Results in Mathematics}, number = {4}, publisher = {Springer}, title = {{Convergence results of forward-backward algorithms for sum of monotone operators in Banach spaces}}, doi = {10.1007/s00025-019-1061-4}, volume = {74}, year = {2019}, } @article{7000, abstract = {The main contributions of this paper are the proposition and convergence analysis of a class of inertial projection-type algorithms for solving variational inequality problems in real Hilbert spaces where the underlying operator is monotone and uniformly continuous. We carry out a unified analysis of the proposed method under very mild assumptions. In particular, weak convergence of the generated sequence is established, and a nonasymptotic O(1/n) rate of convergence is obtained, where n denotes the iteration counter. We also present some experimental results to illustrate the benefits gained by introducing the inertial extrapolation steps.}, author = {Shehu, Yekini and Iyiola, Olaniyi S. and Li, Xiao-Huan and Dong, Qiao-Li}, issn = {1807-0302}, journal = {Computational and Applied Mathematics}, number = {4}, publisher = {Springer Nature}, title = {{Convergence analysis of projection method for variational inequalities}}, doi = {10.1007/s40314-019-0955-9}, volume = {38}, year = {2019}, } @article{7412, abstract = {We develop a framework for the rigorous analysis of focused stochastic local search algorithms. These algorithms search a state space by repeatedly selecting some constraint that is violated in the current state and moving to a random nearby state that addresses the violation, while (we hope) not introducing many new violations. An important class of focused local search algorithms with provable performance guarantees has recently arisen from algorithmizations of the Lovász local lemma (LLL), a nonconstructive tool for proving the existence of satisfying states by introducing a background measure on the state space.
While powerful, algorithms in this class must have state transitions that are, in a precise sense, perfectly compatible with the background measure. In many applications this is a very restrictive requirement, and one needs to step outside the class. Here we introduce the notion of measure distortion and develop a framework for analyzing arbitrary focused stochastic local search algorithms, recovering LLL algorithmizations as the special case of no distortion. Our framework takes as input an arbitrary algorithm of such type and an arbitrary probability measure and shows how to use the measure as a yardstick of algorithmic progress, even for algorithms designed independently of the measure.}, author = {Achlioptas, Dimitris and Iliopoulos, Fotis and Kolmogorov, Vladimir}, issn = {1095-7111}, journal = {SIAM Journal on Computing}, number = {5}, pages = {1583--1602}, publisher = {SIAM}, title = {{A local lemma for focused stochastic algorithms}}, doi = {10.1137/16m109332x}, volume = {48}, year = {2019}, } @inproceedings{7468, abstract = {We present a new proximal bundle method for Maximum-A-Posteriori (MAP) inference in structured energy minimization problems. The method optimizes a Lagrangean relaxation of the original energy minimization problem using a multi-plane block-coordinate Frank-Wolfe method that takes advantage of the specific structure of the Lagrangean decomposition. We show empirically that our method outperforms state-of-the-art Lagrangean decomposition-based algorithms on some challenging Markov Random Field, multi-label discrete tomography, and graph matching problems.}, author = {Swoboda, Paul and Kolmogorov, Vladimir}, booktitle = {Proceedings of the IEEE Computer Society Conference on Computer Vision and Pattern Recognition}, isbn = {9781728132938}, issn = {1063-6919}, location = {Long Beach, CA, United States}, publisher = {IEEE}, title = {{MAP inference via block-coordinate Frank-Wolfe algorithm}}, doi = {10.1109/CVPR.2019.01140}, volume = {2019-June}, year = {2019}, }