├── Opti_SQP.m
└── README.md

/Opti_SQP.m:
--------------------------------------------------------------------------------
% EXAMPLE OF SQP ALGORITHM
%
% Sequential Quadratic Programming for the inequality-constrained problem
%     min  f(x)    s.t.  g(x) >= 0
% Strategy: solve the KKT system of the QP subproblem, drop constraints whose
% multiplier comes out negative, backtrack on an exact-penalty merit function,
% and update the Lagrangian Hessian with a BFGS formula.

clear variables; close all; clc;
fprintf("---------------------------------------------------------------\n")
fprintf("An implementation of Sequential Quadratic Programming method\nin a nonlinear constrained optimization problem\n")
fprintf("---------------------------------------------------------------\n")

% INITIAL VALUES - INPUT

vars = 2;            % number of design variables
cons = 1;            % number of inequality constraints
maxIter = 100;       % iteration cap
x = [-1;4];          % initial guess point
l = 0;               % initial Lagrange-multiplier vector
H = eye(vars);       % Lagrangian Hessian approximation, started as identity

% EVALUATE AT STARTING POINT

fEval = f(x);
gEval = g(x);
[gViol,lViol] = Viols(gEval,l);
gradfEval = gradf(x);
gradgEval = gradg(x);
P = Penalty(fEval,gViol,lViol);

% SQP ALGORITHM

for i = 1:maxIter

    % Solve the KKT conditions of the quadratic approximation, treating
    % all constraints as active. Returns [step; multipliers].
    sol = SolveKKT(gradfEval,gradgEval,gEval,H);
    xSol = sol(1:vars);
    lSol = sol(vars+1:vars+cons);

    % A negative multiplier means the constraint is not actually active:
    % zero it and fall back to the unconstrained Newton step H\(-gradf').
    for j = 1:length(lSol)
        if lSol(j) < 0
            sol = H\(-gradfEval)';
            xSol = sol(1:vars);
            lSol(j) = 0;
        end
    end

    % EVALUATE AT NEW CANDIDATE POINT

    xNew = x + xSol;
    lNew = lSol;
    fEvalNew = f(xNew);
    gEvalNew = g(xNew);
    gradfEvalNew = gradf(xNew);
    gradgEvalNew = gradg(xNew);
    [gViolNew,lViolNew] = Viols(gEvalNew,lNew);
    PNew = Penalty(fEvalNew,gViolNew,lViolNew);

    % Backtracking line search: halve the step until the merit (penalty)
    % function no longer increases beyond a small tolerance.
    while PNew - P > 1e-4
        xSol = 0.5*xSol;
        xNew = x + xSol;
        fEvalNew = f(xNew);
        gEvalNew = g(xNew);
        gradfEvalNew = gradf(xNew);
        gradgEvalNew = gradg(xNew);
        [gViolNew,lViolNew] = Viols(gEvalNew,lNew);
        PNew = Penalty(fEvalNew,gViolNew,lViolNew);
    end

    % STOPPING CRITERION
    % NOTE(review): breaking here discards the final (small) accepted step;
    % x keeps its previous value, matching the original behaviour.
    if norm(xNew(1:vars)-x(1:vars)) <= 1e-2
        break
    end

    % UPDATE THE HESSIAN (BFGS) using the change in the Lagrangian gradient
    % evaluated with the NEW multipliers at both points (lNew, not l).
    gradLEval = gradLagr(gradfEval,gradgEval,lNew,vars);
    gradLEvalNew = gradLagr(gradfEvalNew,gradgEvalNew,lNew,vars);
    Q = gradLEvalNew - gradLEval;
    dx = xNew - x;
    HNew = UpdateH(H,dx,Q);

    % UPDATE ALL VALUES FOR NEXT ITERATION

    H = HNew;
    fEval = fEvalNew;
    gEval = gEvalNew;
    gradfEval = gradfEvalNew;
    gradgEval = gradgEvalNew;
    P = PNew;
    x = xNew;
end

fprintf('SQP: Optimum point:\n x1=%10.4f\n x2=%10.4f\n iterations =%10.0f \n', x(1), x(2), i)

% FUNCTIONS NEEDED

function y = SolveKKT(gradfEval,gradgEval,gEval,Hessian)
% Solve the KKT system of the QP subproblem with all constraints active:
%   [ H  -A' ] [dx]   [-gradf']
%   [ A   0  ] [ l] = [-g'    ]
% where A = gradgEval is the (m x n) constraint Jacobian.
% The zero block is sized m x m so the system is valid for any number of
% constraints (the original hard-coded a scalar 0, which only worked for m=1).
m = size(gradgEval,1);
A = [Hessian -gradgEval'; gradgEval zeros(m)];
b = [-gradfEval -gEval]';
y = A\b;
end

function y = f(x)
% Objective: f(x) = x1^4 - 2*x1^2*x2 + x2^2 + x1^2 - 2*x1 + 5
y = x(1)^4 - 2*x(2)*x(1)^2 + x(2)^2 + x(1)^2 - 2*x(1)+5;
end

function y = gradf(x)
% Gradient of f as a 1 x n row vector.
y(1) = 2*x(1)-4*x(1)*x(2)+4*x(1)^3-2;
y(2) = -2*x(1)^2 + 2*x(2);
end

function y = gradLagr(gradfEval,gradgEval,l,n)
% Gradient of the Lagrangian L = f - sum_i l(i)*g_i as an n x 1 column.
% gradgEval is m x n; row i is the gradient of constraint i.
% (Fix: the original indexed gradgEval(i:n), which is only accidentally
% correct when there is a single constraint.)
y = gradfEval';
acc = zeros(n,1);
for i = 1:length(l)
    acc = acc - l(i)*gradgEval(i,:)';
end
y = y + acc;
end

function y = gradg(x)
% Jacobian of the constraints: row i = gradient of g_i (here m = 1).
y(1,1) = -2*x(1)-1/2;
y(1,2) = 3/4;
end

function y = g(x)
% Inequality constraint, feasible when g(x) >= 0.
y(1) = -(x(1)+0.25)^2+0.75*x(2);
end

function [gViol,lViol] = Viols(gEval,l)
% Collect the violated constraints (g < 0) and their multipliers.
gViol = [];
lViol = [];
for i = 1:length(gEval)
    if gEval(i) < 0
        gViol(i) = gEval(i);
        lViol(i) = l(i);
    end
end
end

function y = Penalty(fEval,gViol,lViol)
% Exact penalty merit function: P = f + sum over violated constraints of
% l(i)*|g(i)|. The multipliers weight objective vs. infeasibility.
% (Local renamed from "sum" to avoid shadowing the MATLAB builtin.)
viol = 0;
for i = 1:length(gViol)
    viol = viol + lViol(i)*abs(gViol(i));
end
y = fEval + viol;
end
function y = UpdateH(H,dx,gamma)
% BFGS rank-two update of the Lagrangian Hessian approximation.
%   H     : current n x n symmetric positive-definite approximation
%   dx    : step in the variables, xNew - x
%   gamma : difference of Lagrangian gradients at the two points
% Robustness fix: the update divides by gamma'*dx; when the curvature
% condition gamma'*dx > 0 fails, the correction would divide by zero or
% destroy positive definiteness, so the update is skipped and H returned.
if gamma'*dx <= 1e-12
    y = H;
    return
end
term1 = (gamma*gamma') / (gamma'*dx);
term2 = ((H*dx)*(dx'*H)) / (dx'*(H*dx));
y = H + term1 - term2;
end
--------------------------------------------------------------------------------

/README.md:
--------------------------------------------------------------------------------
# Sequential-Quadratic-Programming-method-Implementation-in-Matlab
A SQP algorithm implementation for solving nonlinear constrained optimization problems.

Summary of Steps for SQP Algorithm

1. Make a QP approximation to the original problem. For the first iteration, use a
Lagrangian Hessian equal to the identity matrix.

2. Solve for the optimum to the QP problem. As part of this solution, values for the
Lagrange multipliers are obtained.

3. Execute a simple line search by first stepping to the optimum of the QP problem. So the
initial step is ∆x, and xnew = xold + ∆x. See if at this point a penalty function, composed of
the values of the objective and violated constraints, is reduced. If not, cut back the step
size until the penalty function is reduced. The penalty function is given by P = f + sum(λ*g),
where the summation is done over the set of violated constraints, and
the absolute values of the constraints are taken. The Lagrange multipliers act as scaling
or weighting factors between the objective and violated constraints.

4. Evaluate the Lagrangian gradient at the new point. Calculate the difference in x and in
the Lagrangian gradient, γ. Update the Lagrangian Hessian using the BFGS update.

5. Return to Step 1 until ∆x is sufficiently small. When ∆x approaches zero, the K-T
conditions for the original problem are satisfied.
Example of SQP Algorithm

Find the optimum to the problem:

min f(x) = x1^4 - 2*x1^2*x2 + x2^2 + x1^2 - 2*x1 + 5

s.t. g(x) = -(x1 + 0.25)^2 + 0.75*x2 >= 0

SQP finds the optimum in 9 iterations.

![image](https://user-images.githubusercontent.com/90531367/170685187-d181d8d8-ef06-409e-b239-ae64332c77e4.png)

--------------------------------------------------------------------------------