├── w6-approximation_and_fitting ├── format_X.m ├── 06Approximation.pdf ├── tv_img_interp.m ├── team_abilities.m ├── least_squares.m ├── signal_denoise.m ├── rational_minimax.m ├── team_data.m ├── img_interpolation.m ├── convex_function_fitting.m ├── log_normcdf.m ├── costumer_preference.m ├── cens_fit_data_norng.m ├── cens_data2.m └── censored_data.m ├── w5-duality ├── 05Duality.pdf ├── slater.m ├── slater.m~ ├── qcqp.m ├── dual_solver.m ├── hw2.m └── hw1.m ├── w8-geometric_problems ├── CVX.pdf ├── 08GeometricalProbs.pdf ├── circle2.m ├── fitting_sphere.m ├── quad_pseud_metric.m ├── min_vol_rect.m ├── three_way_lin_class.m ├── sphere_fit_data.m ├── sep3way_data.m ├── quad_metric_data_norng.m └── max_vol_box_norng.m ├── w10-unconstrained ├── 10UnconstrainedMin.pdf ├── grad_newton │ ├── plot_res.m │ ├── grad_descent.m │ ├── newton.m │ ├── grad_and_newton_last.m │ ├── hessian_approximation.m │ └── mod_newton.m ├── drawing_elipsoids.m ├── advertising │ ├── ad_disp_data.m │ └── ad_display.m ├── grav_feed_network_data.m ├── bio │ ├── fba_data.m │ └── flux_analysis.m ├── simple_backtrack_line_search.m ├── ranking │ ├── ranking.m │ └── rank_aggr_data.m └── grad_and_newton.m ├── w11:12-equality_contrained ├── hw8extra.pdf ├── 11EqualityMin.pdf ├── 12InteriorPoint.pdf ├── extra │ └── linear_barrier │ │ ├── gradient_functions.m │ │ ├── logbarrier.m │ │ ├── centering.m │ │ ├── phase_1.m │ │ ├── solution │ │ ├── phase_1.m │ │ ├── lp_barrier.m │ │ ├── run_solution.m │ │ ├── lp_solve.m │ │ └── lp_acent.m │ │ ├── lp_solve.m │ │ ├── LP_inequality.m │ │ └── newton_KKT_and_BE.m └── hw │ ├── interdiction_effort.m │ ├── longest_path.m │ └── interdict_alloc_data.m ├── w7-stat_estimation ├── 07StatisticalEst.pdf ├── signal_estim2.m ├── ml_estim_incr_signal_data_norng.m ├── worse_case_loss.m ├── signal_estim.m └── pwl_fit_data.m ├── w4-convex_problems ├── 04ConvexOptimizationProblems.pdf ├── complex_ls.m ├── hw.m └── simple_portfolio_data.m ├── README.md └── LICENSE /w6-approximation_and_fitting/format_X.m: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /w5-duality/05Duality.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abrazinskas/convex-optimization/HEAD/w5-duality/05Duality.pdf -------------------------------------------------------------------------------- /w8-geometric_problems/CVX.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abrazinskas/convex-optimization/HEAD/w8-geometric_problems/CVX.pdf -------------------------------------------------------------------------------- /w10-unconstrained/10UnconstrainedMin.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abrazinskas/convex-optimization/HEAD/w10-unconstrained/10UnconstrainedMin.pdf -------------------------------------------------------------------------------- /w11:12-equality_contrained/hw8extra.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abrazinskas/convex-optimization/HEAD/w11:12-equality_contrained/hw8extra.pdf -------------------------------------------------------------------------------- /w7-stat_estimation/07StatisticalEst.pdf: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/abrazinskas/convex-optimization/HEAD/w7-stat_estimation/07StatisticalEst.pdf -------------------------------------------------------------------------------- /w5-duality/slater.m: -------------------------------------------------------------------------------- 1 | cvx_begin 2 | variables x,y 3 | minimize (exp(-x)) 4 | subject to 5 | (x^2)/4 <= 0 6 | y>0 7 | cvx_end -------------------------------------------------------------------------------- /w5-duality/slater.m~: -------------------------------------------------------------------------------- 1 | cvx_begin 2 | variable x 3 | variable y 4 | minimize (exp(-x)) 5 | subject to 6 | (x.^2)/y <= 0 7 | cvx_end -------------------------------------------------------------------------------- /w11:12-equality_contrained/11EqualityMin.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abrazinskas/convex-optimization/HEAD/w11:12-equality_contrained/11EqualityMin.pdf -------------------------------------------------------------------------------- /w8-geometric_problems/08GeometricalProbs.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abrazinskas/convex-optimization/HEAD/w8-geometric_problems/08GeometricalProbs.pdf -------------------------------------------------------------------------------- /w11:12-equality_contrained/12InteriorPoint.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abrazinskas/convex-optimization/HEAD/w11:12-equality_contrained/12InteriorPoint.pdf -------------------------------------------------------------------------------- /w6-approximation_and_fitting/06Approximation.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abrazinskas/convex-optimization/HEAD/w6-approximation_and_fitting/06Approximation.pdf -------------------------------------------------------------------------------- /w10-unconstrained/grad_newton/plot_res.m: -------------------------------------------------------------------------------- 1 | function [] = plot_res(f_vals,p_star, titl) 2 | plot(f_vals - p_star); 3 | title(titl); 4 | xlabel('iter'); 5 | ylabel('f - p^{*}'); -------------------------------------------------------------------------------- /w4-convex_problems/04ConvexOptimizationProblems.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abrazinskas/convex-optimization/HEAD/w4-convex_problems/04ConvexOptimizationProblems.pdf -------------------------------------------------------------------------------- /w5-duality/qcqp.m: -------------------------------------------------------------------------------- 1 | P= [1 0; 0 1]; 2 | q_1 = [-2;-2]; 3 | q_2 = [-2;2]; 4 | 5 | cvx_begin 6 | variable x(2) 7 | minimize (x'*P*x) 8 | subject to 9 | x'*P*x + x'*q_1 + 1 <= 0 10 | x'*P*x + x'*q_2 + 1 <= 0 11 | cvx_end -------------------------------------------------------------------------------- /w8-geometric_problems/circle2.m: -------------------------------------------------------------------------------- 1 | function h = circle2(x,y,r) 2 | 3 | d = r*2; 4 | 5 | px = x-r; 6 | 7 | py = y-r; 8 | 9 | h = rectangle('Position',[px py d d],'Curvature',[1,1]); 10 | 11 | daspect([1,1,1]) 12 | -------------------------------------------------------------------------------- 
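% Usage sketch (illustrative): circle2(x,y,r) draws a circle of radius r
% centered at (x,y) on the current axes. fitting_sphere.m uses it to overlay
% the fitted circle on the data points, e.g.
%   plot(U(:,1), U(:,2), '*'); hold on;
%   circle2(x_c(1), x_c(2), r);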
/w5-duality/dual_solver.m: -------------------------------------------------------------------------------- 1 | function [lambda, x] = dual_solver(A,P,q,b) 2 | cvx_begin quiet 3 | variable x(2); 4 | dual variable lambda; 5 | minimize( x'*P*x + q'*x) 6 | subject to 7 | lambda : A*x <= b; 8 | cvx_end 9 | end -------------------------------------------------------------------------------- /w10-unconstrained/drawing_elipsoids.m: -------------------------------------------------------------------------------- 1 | P1 = [2 0; 0 8]; 2 | P2 = [8 0; 0 2]; 3 | 4 | 5 | 6 | % P1 7 | x2 = @(x1) sqrt(1/8 - (x1.^2)/4); 8 | x1 = linspace(-sqrt(0.5),sqrt(0.5),1000); 9 | 10 | figure 11 | 12 | subplot(211) 13 | plot([x1 x1], [-x2(x1) x2(x1)],'*'); 14 | xlabel('x'); 15 | ylabel('y'); 16 | 17 | -------------------------------------------------------------------------------- /w11:12-equality_contrained/extra/linear_barrier/gradient_functions.m: -------------------------------------------------------------------------------- 1 | %% Gradients and functions 2 | grad = @(x) c; 3 | hessian = @(x) zeros(n, n); 4 | f = @(x) c'*x; 5 | 6 | 7 | %% Barrier function \phi that approximates the contraint x>=0 8 | phi = @(x) -sum(log(x)); 9 | d_phi = @(x) -1./x; 10 | d2_phi = @(x) diag(1./ (x.^2)); -------------------------------------------------------------------------------- /w11:12-equality_contrained/hw/interdiction_effort.m: -------------------------------------------------------------------------------- 1 | clear all; 2 | run interdict_alloc_data.m; 3 | 4 | cvx_begin 5 | variables x(m) z(n) 6 | minimize(z(n)) 7 | z(1) == 0 8 | A'*z >= -diag(a)*x; 9 | x >= 0 10 | x <= x_max 11 | sum(x) <= B 12 | cvx_end 13 | 14 | disp(['P_max is: ', num2str(exp(z(n)))]) -------------------------------------------------------------------------------- /w11:12-equality_contrained/hw/longest_path.m: -------------------------------------------------------------------------------- 1 | c = [ 10; 30; 2; 5;6]; 2 | A = [ -1 -1 0 0 0; 1 0 -1 0 0; 0 1 1 -1 -1; 0 0 0 1 1]; 3 | 4 | n = 4; 5 | m = 5; 6 | p = zeros(n,1); % placeholder for maximums 7 | 8 | % searching the maximumum path 9 | for i=2:n 10 | for j=1:(i-1) 11 | p(i)= max([p(i);-(A(j,:).*A(i,:))'.*(c + p(j))]); 12 | end 13 | end 14 | 15 | disp(['result is: ', int2str(p(n)) ]); 16 | -------------------------------------------------------------------------------- /w11:12-equality_contrained/hw/interdict_alloc_data.m: -------------------------------------------------------------------------------- 1 | rand('state',0); 2 | n=10;m=20; 3 | edges=[[1 1 1 2 2 2 3 3 4 4 5 5 6 6 7 7 8 7 8 9]'... 
4 | [2 3 4 6 3 4 5 6 6 7 8 7 7 8 8 9 9 10 10 10]']; 5 | A=zeros(n,m); 6 | for j=1:size(edges,1) 7 | A(edges(j,1),j)=-1;A(edges(j,2),j)=1; 8 | end 9 | a=2*rand(m,1); 10 | x_max = 1+rand(m,1);B=m/2; 11 | 12 | % code to plot the graph (if you have biograph) 13 | %G=sparse(edges(:,1),edges(:,2),1,n,n); 14 | %view(biograph(G)); 15 | -------------------------------------------------------------------------------- /w8-geometric_problems/fitting_sphere.m: -------------------------------------------------------------------------------- 1 | clear all; 2 | run sphere_fit_data.m 3 | U= U'; 4 | 5 | [n,m] = size(U); 6 | 7 | % dummy vectors 8 | o1 = ones(1,2); 9 | o2 = ones(size(U,1),1); 10 | 11 | cvx_begin 12 | variables x(3); 13 | A = [ -2*U ones(n,1)]; 14 | b = sum(U.^2,2); 15 | minimize (norm(A*x + b,2)) 16 | cvx_end 17 | 18 | x_c = x(1:2); 19 | r = sqrt( (norm(x_c,2))^2 - x(3)); 20 | 21 | plot(U(:,1),U(:,2),'*'); 22 | hold on 23 | circle2(x_c(1),x_c(2),r); -------------------------------------------------------------------------------- /w6-approximation_and_fitting/tv_img_interp.m: -------------------------------------------------------------------------------- 1 | % tv_img_interp.m 2 | % Total variation image interpolation. 3 | % EE364a 4 | % Defines m, n, Uorig, Known. 5 | 6 | % Load original image. 7 | Uorig = double(imread('tv_img_interp.png')); 8 | 9 | [m, n] = size(Uorig); 10 | 11 | % Create 50% mask of known pixels. 12 | rand('state', 1029); 13 | Known = rand(m,n) > 0.5; 14 | 15 | %%%%% Put your solution code here 16 | 17 | % Calculate and define Ul2 and Utv. 18 | 19 | % Placeholder: 20 | % Ul2 = ones(m, n); 21 | % Utv = ones(m, n); 22 | 23 | %%%%% 24 | 25 | 26 | -------------------------------------------------------------------------------- /w4-convex_problems/complex_ls.m: -------------------------------------------------------------------------------- 1 | 2 | m = 30; 3 | n = 100; 4 | 5 | A = randn(m,n) + i*randn(m,n); 6 | b = randn(m,1) + i*randn(m,1); 7 | 8 | % a) 9 | cvx_begin 10 | variable x_1(n) complex 11 | minimize (norm(x_1,2)) 12 | subject to 13 | A*x_1 == b 14 | cvx_end 15 | 16 | % b) 17 | cvx_begin 18 | variable x_2(n) complex 19 | minimize (norm(x_2,inf)) 20 | subject to 21 | A*x_2 == b 22 | cvx_end 23 | 24 | figure 25 | scatter(real(x_1),imag(x_1),'blue'); 26 | hold on 27 | scatter(real(x_2),imag(x_2),'filled'); 28 | title('norms') 29 | 30 | 31 | -------------------------------------------------------------------------------- /w10-unconstrained/advertising/ad_disp_data.m: -------------------------------------------------------------------------------- 1 | % data for online ad display problem 2 | rand('state',0); 3 | n=100; %number of ads 4 | m=30; %number of contracts 5 | T=60; %number of periods 6 | 7 | I=10*rand(T,1); %number of impressions in each period 8 | R=rand(n,T); %revenue rate for each period and ad 9 | q=T/n*50*rand(m,1); %contract target number of impressions 10 | p=rand(m,1); %penalty rate for shortfall 11 | Tcontr=(rand(T,m)>.8); %one column per contract. 1's at the periods to be displayed 12 | for i=1:n 13 | contract=ceil(m*rand); 14 | Acontr(i,contract)=1; %one column per contract. 
1's at the ads to be displayed 15 | end 16 | -------------------------------------------------------------------------------- /w10-unconstrained/grav_feed_network_data.m: -------------------------------------------------------------------------------- 1 | rand('state',0); 2 | n=10;m=20;k = 3; % (edges 1,2,3 are producers and 4 to 10 are consumers) 3 | alpha = 15; 4 | Rmin = 0.5*ones(m,1); Rmax = 2.5*ones(m,1); 5 | Smax = 5*ones(k,1); 6 | L = 5*rand(m,1)+5; %pipe length 7 | 8 | N = 10; % 10 consumption scenarios 9 | C=2*rand(n-k,N); % C(:,i) = c^(i) Consumption vectors 10 | 11 | % altitudes 12 | h = rand(n,1); 13 | h = sort(h, 'descend'); 14 | 15 | edges=[[1 1 1 2 2 2 3 3 4 4 5 5 6 6 7 7 8 7 8 9]'... 16 | [2 3 4 6 3 4 5 6 6 7 8 7 7 8 8 9 9 10 10 10]']; 17 | 18 | % incidence matrix 19 | A=zeros(n,m); 20 | for j=1:size(edges,1) 21 | A(edges(j,1),j)=-1;A(edges(j,2),j)=1; 22 | end 23 | 24 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # convex-optimization 2 | Convex optimization course https://see.stanford.edu/Course/EE364A completed homework assignments 3 | 4 | ## Description 5 | 6 | This course concentrates on recognizing and solving convex optimization problems that arise in engineering. Convex sets, functions, and optimization problems. Basics of convex analysis. Least-squares, linear and quadratic programs, semidefinite programming, minimax, extremal volume, and other problems. Optimality conditions, duality theory, theorems of alternative, and applications. Interiorpoint methods. Applications to signal processing, control, digital and analog circuit design, computational geometry, statistics, and mechanical engineering. 7 | -------------------------------------------------------------------------------- /w6-approximation_and_fitting/team_abilities.m: -------------------------------------------------------------------------------- 1 | % in this problem we have a dataset of teams that compate against each 2 | % other. The task is to find a vector of their abilities (a). 3 | clear all; 4 | run team_data.m; 5 | A = sparse(1:m,train(:,1),train(:,3),m,n) + ... 
6 | sparse(1:m,train(:,2),-train(:,3),m,n); 7 | C = sum(train(:,3)==1); 8 | 9 | % training 10 | cvx_begin quiet 11 | variable a(n) 12 | % maximize (sum(sigma*log_normcdf(a(train(:,2)) - a(train(:,1))))); 13 | maximize (sum(log_normcdf(A*a/sigma))) 14 | subject to 15 | a >= 0; 16 | a <= 1; 17 | cvx_end 18 | 19 | % testing 20 | y = sign(a(test(:,1)) - a(test(:,2))); 21 | correct = sum( y == test(:,3)) 22 | accuracy = correct / m 23 | 24 | 25 | -------------------------------------------------------------------------------- /w6-approximation_and_fitting/least_squares.m: -------------------------------------------------------------------------------- 1 | n = 2; 2 | m = 7; 3 | A = [1 2; 1 3; 1 3; 1 4; 1 4; 1 1; 1 8]; 4 | b = [2;2;3;3;4;60;8]; % with outliers 5 | %b = [2;2;3;3;4;1;8]; % without outliers 6 | 7 | % least squares solution 8 | cvx_begin 9 | variable x_1(n); 10 | minimize( norm(A*x_1-b)); 11 | cvx_end 12 | 13 | % huber least squares 14 | cvx_begin 15 | variable x_2(n); 16 | minimize (sum(huber(A*x_2-b))); 17 | cvx_end 18 | 19 | 20 | % plots 21 | rng default; % For reproducibility 22 | figure; 23 | 24 | scatter(A(:,2),b,'mo'); 25 | hold on 26 | 27 | % plotting least squares 28 | plot(A(:,2),A*x_1); 29 | 30 | % plotting huber least squares 31 | plot(A(:,2),A*x_2); 32 | legend('data points','LS fit','Huber LS fit'); 33 | hold off -------------------------------------------------------------------------------- /w6-approximation_and_fitting/signal_denoise.m: -------------------------------------------------------------------------------- 1 | randn('state',0); 2 | n = 4000; t = (0:n-1)'; 3 | exact = 0.5*sin((2*pi/n)*t).*sin(0.01*t); 4 | corrupt = exact + 0.05*randn(size(exact)); 5 | 6 | x = t*1e9; 7 | figure(1) 8 | subplot(311) 9 | plot(t,exact,'-'); 10 | axis([0 n -0.6 0.6]) 11 | title('original signal'); 12 | ylabel('ya'); 13 | 14 | subplot(312) 15 | plot(t,corrupt,'-'); 16 | axis([0 n -0.6 0.6]) 17 | title('corrupted signal'); 18 | ylabel('ya'); 19 | 20 | 21 | % reconstruction 22 | lambda = 50; % the coefficient of smoothness 23 | cvx_begin quiet 24 | variable x(n) 25 | minimize(norm(x-corrupt,2)+lambda*norm(x(2:n)-x(1:n-1),2)) 26 | cvx_end 27 | 28 | 29 | subplot(313) 30 | plot(t,x,'-'); 31 | axis([0 n -0.6 0.6]) 32 | title('reconstructed signal'); 33 | ylabel('ya'); 34 | -------------------------------------------------------------------------------- /w8-geometric_problems/quad_pseud_metric.m: -------------------------------------------------------------------------------- 1 | clear all; 2 | run quad_metric_data_norng.m; 3 | X = X'; 4 | Y = Y'; 5 | d = d'; 6 | 7 | cvx_begin quiet 8 | variable P(n,n) symmetric 9 | d_hat = []; 10 | for i = 1:N 11 | d_hat = [d_hat; ((X(i,:) - Y(i,:))*P*(X(i,:) - Y(i,:))')]; 12 | end 13 | minimize ((1/N) * sum(d.^2 - 2*d .* d_hat.^(1/2) + d_hat )); 14 | subject to 15 | P == semidefinite(n) 16 | d_hat > 0; 17 | cvx_end 18 | 19 | plot(d,'*'); 20 | hold on; 21 | plot(d_hat.^(1/2) ,'+'); 22 | 23 | % run on a test data 24 | X_test = X_test'; 25 | Y_test = Y_test'; 26 | d_hat = []; 27 | d_test = d_test'; 28 | 29 | for i = 1:N_test 30 | d_hat = [d_hat; (X_test(i,:) - Y_test(i,:))*P*(X_test(i,:) - Y_test(i,:))']; 31 | end 32 | 33 | RMSE = (1/N_test) * sum(d_test.^2 - 2*d_test .* d_hat.^(1/2) + d_hat ); -------------------------------------------------------------------------------- /w5-duality/hw2.m: -------------------------------------------------------------------------------- 1 | cvx_begin quiet 2 | variable x(1) 3 | dual variable lambda; 4 | minimize (x^2 + 
1) 5 | subject to 6 | lambda: (x-2)*(x-4) <=0 7 | cvx_end 8 | disp(['optimal value: ',num2str(x^2 + 1)]); 9 | 10 | obj = @(x) x.^2 + 1; 11 | opt = obj(x); 12 | constr = @(x)(x_space-2).*(x_space-4); 13 | 14 | x_space = linspace(-5,5,100); 15 | x_feas = (constr(x)<=0); 16 | x_feas = x_space(x_feas); 17 | plot(x_space,obj(x_space),'blue'); 18 | hold on 19 | plot(x_space, constr(x_space)); 20 | plot(x_feas, obj(x_feas),'red'); 21 | legend('f_o','f_1'); 22 | plot(x,opt,'o'); 23 | 24 | lagr = [2]; 25 | % colors = ['blue','pink','yellow'] 26 | L = @(x,l) (x.^2 + 1)+ l*(x-2).*(x-4); 27 | x_opt = @(l) 3*l/(1+l); 28 | for i=1:length(lagr) 29 | lag = lagr(i); 30 | plot(x_space,L(x_space,lag)); 31 | L(x_opt(lag),lag) 32 | end -------------------------------------------------------------------------------- /w8-geometric_problems/min_vol_rect.m: -------------------------------------------------------------------------------- 1 | clear all; 2 | run max_vol_box_norng.m; 3 | 4 | 5 | cvx_begin quiet 6 | variables l(n) u(n) 7 | % the length of rectange sides 8 | maximize (geo_mean(u - l)) 9 | subject to 10 | l <= u; 11 | max(A,0)*u - max(-A,0)*l <= b; 12 | cvx_end 13 | 14 | 15 | % A = [0 1; 0 -1;1 0; -1 0;-1 1]; 16 | % m = 5; 17 | % n = 2; 18 | % b = [1 1 1 1 0]'; 19 | % 20 | % cvx_begin 21 | % variables l(n) u(n) x(n) 22 | % % the length of rectange sides 23 | % maximize (geo_mean(u - l)) 24 | % subject to 25 | % A*x <= b; 26 | % x >= l; 27 | % x <= u; 28 | % l <= u; 29 | % A*u <= b; 30 | % A*l <= b; 31 | % cvx_end 32 | % 33 | % plot(A(:,1),A(:,2),'o'); 34 | % hold on; 35 | % rectangle('Position',[l' (u-l)']); 36 | 37 | -------------------------------------------------------------------------------- /w7-stat_estimation/signal_estim2.m: -------------------------------------------------------------------------------- 1 | run ml_estim_incr_signal_data_norng.m; 2 | 3 | 4 | % with monotonocity constraints and non-negativity 5 | cvx_begin quiet 6 | variable x(N) 7 | yhat = conv(h,x); 8 | yhat = yhat(1:end-3); 9 | minimize (norm((y - yhat),2)) 10 | subject to 11 | x(1) >= 0; 12 | x(2:N) >= x(1:N-1); 13 | cvx_end 14 | 15 | figure 16 | subplot(211); 17 | plot(x,'r') 18 | hold on 19 | plot(xtrue,'b'); 20 | ylabel('xval'); 21 | legend('x','xtrue') 22 | title('xml'); 23 | hold off; 24 | 25 | % without monotonocity constraints and non-negativity 26 | cvx_begin quiet 27 | variable x(N) 28 | yhat = conv(h,x); 29 | yhat = yhat(1:end-3); 30 | minimize (norm((y - yhat),2)) 31 | cvx_end 32 | 33 | subplot(212); 34 | plot(x,'r') 35 | hold on 36 | plot(xtrue,'b'); 37 | ylabel('xval'); 38 | legend('x','xtrue') 39 | title('xml free'); 40 | hold off; -------------------------------------------------------------------------------- /w11:12-equality_contrained/extra/linear_barrier/logbarrier.m: -------------------------------------------------------------------------------- 1 | % logbarrier method as bescided on p569 [Convex Optimization, Stephan Boyed] 2 | function [x, history] = logbarrier(x, A, b, f, grad, hessian, phi, d_phi, d2_phi) 3 | 4 | %% hyper-parameters 5 | eps = 10^-3; % tolerance 6 | t = 1; 7 | mu = 20; 8 | 9 | [m, n] = size(A); 10 | history =[]; 11 | while (n/t > eps) 12 | % redefine given functions 13 | new_f = @(x) t*f(x) + phi(x); 14 | new_hessian = @(x) t*hessian(x) + d2_phi(x); 15 | new_grad = @(x) t* grad(x) + d_phi(x); 16 | %% centering step (using Newton's optimization) 17 | [x, nu, lambdas] = newton_KKT_and_BE(x, A, b, new_f, new_grad, new_hessian); 18 | t = t*mu; 19 | % storing history 20 | history = 
[history; length(lambdas) n/t]; 21 | end 22 | history = history'; 23 | end -------------------------------------------------------------------------------- /w10-unconstrained/grad_newton/grad_descent.m: -------------------------------------------------------------------------------- 1 | function [x, f_vals,i] = grad_descent(x, A, iter,alpha,beta,f,grad) 2 | eta = 0.0001; 3 | f_vals = []; 4 | for i=0:iter 5 | t = 1; 6 | gr = grad(x); 7 | 8 | % termination condition 9 | if(norm(gr)<=eta) 10 | break; 11 | end 12 | 13 | delta_x = - gr; 14 | % making sure that we start in the domain of the function 15 | while t > 0 && (any(A*(x + t*delta_x)>1) || any(abs(x + t*delta_x)>1)) 16 | t = beta*t; 17 | end 18 | 19 | % backtracking linesearch 20 | while t > 0 && (f(x + t*delta_x) > f(x) + alpha*t*gr'*delta_x) 21 | t = beta*t; 22 | end 23 | % the actual optimization 24 | x = x + t * delta_x; 25 | 26 | % saving f values 27 | f_vals = [f_vals; f(x)]; 28 | end 29 | 30 | end -------------------------------------------------------------------------------- /w11:12-equality_contrained/extra/linear_barrier/centering.m: -------------------------------------------------------------------------------- 1 | n = 100; 2 | m = 10; 3 | 4 | %% Generating data %% 5 | randn('seed',0); 6 | A=randn(m-1,n); 7 | A = [A;ones(1,n)]; 8 | 9 | while rank(A)~=m 10 | A=randn(m-1,n); 11 | A = [A;ones(1,n)]; 12 | end 13 | 14 | x = randn(n,1); 15 | x = sign(x) .* x; 16 | b = A*x; 17 | c = randn(n,1); 18 | %% Gradients and functions 19 | % run gradient_functions.m 20 | % need to derivative 21 | 22 | % newton's method 23 | [x_opt, nu, lambdas] = newton_KKT_and_BE(x, A, b, f, grad, hessian); 24 | %[x_opt, nu, lambdas] = lp_acent(A, b, c, x); 25 | % some final checks 26 | 27 | 28 | disp 'Final checks: '; 29 | 30 | norm(A*x_opt-b) 31 | all(x>=0) 32 | iters = 1:length(lambdas); 33 | % subplot(212); 34 | ax = gca; 35 | plot(iters, lambdas); 36 | title('Newtons method'); 37 | xlabel('iter'); 38 | ylabel('$\frac{\lambda^2}{2} $','Interpreter','latex'); 39 | set(gca,'yscale','log'); -------------------------------------------------------------------------------- /w10-unconstrained/grad_newton/newton.m: -------------------------------------------------------------------------------- 1 | function [x, f_vals,i] = newton(x, A, iter,alpha,beta,f,grad,hessian) 2 | eta = 0.0001; 3 | f_vals = [f(x)]; 4 | for i=0:iter 5 | t = 1; 6 | gr = grad(x); 7 | H = hessian(x); 8 | 9 | delta_x = - H\gr; 10 | 11 | % termination condition 12 | if(norm(gr'*delta_x)<= 2*eta) % norm is not affected by the sign 13 | break; 14 | end 15 | 16 | % making sure that we start in the domain of the function 17 | while t > 0 && (any(A*(x + t*delta_x)>1) || any(abs(x + t*delta_x)>1)) 18 | t = beta*t; 19 | end 20 | 21 | % backtracking linesearch 22 | while t > 0 && (f(x + t*delta_x) > f(x) + alpha*t*gr'*delta_x) 23 | t = beta*t; 24 | end 25 | % the actual optimization 26 | x = x + t * delta_x; 27 | 28 | % saving f values 29 | f_vals = [f_vals; f(x)]; 30 | end -------------------------------------------------------------------------------- /w11:12-equality_contrained/extra/linear_barrier/phase_1.m: -------------------------------------------------------------------------------- 1 | % We solve a simple LP problem using logbarrier method + phase I 2 | % the problem is : min c'x 3 | % s.t. 
Ax ==b 4 | % x >=0 5 | clear all; 6 | 7 | %% General parameters 8 | n = 500; 9 | m = 100; 10 | 11 | %% Hyperparameters 12 | t = 1; 13 | mu = 10; 14 | 15 | %% Generating data %% 16 | randn('seed', 0); 17 | A = [randn(m-1,n);ones(1,n)]; 18 | x = rand(n,1) + 0.1; 19 | c = randn(n,1); 20 | b = A*x; 21 | 22 | [x_star, history] = lp_solve(A, b, c); 23 | % compare to cvx 24 | cvx_begin quiet 25 | variables x(n) 26 | minimize (c'*x) 27 | subject to 28 | A*x == b 29 | x >= 0 30 | cvx_end 31 | 32 | 33 | fprintf('\n\n Optimal value found by barrier method: %d \n', c'*x_star); 34 | fprintf('Optimal value found by CVX: %d \n', cvx_optval); 35 | 36 | 37 | % try to solve infeasible problem 38 | b = randn(m,1); 39 | c = randn(n,1); 40 | [x_star, history] = lp_solve(A, b, c); -------------------------------------------------------------------------------- /w7-stat_estimation/ml_estim_incr_signal_data_norng.m: -------------------------------------------------------------------------------- 1 | clear all; close all; 2 | 3 | % create problem data 4 | N = 100; 5 | % create an increasing input signal 6 | xtrue = zeros(N,1); 7 | xtrue(1:40) = 0.1; 8 | xtrue(50) = 2; 9 | xtrue(70:80) = 0.15; 10 | xtrue(80) = 1; 11 | xtrue = cumsum(xtrue); 12 | 13 | % pass the increasing input through a moving-average filter 14 | % and add Gaussian noise 15 | h = [1 -0.85 0.7 -0.3]; k = length(h); 16 | yhat = conv(h,xtrue); 17 | y = yhat(1:end-3) ... 18 | + [-0.43;-1.7;0.13;0.29;-1.1;1.2;1.2;-0.038;0.33;0.17;-0.19;0.73;-0.59;2.2;-0.14;0.11;1.1;0.059;-0.096;-0.83;0.29;-1.3;0.71;1.6;-0.69;0.86;1.3;-1.6;-1.4;0.57;-0.4;0.69;0.82;0.71;1.3;0.67;1.2;-1.2;-0.02;-0.16;-1.6;0.26;-1.1;1.4;-0.81;0.53;0.22;-0.92;-2.2;-0.059;-1;0.61;0.51;1.7;0.59;-0.64;0.38;-1;-0.02;-0.048;4.3e-05;-0.32;1.1;-1.9;0.43;0.9;0.73;0.58;0.04;0.68;0.57;-0.26;-0.38;-0.3;-1.5;-0.23;0.12;0.31;1.4;-0.35;0.62;0.8;0.94;-0.99;0.21;0.24;-1;-0.74;1.1;-0.13;0.39;0.088;-0.64;-0.56;0.44;-0.95;0.78;0.57;-0.82;-0.27]; -------------------------------------------------------------------------------- /w11:12-equality_contrained/extra/linear_barrier/solution/phase_1.m: -------------------------------------------------------------------------------- 1 | % solves standard form LP for two problem instances 2 | clear all; 3 | m = 100; 4 | n = 500; 5 | % infeasible problem instance 6 | randn('seed',0); 7 | A = [rand(m-1,n); ones(1,n)]; 8 | b = randn(m,1); 9 | c = randn(n,1); 10 | [x_star,p_star,gap, status, nsteps] = lp_solve(A,b,c); 11 | % solve LP using cvx for comparison 12 | cvx_begin 13 | variable x(n) 14 | minimize(c'*x) 15 | subject to 16 | A*x == b 17 | x >= 0 18 | cvx_end 19 | 20 | 21 | % feasible problem instance 22 | A = [randn(m-1,n); ones(1,n)]; 23 | v = rand(n,1) + 0.1; 24 | b = A*v; 25 | c = randn(n,1); 26 | [x_star,p_star,gap,status,nsteps] = lp_solve(A,b,c); 27 | % solve LP using cvx for comparison 28 | cvx_begin 29 | variable x(n) 30 | minimize(c'*x) 31 | subject to 32 | A*x == b 33 | x >= 0 34 | cvx_end 35 | fprintf('\n\nOptimal value found by barrier method:\n'); 36 | p_star 37 | fprintf('Optimal value found by CVX:\n'); 38 | cvx_optval 39 | fprintf('Duality gap from barrier method:\n'); 40 | gap -------------------------------------------------------------------------------- /w10-unconstrained/advertising/ad_display.m: -------------------------------------------------------------------------------- 1 | clear all; 2 | run ad_disp_data.m 3 | o = ones(n,1); 4 | cvx_begin quiet 5 | variable N(n,T) 6 | s = max(0,q - diag( Acontr'*N*Tcontr)); 7 | rev = trace(R*N'); 8 | 
maximize(rev - s'*p) 9 | subject to 10 | N >= 0; 11 | N'*o - I == 0; % just like Ax - b == 0 12 | cvx_end 13 | 14 | disp(['net profit: ', num2str(cvx_optval)]); 15 | disp(['total revenue: ',num2str(rev)]) 16 | disp(['total penalty payment: ',num2str(s'*p)]); 17 | 18 | 19 | % creating the maximum revenue impression matrix 20 | N_max_rev = zeros(n,T); 21 | for t = 1:T 22 | [val idx] = max(R(:,t)); 23 | N_max_rev(idx,t) = I(t); 24 | end 25 | 26 | % check constraints 27 | assert( all(N_max_rev(:) >= 0)) 28 | assert(all(N_max_rev'*o - I == 0)) 29 | s = max(0,q - diag( Acontr'*N_max_rev*Tcontr)); 30 | rev = trace(R*N_max_rev'); 31 | disp(['largest revenue net profit: ', num2str(rev - s'*p)]); 32 | disp(['total revenue: ',num2str(rev)]) 33 | disp(['total penalty payment: ',num2str(s'*p)]); -------------------------------------------------------------------------------- /w10-unconstrained/bio/fba_data.m: -------------------------------------------------------------------------------- 1 | % data file for flux balance analysis in systems biology 2 | % From Segre, Zucker et al "From annotated genomes to metabolic flux 3 | % models and kinetic parameter fitting" OMICS 7 (3), 301-316. 4 | 5 | % Stoichiometric matrix 6 | S = [ 7 | % M1 M2 M3 M4 M5 M6 8 | 1 0 0 0 0 0 % R1: extracellular --> M1 9 | -1 1 0 0 0 0 % R2: M1 --> M2 10 | -1 0 1 0 0 0 % R3: M1 --> M3 11 | 0 -1 0 2 -1 0 % R4: M2 + M5 --> 2 M4 12 | 0 0 0 0 1 0 % R5: extracellular --> M5 13 | 0 -2 1 0 0 1 % R6: 2 M2 --> M3 + M6 14 | 0 0 -1 1 0 0 % R7: M3 --> M4 15 | 0 0 0 0 0 -1 % R8: M6 --> extracellular 16 | 0 0 0 -1 0 0 % R9: M4 --> cell biomass 17 | ]'; 18 | 19 | [m,n] = size(S); 20 | vmax = [ 21 | 10.10; % R1: extracellular --> M1 22 | 100; % R2: M1 --> M2 23 | 5.90; % R3: M1 --> M3 24 | 100; % R4: M2 + M5 --> 2 M4 25 | 3.70; % R5: extracellular --> M5 26 | 100; % R6: 2 M2 --> M3 + M6 27 | 100; % R7: M3 --> M4 28 | 100; % R8: M6 --> extracellular 29 | 100; % R9: M4 --> cell biomass 30 | ]; 31 | -------------------------------------------------------------------------------- /w5-duality/hw1.m: -------------------------------------------------------------------------------- 1 | % objective 2 | P = [1 -1/2; -1/2 2]; 3 | q = [-1;0]; 4 | 5 | % constraints 6 | u1 = -2; 7 | u2 = -3; 8 | a1 = [1;2]; 9 | a2 = [1;-4]; 10 | a3 = [-1;-1]; 11 | A = [a1';a2';a3']; % constraints matrix 12 | b = [ u1; u2; 5]; 13 | 14 | [lambda,x] = dual_solver(A,P,q,b); 15 | 16 | %% checking KKT conditions 17 | % constraints 18 | lagr_grad = @(x,lambda) 2*P*x + q + A'*lambda; 19 | disp(all(A*x <= b )); 20 | disp(all(lambda >=0)); 21 | disp(lambda'*(A*x-b)<=eps); % can be a veryyyy small number, so can't say == 0 22 | disp(all(lagr_grad(x,lambda)<=10^-6)); 23 | delta = [-0.1,0,0.1]; 24 | for i = 1:3 25 | for j = 1:3 26 | d1 = delta(i); 27 | d2 = delta(j); 28 | u = [d1;d2;0]; 29 | pred = x'*P*x + q'*x - lambda'*u; 30 | [lambda_per,x_per] = dual_solver(A,P,q,b+u); 31 | actual = x_per'*P*x_per + q'*x_per; 32 | disp(['for d1 ',num2str(d1),' d2 ',num2str(d2), ' pred: ', num2str(pred),' actual: ', num2str(actual), ' diff: ', num2str(actual-pred)]); 33 | end 34 | end 35 | 36 | -------------------------------------------------------------------------------- /w11:12-equality_contrained/extra/linear_barrier/lp_solve.m: -------------------------------------------------------------------------------- 1 | function [x, history] = lp_solve(A, b, c) 2 | [m,n] = size(A); 3 | x0 = A\b; 4 | t0 = 2 -min(x0); 5 | z0 = x0 + t0*ones(n,1) - ones(n,1); 6 | b1 = b - A*ones(n,1); 7 | A1 = 
[A,-A*ones(n,1)]; 8 | c1 = [zeros(n,1);1]; 9 | run gradient_functions.m; 10 | %% Gradients and functions (here we modify our functions a bit) 11 | f = @(x) c1'*x; 12 | grad = @(x) c1; 13 | hessian = @(x) zeros(n+1, n+1); 14 | 15 | % phase I 16 | [z_star, history] = logbarrier([z0;t0], A1, b1, f, grad, hessian, phi, d_phi, d2_phi); 17 | x_feas = z_star(1:n) + (1 - z_star(n+1)); 18 | t = z_star(n+1); 19 | 20 | if (t >=1) 21 | fprintf('the problem is infeasible, value of t is: %d \n', t-1); 22 | x = []; 23 | history = []; 24 | else 25 | % phase II 26 | run gradient_functions.m; % redifine functions 27 | [x, history] = logbarrier(x_feas, A, b, f, grad, hessian, phi, d_phi, d2_phi); 28 | end 29 | end 30 | 31 | -------------------------------------------------------------------------------- /w11:12-equality_contrained/extra/linear_barrier/LP_inequality.m: -------------------------------------------------------------------------------- 1 | % We solve a simple LP problem using logbarrier method 2 | % the problem is : min c'x 3 | % s.t. Ax ==b 4 | % x >=0 5 | clear all; 6 | %% General parameters 7 | 8 | n = 500; 9 | m = 100; 10 | 11 | %% Generating data %% 12 | randn('seed', 0); 13 | A = [randn(m-1,n);ones(1,n)]; 14 | x = rand(n,1) + 0.1; 15 | b = A*x; 16 | c = randn(n,1); 17 | 18 | %% Gradients and functions 19 | run gradient_functions.m; 20 | 21 | % run logbarrier method 22 | [x_sol, history] = logbarrier(x, A, b, f, grad, hessian, phi, d_phi, d2_phi); 23 | [xx, yy] = stairs(cumsum(history(1,:)),history(2,:)); 24 | semilogy(xx,yy); 25 | 26 | 27 | % for comparison use cvx 28 | % solve LP using cvx for comparison 29 | cvx_begin quiet 30 | variable x(n) 31 | minimize(c'*x) 32 | subject to 33 | A*x == b 34 | x >= 0 35 | cvx_end 36 | 37 | fprintf('\n\nOptimal value found by barrier method: %d \n', c'*x_sol); 38 | fprintf('Optimal value found by CVX: %d \n', cvx_optval); 39 | 40 | -------------------------------------------------------------------------------- /w11:12-equality_contrained/extra/linear_barrier/solution/lp_barrier.m: -------------------------------------------------------------------------------- 1 | function [x_star, history, gap] = lp_barrier(A,b,c,x_0) 2 | % solves standard form LP 3 | % minimizec^T x 4 | % subject to Ax = b, x >=0; 5 | % using barrier method, given strictly feasible x0 6 | % uses function std_form_LP_acent() to carry out centering steps 7 | % returns: 8 | % - primal optimal point x_star 9 | % - history, a 2xk matrix that returns number of newton steps 10 | % in each centering step (top row) and duality gap (bottom row) 11 | % (k is total number of centering steps) 12 | % - gap, optimal duality gap 13 | % barrier method parameters 14 | T_0 = 1; 15 | MU = 20; 16 | EPSILON = 1e-3; % duality gap stopping criterion 17 | n = length(x_0); 18 | t = T_0; 19 | x = x_0; 20 | history = []; 21 | while(1) 22 | [x_star, nu_star, lambda_hist] = lp_acent(A,b,t*c,x); 23 | x = x_star; 24 | gap = n/t; 25 | history = [history [length(lambda_hist); gap]]; 26 | if gap < EPSILON break; end 27 | t = MU*t; 28 | end 29 | -------------------------------------------------------------------------------- /w11:12-equality_contrained/extra/linear_barrier/solution/run_solution.m: -------------------------------------------------------------------------------- 1 | % script that generates data and tests the functions 2 | % std_form_LP_acent 3 | % std_form_LP_barrier 4 | clear all; 5 | m = 100; 6 | n = 500; 7 | rand('seed',0); 8 | A = [randn(m-1,n); ones(1,n)]; 9 | x_0 = rand(n,1) + 0.1; 10 | b = A*x_0; 
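% Note: x_0 is strictly feasible by construction (x_0 > 0 elementwise and
% A*x_0 = b exactly), which is what lp_acent and lp_barrier assume when they
% are handed x_0 as the starting point below.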
11 | c = randn(n,1); 12 | % analytic centering 13 | figure 14 | [x_star, nu_star, lambda_hist] = lp_acent(A,b,c,x_0); 15 | semilogy(lambda_hist,'bo-') 16 | xlabel('iters') 17 | ylabel('lambdasqr/2') 18 | % solve the LP with barrier 19 | figure 20 | [x_star, history, gap] = lp_barrier(A,b,c,x_0); 21 | [xx, yy] = stairs(cumsum(history(1,:)),history(2,:)); 22 | semilogy(xx,yy,'bo-'); 23 | xlabel('iters') 24 | ylabel('gap') 25 | p_star = c'*x_star; 26 | % solve LP using cvx for comparison 27 | cvx_begin 28 | variable x(n) 29 | minimize(c'*x) 30 | subject to 31 | A*x == b 32 | x >= 0 33 | cvx_end 34 | fprintf('\n\nOptimal value found by barrier method:\n'); 35 | p_star 36 | fprintf('Optimal value found by CVX:\n'); 37 | cvx_optval 38 | fprintf('Duality gap from barrier method:\n'); 39 | gap -------------------------------------------------------------------------------- /w6-approximation_and_fitting/rational_minimax.m: -------------------------------------------------------------------------------- 1 | % QUASICONVEX examples 2 | 3 | k = 201; 4 | t = linspace(-3,3,k)'; 5 | T = [t t t]; 6 | y = exp(t); 7 | pow = 0:2; 8 | for i =1:k 9 | T(i,:) = T(i,:).^pow; 10 | end 11 | 12 | % upper and lower bounds 13 | u = exp(3); 14 | l = 0; 15 | th = 0.0001; 16 | 17 | while (u-l) > th 18 | gamma = (l+u)/2; 19 | cvx_begin quiet 20 | variables a(3) b(2) 21 | subject to 22 | T*a - y.*(T*[1;b]) <= gamma*(T*[1;b]) 23 | T*a - y.*(T*[1;b])>= - gamma*(T*[1;b]) 24 | % T*[1;b] > 0 25 | cvx_end 26 | if strcmp(cvx_status,'Solved') 27 | u = gamma; 28 | else 29 | l = gamma; 30 | end 31 | end 32 | 33 | obj = norm((T*a)./(T*[1;b])-y,inf) 34 | y_fit = (T*a)./(T*[1;b]); 35 | 36 | 37 | % cvx_begin 38 | % variables a(3) b(2) t 39 | % minimize t 40 | % subject to 41 | % T*a - y.*(T*[1;b]) <= t.* (T*[1;b]) 42 | % T*[1;b] > 0 43 | 44 | % cvx_end 45 | 46 | figure(1); 47 | plot(t,y,'b', t,y_fit,'r+'); 48 | xlabel('t'); 49 | ylabel('y'); 50 | 51 | 52 | 53 | 54 | 55 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016 Arthur 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /w10-unconstrained/simple_backtrack_line_search.m: -------------------------------------------------------------------------------- 1 | 2 | function simple_backtrack_line_search 3 | clear all; 4 | range_x = [-10:10]; 5 | 6 | ALPHA = 0.05; 7 | BETA = 0.7; 8 | 9 | x = 2; % starting point 10 | iter = 10; 11 | 12 | 13 | figure(1) 14 | plot(range_x, f(range_x)); 15 | hold on; 16 | % plot(x,f(x),'o'); 17 | t=1; 18 | % x_delta = -der_f(x); 19 | % plot(x + t*x_delta ,f(x) + t*der_f(x)*x_delta,'o'); 20 | % plot(range_x,line(x,t,der_f(x))); 21 | 22 | for i = 1:iter 23 | x_delta = - der_f(x); 24 | t = 1; 25 | while ( f(x+ t*x_delta) > f(x) - ALPHA * t * x_delta^2) 26 | plot(x,f(x),'o') 27 | plot(x + t*x_delta ,f(x) + t*der_f(x)*x_delta,'o'); 28 | plot(range_x,line(x,t,-x_delta)); 29 | t = BETA * t; 30 | plot(x + t*x_delta ,f(x) + t*der_f(x)*x_delta,'o'); 31 | 32 | end 33 | x = x + t*x_delta; 34 | end 35 | 36 | 'optimal solution is: ' 37 | x 38 | 39 | function y = f(x) 40 | y = x.^2; 41 | end 42 | 43 | function der = der_f(x) 44 | der = 2*x; 45 | end 46 | 47 | function l = line(cur_x,t,slope) 48 | l = f(cur_x) + t*slope * (range_x-cur_x); 49 | end 50 | 51 | end -------------------------------------------------------------------------------- /w10-unconstrained/ranking/ranking.m: -------------------------------------------------------------------------------- 1 | clear all; 2 | run rank_aggr_data.m; 3 | 4 | P = zeros(m,n); 5 | % creating incident matrix 6 | for k = 1:m 7 | ri = preferences(k,1); 8 | rj = preferences(k,2); 9 | P(k,ri)= -1; 10 | P(k,rj) = 1; 11 | end 12 | 13 | 14 | % plain penalty function 15 | cvx_begin quiet 16 | variable r_p(n) 17 | minimize (sum(max(P*r_p+1,0))); 18 | cvx_end 19 | 20 | % quadratic penalty function 21 | cvx_begin quiet 22 | variable r_s(n) 23 | minimize (sum_square_pos(P*r_s+1)); 24 | cvx_end 25 | 26 | % computing positive violations 27 | v_p = max(P*r_p+1,0); 28 | v_s = (max(P*r_s+1,0)).^2; 29 | 30 | v_pos_p = sum(v_p>0.001); 31 | v_pos_s = sum(v_s>0.001); 32 | disp(['squared has ', num2str(v_pos_s - v_pos_p), ' more violations ']); 33 | 34 | ss = max(abs([P*r_p+1;P*r_s+1])); 35 | tt = -ceil(ss):0.05:ceil(ss); % sets center for each bin 36 | 37 | range_max=2.0; rr=-range_max:1e-2:range_max; 38 | figure(1), clf 39 | subplot(211); 40 | h_p = histogram(v_p,tt); 41 | title('Plain penalty'); 42 | axis([-range_max range_max 0 40]); 43 | 44 | subplot(212) 45 | h_s = histogram(v_s,tt); 46 | title('Quadratic penalty'); 47 | axis([-range_max range_max 0 40]); 48 | 49 | -------------------------------------------------------------------------------- /w10-unconstrained/grad_newton/grad_and_newton_last.m: -------------------------------------------------------------------------------- 1 | n = 100; 2 | m = 200; 3 | randn('state',1); 4 | A=randn(m,n); 5 | iter = 1000; 6 | alpha = 0.01; 7 | beta = 0.5; 8 | 9 | grad = @(x) A'*(1./(1-A*x)) + 1./(1-x) - 1./(1+x); 10 | hessian = @(x) A'*diag(1./(A*x-1).^2)*A + diag(1./(1-x).^2 + 1./(1+x).^2); 11 | f = @(x) - sum(log(1 - A*x)) - sum(log(1-x)) - sum(log(1+x)); 12 | 13 | x = zeros(n,1); 14 | % gradient method 15 | [x, f_vals,grad_it] = grad_descent(x, A, iter, alpha,beta,f,grad); 16 | 17 | figure 18 | % some final checks 19 | disp 'Final checks:'; 20 | all(A*x<=1) 21 | any(abs(x)<=1) 22 | p_star = f(x); % assuming that the last solution is the most optimal 23 | subplot(211); 24 | plot(f_vals - p_star); 25 | title('Gradient descent'); 26 | xlabel('iter'); 
27 | ylabel('f - p^{*}'); 28 | 29 | 30 | x = zeros(n,1); 31 | % newton's method 32 | [x, f_vals, newtons_it] = newton(x, A, iter, alpha, beta, f, grad, hessian); 33 | 34 | % some final checks 35 | disp 'Final checks:'; 36 | all(A*x<=1) 37 | any(abs(x)<=1) 38 | p_star = f(x); % assuming that the last solution is the most optimal 39 | subplot(212); 40 | plot(f_vals - p_star); 41 | title('Newtons method'); 42 | xlabel('iter'); 43 | ylabel('f(x) - p^{*}'); 44 | -------------------------------------------------------------------------------- /w4-convex_problems/hw.m: -------------------------------------------------------------------------------- 1 | % question 2 2 | % cvx_begin 3 | % variable x1; 4 | % variable x2; 5 | % minimize (x1^2+9*x2^2); 6 | % subject to 7 | % 2*x1 + x2 >= 1; 8 | % x1 + 3*x2 >= 1; 9 | % x1 >= 0; 10 | % x2 >= 0; 11 | % cvx_end 12 | 13 | % question 3 14 | rng(0,'v5uniform'); 15 | n=100; 16 | m=300; 17 | A=rand(m,n); 18 | b=A*ones(n,1)/2; 19 | c=-rand(n,1); 20 | 21 | 22 | cvx_begin 23 | variable x(n) 24 | minimize (c'*x) 25 | subject to 26 | A*x <= b 27 | x>=0 28 | x<=1 29 | cvx_end 30 | lower_bound = cvx_optval 31 | 32 | 33 | t_vec = linspace(0,1,n); 34 | obj = zeros(n,1); 35 | max_viol = zeros(n,1); 36 | for i = 1:n 37 | t = t_vec(i); 38 | x_prime = (x >= t); 39 | f_val = c'*x_prime; 40 | max_viol(i) = max(A*x_prime - b); 41 | obj(i) = f_val; 42 | end 43 | figure 44 | subplot(2,1,1); 45 | plot(t_vec,obj); 46 | title('objective values'); 47 | xlabel('t'); 48 | ylabel('objective'); 49 | 50 | subplot(2,1,2); 51 | plot(t_vec,max_viol); 52 | title('maximum violation'); 53 | xlabel('t') 54 | ylabel('maximum violation') 55 | 56 | 57 | upper_bound = min(obj(find(max_viol<=0))); 58 | 59 | upper_bound - lower_bound 60 | 61 | 62 | 63 | -------------------------------------------------------------------------------- /w8-geometric_problems/three_way_lin_class.m: -------------------------------------------------------------------------------- 1 | clear all; 2 | run sep3way_data.m; 3 | 4 | cvx_begin 5 | variables a1(2) a2(2) a3(2) b1 b2 b3 6 | % maximize (sum(X*a1 + Y*a2 + Z*a3 ) - M*b1 - N*b2 - P*b3) 7 | 8 | subject to 9 | % 1. 10 | X'*a1 - b1 > max(X'*a2 - b2, X'*a3 - b3) 11 | % 2. 12 | Y'*a2 - b2 > max(Y'*a3 - b3, Y'*a1 - b1) 13 | % 3. 14 | Z'*a3 - b3 > max(Z'*a1 - b1, Z'*a2 - b2) 15 | 16 | % 4. 17 | % to avoid undounded optimizations 18 | norm(a1,2) <= 1 19 | norm(a2,2) <= 1 20 | norm(a3,2) <= 1 21 | 22 | cvx_end 23 | 24 | 25 | % now let's plot the three-way separation induced by 26 | % a1,a2,a3,b1,b2,b3 27 | % find maximally confusing point 28 | % a1=[1;1];a2=[1;-5];a3=[-1;-1];b1=0;b2=0;b3=0; 29 | p = [(a1-a2)';(a1-a3)']\[(b1-b2);(b1-b3)]; 30 | % 31 | % % plot 32 | t = [-7:0.01:7]; 33 | u1 = a1-a2; u2 = a2-a3; u3 = a3-a1; 34 | v1 = b1-b2; v2 = b2-b3; v3 = b3-b1; 35 | line1 = (-t*u1(1)+v1)/u1(2); idx1 = find(u2'*[t;line1]-v2>0); 36 | line2 = (-t*u2(1)+v2)/u2(2); idx2 = find(u3'*[t;line2]-v3>0); 37 | line3 = (-t*u3(1)+v3)/u3(2); idx3 = find(u1'*[t;line3]-v1>0); 38 | % 39 | plot(X(1,:),X(2,:),'*',Y(1,:),Y(2,:),'ro',Z(1,:),Z(2,:),'g+',... 
40 | t(idx1),line1(idx1),'k',t(idx2),line2(idx2),'k',t(idx3),line3(idx3),'k'); 41 | axis([-7 7 -7 7]); 42 | -------------------------------------------------------------------------------- /w11:12-equality_contrained/extra/linear_barrier/solution/lp_solve.m: -------------------------------------------------------------------------------- 1 | function [x_star,p_star,gap,status,nsteps] = lp_solve(A,b,c); 2 | % solves the LP 3 | % minimize c^T x 4 | % subject to Ax = b, x >= 0; 5 | % using a barrier method 6 | % computes a strictly feasible point by carrying out 7 | % a phase I method 8 | % returns: 9 | % - a primal optimal point x_star 10 | % - the primal optimal value p_star 11 | % - status: either ?Infeasible? or ?Solved? 12 | % - nsteps(1): number of newton steps for phase I 13 | % - nsteps(2): number of newton steps for phase I 14 | [m,n] = size(A); 15 | nsteps = zeros(2,1); 16 | % phase I 17 | x0 = A\b; t0 = 2+max(0,-min(x0)); 18 | A1 = [A,-A*ones(n,1)]; 19 | b1 = b-A*ones(n,1); 20 | 21 | z0 = x0+t0*ones(n,1)-ones(n,1); 22 | c1 = [zeros(n,1);1]; 23 | [z_star, history, gap] = lp_barrier(A1,b1,c1,[z0;t0]); 24 | if (z_star(n+1) >= 1) 25 | fprintf('\nProblem is infeasible\n'); 26 | x_star = []; p_star = Inf; status = 'Infeasible'; 27 | nsteps(1) = sum(history(1,:)); gap = []; 28 | return; 29 | end 30 | fprintf('\nFeasible point found\n'); 31 | nsteps(1) = sum(history(1,:)); 32 | x_0 = z_star(1:n)-z_star(n+1)*ones(n,1)+ones(n,1); 33 | % phase II 34 | [x_star, history, gap] = lp_barrier(A,b,c,x_0); 35 | status = 'Solved'; p_star = c'*x_star; 36 | nsteps(2) = sum(history(1,:)); -------------------------------------------------------------------------------- /w6-approximation_and_fitting/team_data.m: -------------------------------------------------------------------------------- 1 | n = 10; 2 | 3 | m = 45; 4 | 5 | m_test = 45; 6 | 7 | sigma= 0.250; 8 | 9 | train=[1 2 1; 10 | 1 3 1; 11 | 1 4 1; 12 | 1 5 1; 13 | 1 6 1; 14 | 1 7 1; 15 | 1 8 1; 16 | 1 9 1; 17 | 1 10 1; 18 | 2 3 -1; 19 | 2 4 -1; 20 | 2 5 -1; 21 | 2 6 -1; 22 | 2 7 -1; 23 | 2 8 -1; 24 | 2 9 -1; 25 | 2 10 -1; 26 | 3 4 1; 27 | 3 5 -1; 28 | 3 6 -1; 29 | 3 7 1; 30 | 3 8 1; 31 | 3 9 1; 32 | 3 10 1; 33 | 4 5 -1; 34 | 4 6 -1; 35 | 4 7 1; 36 | 4 8 1; 37 | 4 9 -1; 38 | 4 10 -1; 39 | 5 6 1; 40 | 5 7 1; 41 | 5 8 1; 42 | 5 9 -1; 43 | 5 10 1; 44 | 6 7 1; 45 | 6 8 1; 46 | 6 9 -1; 47 | 6 10 -1; 48 | 7 8 1; 49 | 7 9 1; 50 | 7 10 -1; 51 | 8 9 -1; 52 | 8 10 -1; 53 | 9 10 1; 54 | ]; 55 | 56 | test=[1 2 1; 57 | 1 3 1; 58 | 1 4 1; 59 | 1 5 1; 60 | 1 6 1; 61 | 1 7 1; 62 | 1 8 1; 63 | 1 9 1; 64 | 1 10 1; 65 | 2 3 -1; 66 | 2 4 1; 67 | 2 5 -1; 68 | 2 6 -1; 69 | 2 7 -1; 70 | 2 8 1; 71 | 2 9 -1; 72 | 2 10 -1; 73 | 3 4 1; 74 | 3 5 -1; 75 | 3 6 1; 76 | 3 7 1; 77 | 3 8 1; 78 | 3 9 -1; 79 | 3 10 1; 80 | 4 5 -1; 81 | 4 6 -1; 82 | 4 7 -1; 83 | 4 8 1; 84 | 4 9 -1; 85 | 4 10 -1; 86 | 5 6 -1; 87 | 5 7 1; 88 | 5 8 1; 89 | 5 9 1; 90 | 5 10 1; 91 | 6 7 1; 92 | 6 8 1; 93 | 6 9 1; 94 | 6 10 1; 95 | 7 8 1; 96 | 7 9 -1; 97 | 7 10 1; 98 | 8 9 -1; 99 | 8 10 -1; 100 | 9 10 1; 101 | ]; 102 | 103 | -------------------------------------------------------------------------------- /w7-stat_estimation/worse_case_loss.m: -------------------------------------------------------------------------------- 1 | clear all; 2 | 3 | % initially known parameters 4 | mu1 = 8; 5 | mu2 = 20; 6 | sigma1 = 6; 7 | sigma2 = 17.5; 8 | ro = -0.25; 9 | rmin= -30; 10 | rmax = 70; 11 | 12 | % discretized number of values that random variables can take 13 | n = 100; 14 | r = linspace(rmin,rmax,n)'; 15 | 16 | 
% define marginal probability functions 17 | prob = @(mu,sigma,r) (exp(- (r - mu).^2 /(2*sigma^2))) / sum(exp(- (r - mu).^2 /(2*sigma^2))); 18 | 19 | % marginals 20 | p1 = prob(mu1,sigma1,r); 21 | p2 = prob(mu2,sigma2,r); 22 | 23 | % form mask of region where R1 + R2 <= 0 24 | r1p = r*ones(1,n); r2p = ones(n,1)*r'; 25 | loss_mask = (r1p + r2p <= 0)'; 26 | 27 | cvx_begin quiet 28 | variable P(n,n); 29 | %p(R1 + R2 <=0 )% 30 | % maximize (sum(sum(P.*idx))); 31 | maximize (sum(sum((P(loss_mask))))) 32 | subject to 33 | % check positivity of pdf 34 | P >= 0; 35 | % check marginals 36 | sum(P,2) == p1; 37 | sum(P',2) == p2; 38 | % check that it sums to one 39 | sum(sum(P)) == 1; 40 | (sum(sum(P.*(r*r'))) - mu1*mu2) == ro * sigma1*sigma2; 41 | cvx_end 42 | 43 | % plotting 44 | P = full(P); 45 | figure 46 | subplot(211) 47 | mesh(r1p,r2p,P'); 48 | xlabel('r1'); 49 | ylabel('r2'); 50 | zlabel('density'); 51 | title('mesh'); 52 | 53 | subplot(212) 54 | contour(r1p,r2p,P'); 55 | title('contour'); 56 | 57 | 58 | -------------------------------------------------------------------------------- /w4-convex_problems/simple_portfolio_data.m: -------------------------------------------------------------------------------- 1 | %% simple_portfolio_data 2 | n=20; 3 | rng(5,'v5uniform'); 4 | pbar = ones(n,1)*.03+[rand(n-1,1); 0]*.12; 5 | rng(5,'v5normal'); 6 | S = randn(n,n); 7 | S = S'*S; 8 | S = S/max(abs(diag(S)))*.2; 9 | S(:,n) = zeros(n,1); 10 | S(n,:) = zeros(n,1)'; 11 | x_unif = ones(n,1)/n; 12 | 13 | 14 | % a scalar version 15 | 16 | % cvx_begin 17 | % variable x(n) 18 | % minimize (x'*S*x) 19 | % subject to 20 | % pbar'*x == pbar'*x_unif; % same expected return as in the case of uniform portfolio 21 | % sum(x) == 1 22 | % %x>=0 23 | % sum(max(-x,zeros(n,1))) <= 0.5 % limit on total short position 24 | % cvx_end 25 | % 26 | % % risk (standard devitation) 27 | % risk_unif = sqrt(x_unif'*S*x_unif); 28 | % risk = sqrt(x'*S*x); 29 | 30 | % a vectorized version 31 | 32 | m = 10; 33 | mus = linspace(0,1,m); 34 | risk = zeros(m,1); 35 | for i = 1:m 36 | mu = mus(i); 37 | cvx_begin quiet 38 | variable x(n) 39 | minimize (-pbar'*x + mu* x'*S*x ) 40 | subject to 41 | pbar'*x == pbar'*x_unif; % same expected return as in the case of uniform portfolio 42 | sum(x) == 1 43 | %x>=0 44 | sum(max(-x,zeros(n,1))) <= 0.5 % limit on total short position 45 | cvx_end 46 | risk(i)= x'*S*x; 47 | end 48 | 49 | figure 50 | plot(mus,risk); 51 | title('mu vs risk'); 52 | ylabel('risk'); 53 | xlabel('mu'); -------------------------------------------------------------------------------- /w10-unconstrained/grad_newton/hessian_approximation.m: -------------------------------------------------------------------------------- 1 | n = 1000; 2 | m = 1000; 3 | randn('state',5); 4 | A=randn(m,n); 5 | iter = 1000; 6 | alpha = 0.01; 7 | beta = 0.5; 8 | 9 | grad = @(x) A'*(1./(1-A*x)) + 1./(1-x) - 1./(1+x); 10 | hessian = @(x) A'*diag(1./(A*x-1).^2)*A + diag(1./(1-x).^2 + 1./(1+x).^2); 11 | f = @(x) - sum(log(1 - A*x)) - sum(log(1-x)) - sum(log(1+x)); 12 | 13 | 14 | 15 | % newton's method 16 | disp('running pure netwons method'); 17 | tic 18 | x = zeros(n,1); 19 | [x, f_vals, newtons_it] = newton(x, A, iter, alpha, beta, f, grad, hessian); 20 | toc 21 | 22 | figure 23 | subplot(311) 24 | p_star = f(x);% assuming that the last solution is the most optimal 25 | plot_res(f_vals, p_star, 'pure newtons'); 26 | 27 | 28 | % reusing hessian newton's 29 | disp('running reusing hessian matrix netwons method'); 30 | tic 31 | x = zeros(n,1); 32 | [x, 
f_vals, newtons_it] = mod_newton(x, A, iter, alpha, beta, f, grad, hessian, 1, 4); 33 | toc 34 | 35 | subplot(312) 36 | p_star = f(x);% assuming that the last solution is the most optimal 37 | plot_res(f_vals, p_star, 'R newtons'); 38 | 39 | % reusing hessian newton's 40 | disp('running diagonal hessian matrix netwons method'); 41 | tic 42 | x = zeros(n,1); 43 | [x, f_vals, newtons_it] = mod_newton(x, A, iter, alpha, beta, f, grad, hessian, 2); 44 | toc 45 | 46 | subplot(313) 47 | p_star = f(x);% assuming that the last solution is the most optimal 48 | plot_res(f_vals, p_star, 'D newtons'); 49 | -------------------------------------------------------------------------------- /w10-unconstrained/grad_newton/mod_newton.m: -------------------------------------------------------------------------------- 1 | % reuses a previously computed hessian matrix, computes a new hessian every 2 | % N iterations 3 | % mode = 1: hessian reuse, 2: diagonal hessian 4 | function [x, f_vals,i] = mod_newton(x, A, iter,alpha,beta,f,grad,hessian, mode, N) 5 | if nargin <=9 && mode == 2 6 | N= 1; 7 | else 8 | N = N-1; % because we start from 0 and not 1 9 | end 10 | 11 | eta = 0.0001; 12 | f_vals = [f(x)]; 13 | for i=0:iter 14 | t = 1; 15 | gr = grad(x); 16 | 17 | % re-evaluate hessian every N iterations 18 | if (mod(i,N)==0) 19 | H = hessian(x); 20 | end 21 | if(mode == 2) 22 | H = diag(diag(H)); 23 | end 24 | 25 | delta_x = - H\gr; 26 | 27 | % termination condition 28 | if(norm(gr'*delta_x)<= 2*eta) % norm is not affected by the sign 29 | break; 30 | end 31 | 32 | % making sure that we start in the domain of the function 33 | while t > 0 && (any(A*(x + t*delta_x)>1) || any(abs(x + t*delta_x)>1)) 34 | t = beta*t; 35 | end 36 | 37 | % backtracking linesearch 38 | while t > 0 && (f(x + t*delta_x) > f(x) + alpha*t*gr'*delta_x) 39 | t = beta*t; 40 | end 41 | % the actual optimization 42 | x = x + t * delta_x; 43 | 44 | % saving f values 45 | f_vals = [f_vals; f(x)]; 46 | end -------------------------------------------------------------------------------- /w6-approximation_and_fitting/img_interpolation.m: -------------------------------------------------------------------------------- 1 | clear all; 2 | run tv_img_interp.m 3 | 4 | % preparing mask matrices 5 | U_ver = ones(m,n); 6 | U_ver(1,:) = zeros(n,1); 7 | U_hor = ones(m,n); 8 | U_hor(:,1) = zeros(1,m); 9 | 10 | 11 | % producing reconstructed images using l2 and total variance methods 12 | cvx_begin 13 | variable Ul2(m,n) 14 | Ul2(Known) == Uorig(Known); % Fix known pixel values. 15 | U_hor_der = Ul2 - circshift(Ul2,[0,1]) .* U_hor; % horizontal derivative 16 | U_ver_der = Ul2 - circshift(Ul2,[1,0]) .* U_ver; % vertical derivative 17 | minimize (norm([U_hor_der(:);(U_ver_der(:))],2)); % l2 measure 18 | cvx_end 19 | 20 | cvx_begin 21 | variable Utv(m,n) 22 | Utv(Known) == Uorig(Known); % Fix known pixel values. 23 | U_hor_der = Utv - circshift(Utv,[0,1]) .* U_hor; % horizontal derivative 24 | U_ver_der = Utv - circshift(Utv,[1,0]) .* U_ver; % vertical derivative 25 | minimize (norm([U_hor_der(:);(U_ver_der(:))],1)); % total variance measure 26 | cvx_end 27 | 28 | % Graph everything. 
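% Note: Ul2 penalizes the l2 norm of the stacked horizontal/vertical
% differences and therefore smooths across edges, while Utv penalizes the
% l1 norm (total variation) and tends to preserve sharp edges; the
% difference is visible in the reconstructed images plotted below.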
29 | figure(1); cla; 30 | colormap gray; 31 | 32 | subplot(221); 33 | imagesc(Uorig) 34 | title('Original image'); 35 | axis image; 36 | 37 | subplot(222); 38 | imagesc(Known.*Uorig + 256-150*Known); 39 | title('Obscured image'); 40 | axis image; 41 | 42 | subplot(223); 43 | imagesc(Ul2); 44 | title('l_2 reconstructed image'); 45 | axis image; 46 | 47 | subplot(224); 48 | imagesc(Utv); 49 | title('Total variation reconstructed image'); 50 | axis image; 51 | -------------------------------------------------------------------------------- /w6-approximation_and_fitting/convex_function_fitting.m: -------------------------------------------------------------------------------- 1 | % Section 6.5.5 2 | % Boyd & Vandenberghe "Convex Optimization" 3 | % Original by Lieven Vandenberghe 4 | % Adapted for CVX by Argyris Zymnis - 11/27/2005 5 | % 6 | % Here we find the convex function f that best fits 7 | % some given data in the least squares sense. 8 | % To do this we solve 9 | % minimize ||yns - yhat||_2 10 | % subject to yhat(j) >= yhat(i) + g(i)*(u(j) - u(i)), for all i,j 11 | 12 | clear 13 | 14 | % Noise level in percent and random seed. 15 | rand('state',29); 16 | noiseint=.05; 17 | 18 | % Generate the data set 19 | u = [0:0.04:2]'; 20 | m=length(u); 21 | y = 5*(u-1).^4 + .6*(u-1).^2 + 0.5*u; 22 | v1=u>=.2; 23 | v2=u<=.6; 24 | v3=v1.*v2; 25 | dipvec=((v3.*u-.4*ones(1,size(v3,2))).^(2)).*v3; 26 | y=y+40*(dipvec-((.2))^2*v3); 27 | 28 | % add perturbation and plots the input data 29 | randf=noiseint*(rand(m,1)-.5); 30 | yns=y+norm(y)*(randf); 31 | figure 32 | plot(u,yns,'o'); 33 | 34 | % min. ||yns-yhat||_2 35 | % s.t. yhat(j) >= yhat(i) + g(i)*(u(j) - u(i)), for all i,j 36 | cvx_begin 37 | variables yhat(m) g(m) 38 | minimize(norm(yns-yhat)) 39 | subject to 40 | yhat*ones(1,m) >= ones(m,1)*yhat' + (ones(m,1)*g').*(u*ones(1,m)-ones(m,1)*u'); 41 | cvx_end 42 | 43 | nopts =1000; 44 | t = linspace(0,2,nopts); 45 | f = max(yhat(:,ones(1,nopts)) + ... 46 | g(:,ones(1,nopts)).*(t(ones(m,1),:)-u(:,ones(1,nopts)))); 47 | plot(u,yns,'o',t,f,'-'); 48 | axis off 49 | %print -deps interpol_convex_function2.eps 50 | -------------------------------------------------------------------------------- /w6-approximation_and_fitting/log_normcdf.m: -------------------------------------------------------------------------------- 1 | function y = log_normcdf( x, approx ) %#ok 2 | 3 | %LOG_NORMCDF Logarithm of the cumulative normal distribution. 4 | % Y = LOG_NORMCDF(X) is the logarithm of the CDF of the normal 5 | % distribution at the point X. 6 | % 7 | % 1 / x 8 | % LOG_NORMCDF(X) = LOG( ------- | exp(-t^2/2) dt ) 9 | % sqrt(2) / -Inf 10 | % 11 | % For numeric X, LOG_NORMCDF(X) is computed using the equivalent 12 | % expression LOG(0.5*ERFC(-X*SQRT(0.5))). When X is a CVX variable, a 13 | % a piecewise quadratic *approximation* is employed instead. This 14 | % approximation gives good results when -4 <= x <= 4, and will be 15 | % improved in future releases of CVX. 16 | % 17 | % For array values of X, the LOG_NORMCDF returns an array of identical 18 | % size with the calculation applied independently to each element. 19 | % 20 | % X must be real. 21 | % 22 | % Disciplined convex programming information: 23 | % LOG_NORMCDF is concave and nondecreasing in X. Therefore, when used 24 | % in CVX specifications, X must be concave. 25 | 26 | error(nargchk(1,2,nargin)); %#ok 27 | if ~isreal( x ), 28 | error( 'Argument must be real.' 
); 29 | end 30 | if nargin > 1, 31 | % For debugging purposes only 32 | y = cvx_constant(log_normcdf(cvx(x))); 33 | else 34 | y = log(0.5*erfc(-x*sqrt(0.5))); 35 | end 36 | 37 | % Copyright 2005-2014 CVX Research, Inc. 38 | % See the file LICENSE.txt for full copyright information. 39 | % The command 'cvx_where' will show where this file is located. 40 | -------------------------------------------------------------------------------- /w7-stat_estimation/signal_estim.m: -------------------------------------------------------------------------------- 1 | clear all; close all; 2 | 3 | % create problem data 4 | N = 100; 5 | % create an increasing input signal 6 | xtrue = zeros(N,1); 7 | xtrue(1:40) = 0.1; 8 | xtrue(50) = 2; 9 | xtrue(70:80) = 0.15; 10 | xtrue(80) = 1; 11 | xtrue = cumsum(xtrue); 12 | 13 | % pass the increasing input through a moving-average filter 14 | % and add Gaussian noise 15 | h = [1 -0.85 0.7 -0.3]; k = length(h); 16 | yhat = conv(h,xtrue); 17 | y = yhat(1:end-3) ... 18 | + [-0.43;-1.7;0.13;0.29;-1.1;1.2;1.2;-0.038;0.33;0.17;-0.19;0.73;-0.59;2.2;-0.14;0.11;1.1;0.059;-0.096;-0.83;0.29;-1.3;0.71;1.6;-0.69;0.86;1.3;-1.6;-1.4;0.57;-0.4;0.69;0.82;0.71;1.3;0.67;1.2;-1.2;-0.02;-0.16;-1.6;0.26;-1.1;1.4;-0.81;0.53;0.22;-0.92;-2.2;-0.059;-1;0.61;0.51;1.7;0.59;-0.64;0.38;-1;-0.02;-0.048;4.3e-05;-0.32;1.1;-1.9;0.43;0.9;0.73;0.58;0.04;0.68;0.57;-0.26;-0.38;-0.3;-1.5;-0.23;0.12;0.31;1.4;-0.35;0.62;0.8;0.94;-0.99;0.21;0.24;-1;-0.74;1.1;-0.13;0.39;0.088;-0.64;-0.56;0.44;-0.95;0.78;0.57;-0.82;-0.27]; 19 | 20 | 21 | 22 | 23 | 24 | % ML fit with constraints (signal must be nonnegative and nondecreasing) 25 | cvx_begin 26 | variable x_ml(N); 27 | y_hat = conv(h,x_ml); 28 | y_hat = y_hat(1:end-k+1); 29 | minimize(sum_square(y - y_hat)); 30 | subject to 31 | x_ml >= 0; 32 | x_ml(2:end) >= x_ml(1:end-1); 33 | cvx_end 34 | 35 | % ML fit without constraints 36 | cvx_begin 37 | variable x_ml_free(N); 38 | y_hat = conv(h,x_ml_free); 39 | y_hat = y_hat(1:end-k+1); 40 | minimize(sum_square(y - y_hat)); 41 | cvx_end 42 | 43 | % plotting 44 | figure(1) 45 | plot(xtrue); 46 | hold on; 47 | 48 | plot(x_ml); 49 | plot(x_ml_free); 50 | legend('True signal','ML signal (constrained)','ML signal (unconstrained)') 51 | 52 | -------------------------------------------------------------------------------- /w11:12-equality_contrained/extra/linear_barrier/solution/lp_acent.m: -------------------------------------------------------------------------------- 1 | function [x_star, nu_star, lambda_hist] = lp_acent(A,b,c, x_0) 2 | % solves problem 3 | % minimize c'*x - sum(log(x)) 4 | % subject to A*x = b 5 | % using Newton's method, given strictly feasible starting point x_0 6 | % input (A, b, c, x_0) 7 | % returns primal and dual optimal points 8 | % lambda_hist is a vector showing lambda^2/2 for each Newton step 9 | % 10 | % returns [], [] if MAXITERS reached, or x_0 not feasible 11 | % algorithm parameters 12 | ALPHA = 0.01; 13 | BETA = 0.5; 14 | EPSILON = 1e-6; 15 | MAXITERS = 100; 16 | if (min(x_0) <= 0) || (norm(A*x_0 - b) > 1e-3) % x0 not feasible 17 | fprintf('FAILED'); 18 | nu_star = []; x_star = []; lambda_hist=[]; 19 | return; 20 | end 21 | m = length(b); 22 | n = length(x_0); 23 | x = x_0; lambda_hist = []; 24 | for iter = 1:MAXITERS 25 | H = diag(x.^(-2)); 26 | g = c - x.^(-1); 27 | % lines below compute newton step via whole KKT system 28 | % M = [ H A'; A zeros(m,m)]; 29 | % d = M\[-g; zeros(m,1)]; 30 | % dx = d(1:n); 31 | % w = d(n+1:end); 32 | % newton step by elimination method 33 | w = (A*diag(x.^2)*A')\(-A*diag(x.^2)*g); 34 | dx = -diag(x.^2)*(A'*w + g); 35 | lambdasqr = -g'*dx; % equals dx'*H*dx 36 | 
lambda_hist = [lambda_hist lambdasqr/2]; 37 | if lambdasqr/2 <= EPSILON break; end 38 | % backtracking line search 39 | % first bring the point inside the domain 40 | t = 1; while min(x+t*dx) <= 0 t = BETA*t; end 41 | % now do backtracking line search 42 | 43 | while c'*(t*dx)-sum(log(x+t*dx))+sum(log(x))-ALPHA*t*g'*dx> 0 44 | t = BETA*t; 45 | end 46 | x = x + t*dx; 47 | end 48 | if iter == MAXITERS % MAXITERS reached 49 | fprintf('ERROR: MAXITERS reached.\n'); 50 | x_star = []; nu_star = []; 51 | else 52 | x_star = x; 53 | nu_star = w; 54 | end -------------------------------------------------------------------------------- /w10-unconstrained/bio/flux_analysis.m: -------------------------------------------------------------------------------- 1 | clear all; 2 | run fba_data.m; 3 | eps = 0.1; 4 | 5 | 6 | 7 | % loosen the boundaries of maximum dual variable 8 | % in order to get the highest profit gain 9 | % vmax(5)= vmax(5) + eps; 10 | 11 | % if you want to find what rate the model is most sensitive to 12 | % set them all to the same value 13 | % vmax =10*ones(n,1); 14 | 15 | % some experiments with perturbation 16 | cvx_begin quiet 17 | variable v(n) 18 | dual variable l; 19 | maximize(v(n)); % the cell growth rate 20 | subject to 21 | S*v == 0; 22 | v>=0; 23 | l: v<=vmax; 24 | cvx_end 25 | 26 | G_min = 0.2*cvx_optval; 27 | % searching for essential genes 28 | for i=1:n 29 | vknock = vmax; 30 | vknock(i) = 0; % knocking the gene 31 | cvx_begin quiet 32 | variable v(n) 33 | subject to 34 | S*v == 0; 35 | v >= 0; 36 | v(n) >= G_min; 37 | v <= vknock; 38 | cvx_end 39 | if (strcmp(cvx_status, 'Infeasible')) 40 | disp(['gene ', num2str(i), ' is essential']); 41 | end 42 | end 43 | 44 | 45 | % searching for lethal genes 46 | 47 | % cartesian product of all possibilities 48 | % we know that 1 and 9 are essential genes 49 | [X,Y] = meshgrid(2:8,2:8); 50 | knocks = [X(:) Y(:)]; 51 | 52 | for i=1:length(knocks) 53 | k1 = knocks(i,1); 54 | k2 = knocks(i,2); 55 | vknock = vmax; 56 | % knocking the gene 57 | vknock(k1) = 0; 58 | vknock(k2) = 0; 59 | 60 | cvx_begin quiet 61 | variable v(n) 62 | subject to 63 | S*v == 0; 64 | v >= 0; 65 | v(n) >= G_min; 66 | v <= vknock; 67 | cvx_end 68 | if (strcmp(cvx_status, 'Infeasible')) 69 | disp(['genes ', num2str(k1),' and ', num2str(k2), ' are lethals']); 70 | end 71 | end 72 | 73 | -------------------------------------------------------------------------------- /w8-geometric_problems/sphere_fit_data.m: -------------------------------------------------------------------------------- 1 | U = [ ... 
2 | -3.8355737e+00 5.9061250e+00 3 | -3.2269177e+00 7.5112709e+00 4 | -1.6572955e+00 7.4704730e+00 5 | -2.8202585e+00 7.7378120e+00 6 | -1.7831869e+00 5.4818448e+00 7 | -2.1605783e+00 7.7231450e+00 8 | -2.0960803e+00 7.7072529e+00 9 | -1.3866295e+00 6.1452654e+00 10 | -3.2077849e+00 7.6023307e+00 11 | -2.0095986e+00 7.6382459e+00 12 | -2.0965432e+00 5.2421510e+00 13 | -2.8128775e+00 5.1622157e+00 14 | -3.6501826e+00 7.2585500e+00 15 | -2.1638414e+00 7.6899057e+00 16 | -1.7274710e+00 5.4564872e+00 17 | -1.5743230e+00 7.3510769e+00 18 | -1.3761806e+00 6.9730981e+00 19 | -1.3602495e+00 6.9056362e+00 20 | -1.5257654e+00 5.7518622e+00 21 | -1.9231176e+00 7.6775030e+00 22 | -2.9296195e+00 7.7561481e+00 23 | -3.2828270e+00 5.4188036e+00 24 | -2.9078414e+00 5.1741322e+00 25 | -3.5423007e+00 5.5660735e+00 26 | -3.1388035e+00 7.7008514e+00 27 | -1.7957226e+00 5.4273243e+00 28 | -2.6267585e+00 7.7336173e+00 29 | -3.6652627e+00 7.2686635e+00 30 | -3.7394118e+00 6.0293335e+00 31 | -3.7898021e+00 5.9057623e+00 32 | -3.6200108e+00 5.7754097e+00 33 | -3.0386294e+00 5.3028798e+00 34 | -2.0320023e+00 5.2594588e+00 35 | -2.9577808e+00 5.3040353e+00 36 | -2.9146706e+00 7.7731243e+00 37 | -3.2243786e+00 5.4402982e+00 38 | -2.1781976e+00 7.7681141e+00 39 | -2.2545150e+00 5.2233652e+00 40 | -1.2559218e+00 6.2741755e+00 41 | -1.8875105e+00 5.4133273e+00 42 | -3.6122685e+00 7.2743342e+00 43 | -2.6552417e+00 7.7564498e+00 44 | -1.4127560e+00 6.0732284e+00 45 | -3.7475311e+00 7.2351834e+00 46 | -2.1367633e+00 7.6955709e+00 47 | -3.9263527e+00 6.2241593e+00 48 | -2.3118969e+00 7.7636052e+00 49 | -1.4249518e+00 7.1457752e+00 50 | -2.0196394e+00 5.3154475e+00 51 | -1.4021445e+00 5.9675466e+00 52 | ]'; 53 | -------------------------------------------------------------------------------- /w11:12-equality_contrained/extra/linear_barrier/newton_KKT_and_BE.m: -------------------------------------------------------------------------------- 1 | % Newton optimization method based on block elimination or KKT matrix depending on the last passed parameter% 2 | % parameters: 3 | % x : starting feasible vector 4 | % A : matrix involved in Ax = b constraint 5 | % b : 6 | % grad : gradient of the objective function 7 | % H : hessian of the objective 8 | % KKT : boolean flag 9 | function [x, nu, lambdas] = newton(x, A, b, f, grad, hessian, KKT) 10 | %% hyper-parameters for backtrack line search%% 11 | alpha = 0.01; 12 | beta = 0.5; 13 | iter = 2000; 14 | eta = 10^-6; 15 | 16 | lambdas = []; 17 | [m,n] = size(A); 18 | for i=0:iter 19 | t = 1; 20 | gr = grad(x); 21 | H = hessian(x); 22 | H_inv = inv(H); 23 | 24 | if nargin() == 6 25 | % Block elimination 26 | % Compute delta_x and lambda via KKT block elimination 27 | % 1-2. compute Schur's complement and b_hat 28 | S = - A*H_inv*A'; 29 | b_hat = A*H_inv*gr; 30 | % 3. solve for nu (dual variable) 31 | nu = linsolve(S,b_hat); 32 | % 4. 
solve for delta_x (Newton step) 33 | delta_x = linsolve(H, -(gr + A'*nu)); 34 | elseif nargin() == 7 && KKT 35 | % KKT matrix solution 36 | KKT = [H A'; A zeros(m,m)]; 37 | b_tilda = [-gr; zeros(m,1)]; 38 | X = linsolve(KKT,b_tilda); 39 | delta_x = X(1:n); 40 | nu = X((n+1):n+m); 41 | end 42 | 43 | lambda_sq_norm = -gr'*delta_x; 44 | lambdas = [lambdas; lambda_sq_norm/2]; 45 | 46 | % termination condition 47 | if(lambda_sq_norm<= 2*eta) % norm is not affected by the sign 48 | break; 49 | end 50 | 51 | % making sure that we start in the domain of the function 52 | while t > eta && (any((x + t*delta_x)< 0)|| norm(A*delta_x)>eta) 53 | t = beta*t; 54 | end 55 | 56 | % backtracking linesearch 57 | while t > 0 && (f(x + t*delta_x) > f(x) + alpha*t*gr'*delta_x) 58 | t = beta*t; 59 | end 60 | 61 | % the actual optimization 62 | x = x + t * delta_x; 63 | end 64 | if i == iter 65 | fprintf ('could not find a solution in %d iterations \n' ,iter) 66 | x = []; 67 | nu = []; 68 | lambdas =[]; 69 | end 70 | end -------------------------------------------------------------------------------- /w8-geometric_problems/sep3way_data.m: -------------------------------------------------------------------------------- 1 | clear all; close all; 2 | % three way separation problem 3 | % ee364a 4 | % convex optimization 5 | 6 | % data for problem instance 7 | M = 20; 8 | N = 20; 9 | P = 20; 10 | 11 | X = [ 12 | 13 | 3.5674 4.1253 2.8535 5.1892 4.3273 3.8133 3.4117 ... 14 | 3.8636 5.0668 3.9044 4.2944 4.7143 3.3082 5.2540 ... 15 | 2.5590 3.6001 4.8156 5.2902 5.1908 3.9802 ;... 16 | -2.9981 0.5178 2.1436 -0.0677 0.3144 1.3064 3.9297 ... 17 | 0.2051 0.1067 -1.4982 -2.4051 2.9224 1.5444 -2.8687 ... 18 | 1.0281 1.2420 1.2814 1.2035 -2.1644 -0.2821]; 19 | 20 | Y = [ 21 | -4.5665 -3.6904 -3.2881 -1.6491 -5.4731 -3.6170 -1.1876 ... 22 | -1.0539 -1.3915 -2.0312 -1.9999 -0.2480 -1.3149 -0.8305 ... 23 | -1.9355 -1.0898 -2.6040 -4.3602 -1.8105 0.3096; ... 24 | 2.4117 4.2642 2.8460 0.5250 1.9053 2.9831 4.7079 ... 25 | 0.9702 0.3854 1.9228 1.4914 -0.9984 3.4330 2.9246 ... 26 | 3.0833 1.5910 1.5266 1.6256 2.5037 1.4384]; 27 | 28 | Z = [ 29 | 1.7451 2.6345 0.5937 -2.8217 3.0304 1.0917 -1.7793 ... 30 | 1.2422 2.1873 -2.3008 -3.3258 2.7617 0.9166 0.0601 ... 31 | -2.6520 -3.3205 4.1229 -3.4085 -3.1594 -0.7311; ... 32 | -3.2010 -4.9921 -3.7621 -4.7420 -4.1315 -3.9120 -4.5596 ... 33 | -4.9499 -3.4310 -4.2656 -6.2023 -4.5186 -3.7659 -5.0039 ... 34 | -4.3744 -5.0559 -3.9443 -4.0412 -5.3493 -3.0465]; 35 | 36 | %cvx_begin 37 | % variables a1(2) a2(2) a3(2) b1 b2 b3 38 | % fill in your solution code here 39 | % 40 | %cvx_end 41 | 42 | % comment out the following line after filling in cvx part! 43 | % values below are not right!! 44 | %a1=[1;1];a2=[1;-5];a3=[-1;-1];b1=0;b2=0;b3=0; 45 | 46 | % % now let's plot the three-way separation induced by 47 | % % a1,a2,a3,b1,b2,b3 48 | % % find maximally confusing point 49 | % p = [(a1-a2)';(a1-a3)']\[(b1-b2);(b1-b3)]; 50 | % 51 | % % plot 52 | % t = [-7:0.01:7]; 53 | % u1 = a1-a2; u2 = a2-a3; u3 = a3-a1; 54 | % v1 = b1-b2; v2 = b2-b3; v3 = b3-b1; 55 | % line1 = (-t*u1(1)+v1)/u1(2); idx1 = find(u2'*[t;line1]-v2>0); 56 | % line2 = (-t*u2(1)+v2)/u2(2); idx2 = find(u3'*[t;line2]-v3>0); 57 | % line3 = (-t*u3(1)+v3)/u3(2); idx3 = find(u1'*[t;line3]-v1>0); 58 | % 59 | % plot(X(1,:),X(2,:),'*',Y(1,:),Y(2,:),'ro',Z(1,:),Z(2,:),'g+',... 
60 | % t(idx1),line1(idx1),'k',t(idx2),line2(idx2),'k',t(idx3),line3(idx3),'k'); 61 | % axis([-7 7 -7 7]); 62 | -------------------------------------------------------------------------------- /w7-stat_estimation/pwl_fit_data.m: -------------------------------------------------------------------------------- 1 | x = [0.000; 2 | 0.010; 3 | 0.020; 4 | 0.030; 5 | 0.040; 6 | 0.051; 7 | 0.061; 8 | 0.071; 9 | 0.081; 10 | 0.091; 11 | 0.101; 12 | 0.111; 13 | 0.121; 14 | 0.131; 15 | 0.141; 16 | 0.152; 17 | 0.162; 18 | 0.172; 19 | 0.182; 20 | 0.192; 21 | 0.202; 22 | 0.212; 23 | 0.222; 24 | 0.232; 25 | 0.242; 26 | 0.253; 27 | 0.263; 28 | 0.273; 29 | 0.283; 30 | 0.293; 31 | 0.303; 32 | 0.313; 33 | 0.323; 34 | 0.333; 35 | 0.343; 36 | 0.354; 37 | 0.364; 38 | 0.374; 39 | 0.384; 40 | 0.394; 41 | 0.404; 42 | 0.414; 43 | 0.424; 44 | 0.434; 45 | 0.444; 46 | 0.455; 47 | 0.465; 48 | 0.475; 49 | 0.485; 50 | 0.495; 51 | 0.505; 52 | 0.515; 53 | 0.525; 54 | 0.535; 55 | 0.545; 56 | 0.556; 57 | 0.566; 58 | 0.576; 59 | 0.586; 60 | 0.596; 61 | 0.606; 62 | 0.616; 63 | 0.626; 64 | 0.636; 65 | 0.646; 66 | 0.657; 67 | 0.667; 68 | 0.677; 69 | 0.687; 70 | 0.697; 71 | 0.707; 72 | 0.717; 73 | 0.727; 74 | 0.737; 75 | 0.747; 76 | 0.758; 77 | 0.768; 78 | 0.778; 79 | 0.788; 80 | 0.798; 81 | 0.808; 82 | 0.818; 83 | 0.828; 84 | 0.838; 85 | 0.848; 86 | 0.859; 87 | 0.869; 88 | 0.879; 89 | 0.889; 90 | 0.899; 91 | 0.909; 92 | 0.919; 93 | 0.929; 94 | 0.939; 95 | 0.949; 96 | 0.960; 97 | 0.970; 98 | 0.980; 99 | 0.990; 100 | 1.000; 101 | ]; 102 | 103 | y = [0.200; 104 | 0.117; 105 | 0.041; 106 | -0.029; 107 | -0.093; 108 | -0.152; 109 | -0.204; 110 | -0.252; 111 | -0.295; 112 | -0.334; 113 | -0.369; 114 | -0.400; 115 | -0.428; 116 | -0.453; 117 | -0.475; 118 | -0.495; 119 | -0.513; 120 | -0.529; 121 | -0.545; 122 | -0.559; 123 | -0.573; 124 | -0.586; 125 | -0.600; 126 | -0.614; 127 | -0.628; 128 | -0.641; 129 | -0.653; 130 | -0.663; 131 | -0.671; 132 | -0.675; 133 | -0.676; 134 | -0.672; 135 | -0.664; 136 | -0.650; 137 | -0.630; 138 | -0.606; 139 | -0.577; 140 | -0.544; 141 | -0.509; 142 | -0.473; 143 | -0.436; 144 | -0.399; 145 | -0.364; 146 | -0.330; 147 | -0.300; 148 | -0.273; 149 | -0.250; 150 | -0.229; 151 | -0.211; 152 | -0.194; 153 | -0.179; 154 | -0.164; 155 | -0.149; 156 | -0.134; 157 | -0.118; 158 | -0.100; 159 | -0.080; 160 | -0.059; 161 | -0.035; 162 | -0.010; 163 | 0.017; 164 | 0.045; 165 | 0.074; 166 | 0.104; 167 | 0.135; 168 | 0.167; 169 | 0.200; 170 | 0.233; 171 | 0.267; 172 | 0.301; 173 | 0.336; 174 | 0.371; 175 | 0.408; 176 | 0.444; 177 | 0.482; 178 | 0.521; 179 | 0.560; 180 | 0.600; 181 | 0.641; 182 | 0.683; 183 | 0.726; 184 | 0.770; 185 | 0.814; 186 | 0.860; 187 | 0.906; 188 | 0.953; 189 | 1.001; 190 | 1.050; 191 | 1.100; 192 | 1.151; 193 | 1.202; 194 | 1.254; 195 | 1.307; 196 | 1.361; 197 | 1.416; 198 | 1.471; 199 | 1.527; 200 | 1.584; 201 | 1.642; 202 | 1.700; 203 | ]; 204 | 205 | -------------------------------------------------------------------------------- /w10-unconstrained/grad_and_newton.m: -------------------------------------------------------------------------------- 1 | 2 | function grad_and_newton 3 | % set(gca, 'YTickLabel', num2str(get(gca, 'YTick'), '%.7f')) 4 | 5 | n = 100; 6 | m = 200; 7 | randn('state',1); 8 | A = randn(m,n); 9 | ITER = 7; 10 | 11 | 12 | 13 | x = zeros(n,1); % initial x 14 | ALPHA = 0.01; 15 | BETA = 0.5; 16 | TOLERANCE = 10^-6; 17 | 18 | gradient_optimization(x); 19 | %newton_optimization(x); 20 | 21 | 22 | 23 | % optimizing the function 24 | 25 | % x : initial point 26 | function x 
= gradient_optimization(x) 27 | f_val = []; 28 | iter_nr = []; 29 | step_len = []; 30 | 31 | for i = 1:ITER 32 | % compute the step direction 33 | delta_x = - f_der(x); 34 | 35 | if(norm(-delta_x,2)<=TOLERANCE) 36 | break; 37 | end 38 | 39 | t = 1; 40 | % enforced condition that x must be in the domain 41 | while (any(A*(x+t*delta_x)>1) | any(abs(x+t*delta_x)>1) & t > 0) 42 | t = BETA*t; 43 | end 44 | 45 | % compute t value (step-size) 46 | while (f(x + t*delta_x) > f(x) - ALPHA*t*delta_x'*delta_x & t > 0 ) 47 | t = BETA*t; 48 | end 49 | 50 | % the actual parameters adjustment 51 | x = x + t * delta_x; 52 | 53 | if (mod(i,1)==0) 54 | step_len = [step_len t]; 55 | f_val = [f_val f(x)]; 56 | iter_nr = [iter_nr i]; 57 | end 58 | end 59 | 'best result is:' 60 | f(x) 61 | ' in # steps:' 62 | i 63 | % plot the result 64 | my_plot(iter_nr,f_val,step_len,x) 65 | end 66 | 67 | 68 | 69 | function x = newton_optimization(x) 70 | f_val = []; 71 | iter_nr = []; 72 | step_len = []; 73 | 74 | for i = 1:ITER 75 | 76 | % compute the step direction 77 | f_d = f_der(x); 78 | f_2_d = f_sec_der(x); 79 | delta_x = - inv(f_2_d)*f_d; 80 | lambda = -1*f_d'*delta_x/2; 81 | 82 | if(lambda<=TOLERANCE) 83 | break; 84 | end 85 | 86 | t = 1; 87 | % enforced condition that x must be in the domain 88 | while (any(A*(x+t*delta_x)>1) | any(abs(x+t*delta_x)>1) & t > 0) 89 | t = BETA*t; 90 | end 91 | 92 | % compute t value (step-size) 93 | while (f(x + t*delta_x) > f(x) - ALPHA*t*delta_x'*delta_x & t > 0 ) 94 | t = BETA*t; 95 | end 96 | 97 | % the actual parameters adjustment 98 | x = x + t * delta_x; 99 | 100 | if (mod(i,1)==0) 101 | step_len = [step_len t]; 102 | f_val = [f_val f(x)]; 103 | iter_nr = [iter_nr i]; 104 | end 105 | end 106 | my_plot(iter_nr,f_val,step_len,x); 107 | 'best result is:' 108 | f(x) 109 | ' in # steps:' 110 | i 111 | x; 112 | end 113 | 114 | 115 | function [] = my_plot (iter_nr,f_val,step_len,x) 116 | figure(1) 117 | subplot(211); 118 | plot(iter_nr,real(f_val) - f(x)); 119 | ylabel('f(x^k) - p*'); 120 | xlabel('iter'); 121 | subplot(212); 122 | plot(iter_nr,step_len,'o'); 123 | ylabel('t'); 124 | xlabel('iter'); 125 | title('gradient descent'); 126 | end 127 | 128 | 129 | 130 | function res = f (x) 131 | res = - sum(log(1 - A*x)) - sum(log(1 - x)) - sum(log(1 + x)); 132 | end 133 | 134 | 135 | function der = f_der (x) 136 | der = A'* (1./(1-A*x)) + 1./(1-x) - 1./(1+x); 137 | end 138 | 139 | function der = f_sec_der(x) 140 | d = 1./(1-A*x); 141 | der = A'*diag(d.^2)*A + diag(1./(1-x).^2) + diag(1./(1+x).^2); 142 | end 143 | 144 | 145 | 146 | end 147 | -------------------------------------------------------------------------------- /w6-approximation_and_fitting/costumer_preference.m: -------------------------------------------------------------------------------- 1 | 2 | 3 | % Section 6.5.5, Figures 6.25-6.26 4 | % Boyd & Vandenberghe "Convex Optimization" 5 | % Original by Lieven Vandenberghe 6 | % Adapted for CVX by Argyris Zymnis - 11/30/2005 7 | % 8 | % We are given a set of consumer preference data for bundles 9 | % of two goods x_1 and x_2. These points are generated by 10 | % taking 40 random points and comparing them using the 11 | % utility function: u(x_1,x_2) = (1.1*sqrt(x_1)+0.8*sqrt(x_2))/1.9 12 | % Then, if we have u(i) >= u(j) we say that (i,j) is in Pweak. 13 | % 14 | % Given this, we wish to compare the point (0.5,0.5) to each 15 | % of the bundles in the given dataset. I.e. for each point k in the 16 | % dataset, we wish to decide whether u(k) >= u(0) or u(k) <= u(0), 17 | % or both, in which case we cannot make any conclusions about 18 | % consumer preferences. 
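% --- annotation (not part of the original comment) ---------------------------
% A quick worked example with the true utility used to generate the data,
% u(x_1,x_2) = (1.1*sqrt(x_1)+0.8*sqrt(x_2))/1.9: the reference bundle has
% u(0.5,0.5) ~= 0.71, while the data point (0.89,0.90) has u ~= 0.95. Since
% that bundle also dominates (0.5,0.5) in both goods, any nondecreasing
% concave utility consistent with the data must rank it at least as high, so
% the comparison is decided; bundles whose utility is close to 0.71 are the
% ones the two LPs below may leave undecided.
% ------------------------------------------------------------------------------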
19 | % 20 | % To do this, we have to solve two LPs for each point: 21 | % minimize u(k) - u(0) 22 | % subject to g_i >= 0 23 | % u(j) <= u(i) + g_i^T(a_j - a_i), for all i,j 24 | % u(i) >= u(j), for all (i,j) in Pweak 25 | % 26 | % and: 27 | % maximize u(k) - u(0) 28 | % subject to g_i >= 0 29 | % u(j) <= u(i) + g_i^T(a_j - a_i), for all i,j 30 | % u(i) >= u(j), for all (i,j) in Pweak 31 | % 32 | % If the second LP has a strictly negative solution, we can deduce that 33 | % u(k) < u(0). If on the other hand the second LP has a nonnegative 34 | % solution and the first LP has a strictly positive solution, we can 35 | % deduce that u(k) > u(0). Finally if none of the two previous cases 36 | % holds, we cannot make a decision between the two bundles. 37 | % 38 | % NOTE: This file requires the auxilliary function utilfun.m to run. 39 | 40 | data= [... 41 | 4.5e-01 9.6e-01 42 | 2.1e-01 3.4e-01 43 | 9.6e-01 3.0e-02 44 | 8.0e-02 9.2e-01 45 | 2.0e-02 2.2e-01 46 | 0.0e+00 3.9e-01 47 | 2.6e-01 6.4e-01 48 | 3.5e-01 9.7e-01 49 | 9.1e-01 7.8e-01 50 | 1.2e-01 1.4e-01 51 | 5.8e-01 8.4e-01 52 | 4.9e-01 2.7e-01 53 | 7.0e-02 8.0e-01 54 | 9.3e-01 8.7e-01 55 | 4.4e-01 8.6e-01 56 | 3.3e-01 4.2e-01 57 | 8.9e-01 9.0e-01 58 | 4.9e-01 7.0e-02 59 | 9.5e-01 3.3e-01 60 | 6.6e-01 2.6e-01 61 | 9.5e-01 7.3e-01 62 | 4.2e-01 9.1e-01 63 | 6.8e-01 2.0e-01 64 | 5.2e-01 6.2e-01 65 | 7.7e-01 6.3e-01 66 | 2.0e-02 2.9e-01 67 | 9.8e-01 2.0e-02 68 | 5.0e-02 7.9e-01 69 | 7.9e-01 1.9e-01 70 | 6.2e-01 6.0e-02 71 | 2.8e-01 8.7e-01 72 | 6.9e-01 1.0e-01 73 | 6.9e-01 3.7e-01 74 | 0.0e+00 7.2e-01 75 | 8.7e-01 1.7e-01 76 | 6.3e-01 4.0e-02 77 | 3.2e-01 7.3e-01 78 | 4.0e-02 4.6e-01 79 | 3.6e-01 9.5e-01 80 | 8.2e-01 6.7e-01 ]; 81 | 82 | % objective point 83 | obj=[0.5,0.5]; 84 | 85 | figure(1); 86 | % display the utility function's level sets on some data points. 87 | 88 | plot(data(:,1),data(:,2),'o'); 89 | hold on; 90 | 91 | [X,Y] = meshgrid(0:.01:1,0:.01:1); 92 | Z=(1.1*X.^(1/2)+0.8*Y.^(1/2))/1.9; 93 | 94 | [C,h] = contour(X,Y,Z,[.1,.2,.3,.4,.5,.6,.7,.8,.9],'--'); 95 | clear X Y Z C 96 | hold off; 97 | xlabel('x_1'); 98 | ylabel('x_2'); 99 | hold off 100 | 101 | m = size(data,1); % number of baskets, including 0,1 102 | 103 | % add preference data 104 | Pweak = zeros(m+1,m+1); 105 | for i=1:m, 106 | for j=1:m 107 | if (i~=j) & (1.1*data(i,1).^(1/2)+0.8*data(i,2).^(1/2))/1.9 >= ... 108 | (1.1*data(j,1).^(1/2)+0.8*data(j,2).^(1/2))/1.9, 109 | Pweak(i,j) = 1; 110 | end; 111 | end; 112 | end; 113 | 114 | % Find consumer preferences 115 | data = [data; 0.5 0.5]; 116 | bounds = zeros(m,2); 117 | for k = 1:m 118 | fprintf(1,'Deciding on bundle %d of %d: ',k,m); 119 | 120 | % Check for u(k) >= u(0.5,0.5) 121 | cvx_begin quiet 122 | variables u(m+1) g_x(m+1) g_y(m+1) 123 | minimize(u(k)-u(m+1)) 124 | subject to 125 | g_x >= 0; 126 | g_y >= 0; 127 | ones(m+1,1)*u' <= u*ones(1,m+1)+(g_x*ones(1,m+1)).*... 128 | (ones(m+1,1)*data(:,1)'-data(:,1)*ones(1,m+1))+... 129 | (g_y*ones(1,m+1)).*(ones(m+1,1)*data(:,2)'-data(:,2)*ones(1,m+1)); 130 | (u*ones(1,m+1)).*Pweak >= (ones(m+1,1)*u').*Pweak; 131 | cvx_end 132 | bounds(k,1) = cvx_optval; 133 | fprintf( 1,'%g', round(cvx_optval) ); 134 | 135 | % Check for u(0.5,0.5) >= u(k) 136 | cvx_begin quiet 137 | variables u(m+1) g_x(m+1) g_y(m+1) 138 | maximize(u(k)-u(m+1)) 139 | subject to 140 | g_x >= 0; 141 | g_y >= 0; 142 | ones(m+1,1)*u' <= u*ones(1,m+1) + (g_x*ones(1,m+1)).*... 143 | (ones(m+1,1)*data(:,1)'-data(:,1)*ones(1,m+1))+... 
144 | (g_y*ones(1,m+1)).*(ones(m+1,1)*data(:,2)'-data(:,2)*ones(1,m+1)); 145 | (u*ones(1,m+1)).*Pweak >= (ones(m+1,1)*u').*Pweak; 146 | cvx_end 147 | bounds(k,2) = cvx_optval; 148 | fprintf( 1,' %g\n', round(cvx_optval) ); 149 | 150 | end 151 | 152 | figure(2); 153 | hold off 154 | 155 | % plot data pt and contour line through it 156 | val = 1.1*sqrt(0.5)+ 0.8*sqrt(.5); % value at center 157 | t = linspace(((val-.8)/1.1)^2, 1, 1000); 158 | y = ( (val - 1.1*(t.^(1/2)))/.8 ).^2; 159 | plot(t,y,'--', [.5 .5], [0 1], ':', [0 1], [.5 .5], ':'); 160 | axis([0 1 0 1]); 161 | hold on 162 | 163 | for k=1:m 164 | if bounds(k,2) < 1e-5, % preferred over (.5,.5) 165 | dot = plot(data(k,1),data(k,2),'o'); 166 | %'MarkerSize',8); 167 | elseif bounds(k,1) > -1e-5, % rejected in favor of (.5,.5) 168 | dot = plot(data(k,1),data(k,2),'o','MarkerFaceColor',[0 0 0]); 169 | else % no conclusion 170 | dot = plot(data(k,1),data(k,2),'square', 'LineWidth',1.0,... 171 | 'MarkerSize',10); 172 | end; 173 | end; 174 | xlabel('x_1'); ylabel('x_2'); 175 | -------------------------------------------------------------------------------- /w6-approximation_and_fitting/cens_fit_data_norng.m: -------------------------------------------------------------------------------- 1 | % data for censored fitting problem. 2 | n = 20; % dimension of x's 3 | M = 25; % number of non-censored data points 4 | K = 100; % total number of points 5 | c_true=[-0.43;-1.7;0.13;0.29;-1.1;1.2;1.2;-0.038;0.33;0.17;-0.19;0.73;-0.59;2.2;-0.14;0.11;1.1;0.059;-0.096;-0.83]; 6 | 7 | X = [0.29 -1.6 4.3e-05 0.62 -1.2 0.13 -0.33 0.47 -0.46 0.64 -1 0.49 -0.54 1.1 -0.9 0.84 -0.65 -0.98 1.1 -0.072 1.4 -0.78 -0.89 -2.4 -1.4 0.083 -0.57 -0.072 -0.026 1.1 2.1 -0.51 0.72 -0.089 -0.37 0.53 -0.027 -0.51 -0.61 0.76 -1.4 0.76 0.2 -0.42 0.44 -1.6 -0.85 -1 0.49 0.72 -0.15 1.3 0.66 -0.26 1.7 -0.25 0.26 0.56 -0.8 -0.073 -0.3 0.18 -0.67 -1.4 -0.76 0.74 -0.33 -0.5 0.045 0.53 0.81 -0.8 0.25 0.31 -2.1 0.56 0.54 0.93 1.4 0.81 -0.4 -1.3 0.14 -0.023 -1.3 -1.8 -1.7 -1.2 0.8 -0.25 -0.3 -0.53 0.87 0.72 0.87 0.45 0.91 0.37 -2.3 -0.6; 8 | -1.3 0.26 -0.32 0.8 -2.2 0.66 -0.84 -0.9 0.37 -0.6 -0.18 -0.005 -1.3 -0.25 0.14 -0.72 -1.1 -0.69 2.4 0.28 1.3 -0.77 0.14 -0.22 0.61 0.77 -1.5 -2.4 -1.1 1.6 0.08 0.25 0.039 -1 -0.83 1 0.17 0.23 0.69 -0.69 -0.69 -1.7 0.26 -0.61 0.57 0.43 0.35 -0.66 -0.87 1.6 0.32 0.67 -1.6 -1.4 0.12 -1.3 -0.19 0.48 0.31 -0.99 -0.17 0.23 -0.9 -0.3 -0.6 0.81 -2 0.21 2.4 2.1 -1 -0.0083 -1.4 1.4 -0.66 -2 0.68 1.2 0.42 1.9 0.91 1.2 -0.016 0.11 -0.3 -1.5 0.48 0.3 0.88 -0.15 1.2 0.54 -0.8 -0.28 2.4 0.89 1.5 -0.55 1.4 -0.99; 9 | 0.71 -1.1 1.1 0.94 0.99 -1.2 0.5 0.036 0.73 0.55 1.5 -0.28 1.1 -1.5 -0.14 -0.72 -0.048 1.3 0.23 1.4 -0.91 -0.11 -0.24 0.058 -1.3 2.2 -0.05 -0.69 0.75 1.4 -0.94 0.37 1.5 0.94 0.29 -1.1 0.88 -0.6 0.02 0.68 0.33 1.5 2.1 0.72 -0.69 -0.74 0.11 0.56 0.08 -2.1 1.3 -0.28 -3 -1.3 0.65 1.2 -0.079 -0.68 1.1 -0.75 0.18 -1.2 -0.15 -0.57 0.81 -0.14 1.6 0.75 -0.31 0.35 1.3 0.63 0.97 0.33 1.5 -0.76 -0.59 -0.25 0.069 0.4 -0.14 1.5 -0.94 0.81 -2.6 0.82 -0.45 -0.73 -0.22 -1.2 1 0.68 -0.75 -1.4 0.11 1.1 -1.1 0.58 1.4 1.2; 10 | 1.6 1.4 -1.9 -0.99 -0.52 -0.46 1.5 -0.63 2.1 -1.1 -0.038 1.3 -0.71 0.0097 -1.2 -0.2 0.38 -0.91 -0.27 0.18 -2.3 -0.98 -0.075 -0.42 -0.66 0.33 0.55 -1.4 0.5 -0.76 0.64 0.18 -1.7 -1.1 -1.8 0.36 0.18 0.021 1.1 -1.1 -1 -1.6 -2.3 0.34 0.83 0.56 -1.1 -1.2 -0.52 -0.74 -2.2 -0.023 0.54 -0.89 2.1 1.5 0.7 0.28 -1.7 -0.031 0.42 0.7 0.95 -0.12 0.07 -0.1 0.23 -0.95 0.19 0.23 0.015 0.15 1.6 0.48 -0.33 -2.4 -0.26 -0.71 0.29 -0.86 1.3 1.8 -1.5 -1 0.78 -0.23 0.39 
-1.1 0.3 -0.022 2.1 0.54 -0.75 0.46 0.026 -0.1 1.8 -2 -0.45 2.4; 11 | -0.69 -0.81 0.43 0.21 0.33 -0.26 -0.55 0.54 -1.4 0.086 1.2 1.9 -0.011 0.071 1.2 -0.02 -0.33 -0.41 0.7 -0.54 1.8 -0.96 -0.36 -0.2 -0.15 0.86 0.083 0.33 -0.52 0.44 1.7 -0.037 -1 -0.71 -1.6 -0.037 0.76 0.42 -1.3 0.9 0.29 1.1 0.34 0.88 -2.2 -1.4 -0.68 -0.78 -1.4 0.18 1.3 -0.91 -1 0.59 -0.34 0.24 -0.8 -1.3 -1.1 0.99 1.7 0.43 1.6 -0.39 -1.8 -0.8 0.65 0.61 0.95 1.3 0.22 2.6 -1.4 0.4 2.7 -0.66 1.5 -0.59 0.47 2.4 0.32 0.63 0.36 -1 0.6 -1.6 0.053 -1.4 -0.52 0.62 -0.66 -0.51 -0.31 1.1 0.97 1.5 0.15 0.52 -0.63 2.3; 12 | 0.86 0.53 0.9 0.24 0.23 -1.2 -0.85 0.55 -1 -2 -0.7 -0.52 -0.00082 0.32 -0.015 0.28 -0.5 -0.51 -0.49 1.6 0.39 -2.4 -2.1 -1.5 0.25 0.68 1.6 0.6 -0.56 0.91 0.59 -1.6 -0.76 -1.2 2 -1.2 0.51 1.2 0.48 -2.1 1.1 -1.1 0.29 0.28 1.1 0.46 -0.28 0.27 -0.38 0.53 -0.38 -1 0.91 1.8 0.73 -1.4 -0.8 -0.72 -0.19 -0.6 2 1.5 0.43 -0.84 1.8 0.49 -1.1 1.8 -0.53 -0.013 1.7 -1.3 -1.5 -0.073 -1.6 -0.11 1 -0.26 1.8 -0.84 -0.48 0.083 0.48 0.28 0.94 -0.32 -0.49 -0.59 -0.16 1.9 -1.1 -1.3 -1.5 -1 -0.0053 0.17 -0.28 1.6 1 2.3; 13 | 1.3 0.22 0.73 -1 0.021 -1.3 -0.25 -0.2 1 -0.49 0.0075 0.1 -0.25 0.5 0.54 1.1 -0.036 1.6 1.9 0.83 0.02 -0.84 -0.14 -1.1 -0.077 0.55 -0.33 0.15 -0.75 -1.1 0.79 0.34 2.2 1.1 -0.072 -0.28 0.13 0.77 -1.6 0.28 0.24 0.39 0.66 -0.15 -0.0016 0.63 0.65 1.5 -0.46 -0.55 0.0025 0.37 1.6 1.3 0.29 0.66 -0.0076 -0.66 0.46 1.5 0.7 -0.51 -0.56 -1.7 0.65 1.2 0.2 0.089 -1.1 -1.3 -2.1 1 -0.075 1.3 -0.54 0.3 0.3 1.2 0.26 0.28 0.076 2.1 0.32 0.29 -1 -0.8 0.24 0.52 -0.098 0.082 0.95 -0.61 0.83 -2.8 1.4 0.8 1.7 0.73 -0.49 -0.27; 14 | -1.6 -0.92 0.58 -0.74 -1 0.93 0.66 -2.1 -0.39 0.46 -0.78 -0.81 0.4 1.3 -0.72 0.62 -0.17 0.081 1.1 0.23 -0.41 0.26 1.4 -0.82 1.7 1 0.8 -0.1 0.93 0.2 0.11 -0.13 0.43 -0.68 2.6 -0.16 0.28 -2.6 -1.4 -0.73 0.16 0.97 -0.58 -0.09 -1.6 0.38 -1.2 -1.1 -0.29 0.3 0.88 0.9 -0.98 -0.49 -0.85 -2.6 -0.73 0.2 -0.92 -0.81 -1.4 -0.0067 0.18 -0.45 -1.5 1.3 1.7 2.6 -1.6 -0.56 0.11 0.78 0.081 0.98 0.55 -0.58 -0.82 -1.5 1.5 0.82 -0.11 1.3 -1.9 -0.25 -0.068 0.69 0.72 -1.5 1 1.6 -0.4 -0.57 -0.61 1 1.8 -1.7 0.67 0.56 -0.4 0.5; 15 | -1.4 -2.2 0.04 1.1 -0.95 0.011 -0.85 0.13 -1.4 -0.32 0.59 0.68 -0.26 -0.55 -0.66 -1.8 -0.96 -1.1 -1.2 0.67 -1.5 -0.18 0.65 0.37 1.6 1.3 -0.78 -2.6 -0.25 0.76 -0.16 0.49 -0.44 -1.7 -0.24 -1.1 -0.98 0.29 0.29 -0.77 0.41 0.82 0.89 0.29 -1.2 -1 -0.6 0.63 -0.3 -1.2 0.58 1.3 1 -2.2 -2.5 -0.53 -1.5 -1.8 -0.65 0.65 0.36 -0.53 -0.77 -1.5 -0.38 -0.28 0.73 -0.68 1.2 0.76 -1.1 -0.83 -0.84 1.7 1.5 -3.1 -0.49 -0.39 0.32 1.2 1.4 -1.8 0.68 -0.22 0.082 -0.042 0.15 -0.087 0.43 -0.38 -0.38 0.086 0.96 0.077 0.89 0.17 -0.35 -0.81 1.4 -0.12; 16 | 0.57 -0.059 0.68 -0.13 -0.37 -0.65 -1.2 1.6 0.32 1.2 -0.25 -2.4 -1.7 0.26 0.31 0.7 1.3 -1.1 -0.67 -0.51 0.22 -0.17 -0.38 -0.59 0.63 0.044 -1.3 0.028 -0.15 -1.3 0.87 0.6 0.03 0.81 0.17 -2 -0.94 0.83 -0.14 0.15 1.2 0.037 0.17 1.2 0.21 -0.35 -0.48 -0.8 -1.6 -0.19 -1.6 -0.13 0.16 0.24 -2.4 3.2 0.87 -1.4 0.62 -1.3 -0.57 0.72 -0.94 -0.096 0.21 0.22 0.79 2.8 0.49 -0.91 -1.6 -0.59 -0.56 -0.41 -0.46 1.6 0.87 0.28 0.8 -0.064 0.71 -0.014 0.23 0.9 -1.8 0.32 0.13 -0.013 -0.026 -1.3 0.48 0.69 2 -1.9 1.1 -0.48 -0.26 -0.54 0.28 -0.0019; 17 | -0.4 -1 0.57 0.39 -1.2 0.81 -0.12 1 1.6 -0.63 0.48 0.99 -1 -0.013 0.11 0.81 0.44 1.7 1.3 0.86 -1.4 -0.12 -0.66 1.5 0.092 -0.31 0.67 -0.88 -1.3 -0.95 -0.19 -0.086 -0.32 1.4 0.92 -0.91 -0.013 -0.0081 -1.1 -0.34 1.4 -0.93 0.85 0.81 0.22 0.44 0.98 -0.31 1.1 -0.3 -1.5 0.61 -0.59 -0.74 -0.35 0.44 -0.27 0.2 -1.3 -0.87 -1 1.1 -1.4 0.91 -0.77 0.63 0.6 -0.017 1.6 1.4 0.64 0.066 
-0.028 0.57 -0.5 -0.41 0.36 0.83 0.58 0.65 0.37 2.5 1.2 -0.64 -1.8 0.51 -1 -0.35 -0.38 -0.72 0.44 2.1 0.29 -0.61 0.4 -0.73 -0.64 1.3 1.9 -0.43; 18 | 0.69 0.61 -0.26 0.088 -1.1 0.23 -0.065 -1.6 0.71 -2.3 0.67 0.22 0.24 -0.58 1.8 0.64 1.3 1.9 0.39 0.27 -0.84 0.17 0.25 0.14 -0.81 0.23 -1.4 -0.27 0.31 0.78 0.075 0.33 0.98 0.67 -0.18 -0.0056 0.35 0.86 -0.29 0.97 -1 -0.11 0.96 -1.4 -1 -1.6 1.8 -0.6 1.3 0.96 0.57 2 1.6 -1.8 -0.61 -1.1 -1.6 1.5 1 -0.47 0.7 0.5 -1.9 2.4 -0.11 -0.55 -0.058 0.27 -0.45 0.25 -0.4 -0.012 -1.2 0.74 1.2 1.4 -0.08 -0.98 1.8 -1.8 -0.6 -0.67 0.13 -0.18 -0.66 -1 1.3 0.99 -0.24 -0.56 0.38 -0.0029 -0.39 1.2 0.19 -2.1 -0.91 -0.48 -0.8 -0.19; 19 | 0.82 0.51 -0.38 -0.64 1.5 -0.99 0.49 -0.079 2 -1.2 -0.078 0.26 -1.3 2.1 -0.28 1.3 -0.5 1.6 0.39 0.62 -0.21 -0.5 -0.38 -1.9 -0.46 1 -1.3 -0.33 2.7 -0.0063 -0.53 -0.34 0.018 0.14 -0.52 -1.7 -0.89 0.77 -0.58 -0.11 0.21 -0.8 1.3 0.12 -0.45 -0.7 1.4 1.3 -0.13 -0.53 -0.91 2.3 -0.52 0.45 -0.41 0.89 -0.39 0.37 0.86 0.22 0.48 2.8 -0.065 0.52 0.34 0.23 -1.1 -0.91 1 0.12 -0.4 -0.077 0.73 0.22 0.041 -1.4 0.75 -0.1 -0.94 0.06 -0.85 0.26 0.18 0.72 1.4 0.099 1.2 0.64 -1.2 0.62 1.1 -0.09 0.58 -0.58 -0.69 -1.8 0.72 -1.8 -0.56 0.99; 20 | 0.71 1.7 -0.3 -0.56 0.056 1.3 -0.6 -0.68 0.5 1.1 0.89 1.2 -0.35 -0.26 2.2 0.33 -1.1 -1.3 -1.7 -1 0.76 -0.71 -0.53 -0.45 -1.4 1.2 -0.61 -1.2 0.29 0.52 -0.69 -0.32 0.82 -0.86 1.4 1.3 0.81 1.3 -0.9 1 0.59 -1.7 -0.064 -0.22 1.4 -1.1 0.91 0.86 -0.74 -0.9 -1.6 -0.37 1.2 0.58 -1.4 -0.28 -0.14 -0.44 -0.64 1.9 -0.19 -0.16 0.67 0.41 1 0.36 2.1 -2 2 0.38 0.084 -1.6 0.06 1.3 0.75 0.9 -1.8 0.13 -0.92 -0.76 0.66 -0.37 -0.61 0.3 2.1 -0.12 -2.6 2.9 0.76 -1.3 0.86 -0.25 -1.1 -0.26 0.011 -0.052 -0.0073 -0.93 1.7 0.47; 21 | 1.3 0.59 -1.5 0.44 -1.2 0.29 -0.15 -1 1.9 -0.11 2.3 -0.27 -0.94 -1.4 1.5 -0.67 0.81 -0.21 0.23 1.5 0.38 0.51 0.055 -0.65 -0.37 -0.54 -1.5 0.58 -1.4 1.4 -0.27 -0.38 0.7 -0.75 -0.87 -0.6 0.11 1.2 0.25 -0.48 -0.26 -0.9 1.3 0.57 -0.46 1 0.33 -2.1 0.21 -0.89 -0.36 2.2 1.6 0.86 0.23 1 -2.3 -0.049 0.66 0.11 -0.38 0.43 0.21 1.1 -1.4 0.52 -1.4 -0.32 0.6 -0.07 -0.44 1.7 0.15 0.63 1.2 0.54 1.2 0.063 0.38 -1.7 1.5 1.3 -1 1.5 0.17 0.69 -2.1 -1.2 1.2 -0.12 -1.2 -0.87 0.061 0.0047 -1.1 -0.086 -2.8 0.92 -0.64 -1.4; 22 | 0.67 -0.64 -0.23 -0.95 -0.041 1.5 -0.43 -1.2 -0.34 0.38 0.52 -0.13 -1.2 1.8 -1.9 -0.15 0.041 -0.2 0.69 0.43 -1.3 -0.42 1.3 0.1 -0.47 0.91 0.56 0.24 0.25 0.48 -1.2 -0.95 -0.23 1.2 0.81 -2.1 2.7 0.96 -1.5 0.069 2.5 0.59 0.23 -0.3 0.033 1.7 0.07 -0.36 -0.4 0.28 -0.4 -0.16 -2.1 -0.27 0.21 -0.36 -1.4 0.078 1.3 -0.41 -0.89 -2 -0.0081 0.43 -1 -0.62 0.46 0.59 0.018 -0.58 -0.56 -0.47 1.6 -1.1 0.3 -0.47 -0.061 0.37 0.91 1.1 -0.81 -0.65 -0.3 -0.044 -1.7 1.9 0.39 0.16 -1 -1.1 0.4 0.42 0.012 -0.039 1.9 -1.2 -0.15 0.041 -1.4 0.27; 23 | 1.2 0.38 0.12 0.78 -1.1 1.1 -0.079 0.29 -1.1 0.94 -0.012 -1.3 -1 0.33 -1.7 -2.4 -0.76 0.31 -0.64 -1.9 1.5 0.23 -2.5 -0.22 1.8 -0.17 -0.28 -0.35 -1.4 -0.79 0.25 0.23 -0.11 1.2 -0.51 0.11 0.41 -1.7 0.31 0.4 0.86 0.55 -1.4 1.1 0.8 0.71 -1.5 0.55 0.065 -0.75 -1.2 -0.7 2.9 -0.42 -0.72 1.3 -1.8 2 0.31 0.51 -1.8 -0.55 0.02 1.3 -0.64 1.3 0.39 0.83 -1.6 0.47 0.88 0.095 -0.78 -0.45 1.1 2.4 -0.39 -0.1 0.15 1.5 -1.3 0.062 1.5 -0.03 0.28 0.33 -0.86 0.79 1.7 -2.8 -0.58 -0.13 -0.17 -0.51 -0.17 -0.75 0.58 0.41 -0.76 2.6; 24 | -1.2 -1 0.31 0.57 -1.3 -0.68 1.5 -0.43 -0.21 -2.1 0.91 -1.7 -0.4 -1.1 -0.57 0.47 -0.089 -0.57 -1 0.47 0.033 -0.96 0.58 -0.28 0.75 -0.34 -1.3 0.89 0.15 0.75 0.1 1.2 0.13 -0.61 0.74 1.5 -1.3 -0.99 -2 1.1 -0.85 -0.42 -0.15 -0.18 0.9 -0.75 -0.42 -1.6 -1.8 1.6 -1.1 0.56 1.4 -0.21 0.76 1 1.1 -0.073 
0.86 -1.2 -1.6 -1.9 -0.56 -0.19 0.17 0.97 2.1 -1.7 1.2 1.3 -0.81 0.29 1.6 -0.73 0.77 2 0.61 -0.7 -0.2 0.24 1.7 -0.74 1.8 -0.38 0.39 -0.095 -1.2 -0.58 -0.49 0.25 -0.98 0.54 -0.69 -1.2 -0.73 0.95 0.75 0.6 0.16 -0.054; 25 | -0.02 -0.02 1.4 -0.82 -0.26 -1.3 -0.61 0.056 1.2 -0.64 0.056 -0.7 0.17 0.62 -0.19 0.12 -2 -0.98 -0.19 1.3 1.9 -0.15 -1 -0.73 0.065 0.54 -0.89 1.6 -1.7 -0.17 -0.041 -0.58 -0.8 0.81 0.85 0.053 0.38 0.69 0.53 0.62 0.81 0.062 -0.5 -1.5 0.14 0.23 -0.021 -0.21 1.7 0.57 0.29 -0.05 1.1 -0.17 -1.6 0.21 -0.14 0.94 0.13 -0.096 -1.2 -0.11 1.9 0.13 1.3 -2.4 -0.32 -1.9 0.68 1.6 -0.26 0.92 1.1 0.35 -1.3 0.8 0.64 -0.39 1.5 -1.1 0.1 -0.18 -1.3 -0.55 -0.099 0.031 2.6 0.53 0.17 -0.86 0.12 0.88 -0.99 0.71 -0.99 -0.18 -0.17 1.9 0.38 0.47; 26 | -0.16 -0.048 -0.35 -0.27 0.95 -0.073 -1.3 -0.37 -1.1 -0.7 -1.1 0.28 -0.12 1.3 0.0089 -0.59 1.1 -0.45 -1.1 0.64 -1.2 0.74 0.94 -0.065 -0.29 0.93 -0.99 -1.1 0.72 -0.82 -2.2 -0.5 -0.24 0.22 -0.83 0.16 0.5 -0.97 0.34 -0.29 0.7 0.46 -1.7 1.4 -1.6 -0.22 0.23 -0.43 0.33 0.32 -1.9 1.2 -0.77 0.22 -1.1 -0.3 1.1 -0.08 0.017 0.45 -0.42 -1.3 -0.22 -0.66 1.9 -1.1 1.5 -0.44 -0.78 -0.7 0.49 0.51 -0.17 -0.51 -0.51 0.031 1 0.016 -0.62 2.4 -0.8 1.1 0.39 0.93 0.18 -0.61 -0.9 1.7 0.35 1.1 0.068 -1.3 -0.05 0.73 0.89 -0.066 -0.58 0.71 0.18 -2.1;]; 27 | 28 | y = X'*c_true + 0.1*(sqrt(n))... 29 | *[-0.8;-0.46;0.19;0.89;-1.6;-0.32;-0.7;-0.74;0.37;1.4;0.46;0.66;1.1;0.98;-1.3;-0.023;0.13;2.4;0.9;0.076;0.36;-2.1;-2.3;-0.37;1.3;0.56;-0.18;-0.036;1.9;1.3;-0.36;-0.33;0.083;0.43;-1.2;-2.7;-0.54;2.2;-0.61;1.4;1.2;0.74;-0.12;-0.031;-1;-1;0.63;0.87;2.1;-0.59;0.59;1.5;2.1;-0.75;-1.5;-0.21;0.49;-0.32;0.82;-1.4;1.1;0.98;0.49;1.3;0.59;0.45;-1.4;-0.92;-1.4;-1.5;0.097;-0.23;-0.32;0.63;0.039;-0.016;0.45;-0.52;-1.1;-0.048;-0.5;1.6;-0.24;-0.96;-0.15;1.7;1.6;-1.1;0.097;0.25;2.2;0.31;2;-0.12;-0.27;1.4;-0.074;-0.36;-0.78;-0.25]; 30 | 31 | % Reorder measurements, then censor 32 | [y, sort_ind] = sort(y); 33 | X = X(:,sort_ind); 34 | D = (y(M)+y(M+1))/2; 35 | y = y(1:M); 36 | -------------------------------------------------------------------------------- /w8-geometric_problems/quad_metric_data_norng.m: -------------------------------------------------------------------------------- 1 | % data for learning a quadratic metric 2 | n = 5; 3 | N = 100; 4 | N_test = 10; 5 | 6 | 7 | 8 | X = [1.164954 1.696142 -1.446172 -0.360030 -0.044881 0.513478 0.375041 -0.322940 0.847649 -0.557094 -1.098195 -0.977814 -0.507700 -0.612911 1.133000 0.924159 0.394600 -0.137414 0.039885 -0.786457 -0.127443 -0.620214 -0.262681 0.499521 0.438705 0.292315 -0.759697 0.670292 -0.902031 0.846106 0.526163 0.889164 -1.010674 -1.406949 -0.165923 0.041430 -0.844414 0.336297 1.487524 0.786430 -0.702669 1.802440 -1.931134 -1.238566 1.547177 -2.526706 0.899884 -0.382918 -0.594524 1.879957 0.396727 -0.031243 0.251078 0.862500 0.968992 0.536007 1.524681 1.157783 -1.898778 -1.454749 0.418469 1.766708 -0.159448 -1.179060 -1.226502 0.658153 -0.397914 1.271743 -1.389722 -0.797533 -0.268286 1.939318 1.338814 0.420989 0.139860 -2.014986 -0.267458 0.605540 0.186747 0.434313 0.149996 1.136805 -1.378907 0.086932 -0.066596 0.642066 0.565239 -0.591204 0.393682 0.667201 -0.514013 1.289554 -0.227852 -0.904204 -1.586917 -0.047555 -0.391039 -0.956374 1.042360 -1.024905; 9 | 0.626839 0.059060 -0.701165 -0.135576 -0.798945 0.396681 1.125162 0.317988 0.268101 -0.336706 1.122648 -1.021466 0.885299 -0.209144 0.149994 -1.814115 0.639406 0.615770 -2.482843 0.634809 0.554172 0.237149 0.976490 -1.055375 -1.247344 2.565910 -0.674721 0.420146 -2.053257 -0.184538 
-0.184454 -1.299152 -0.960498 1.030812 0.300907 -1.098050 -0.311630 -0.221361 -0.836821 -1.461639 0.356429 -0.642984 0.660300 -1.889236 0.644933 -0.312981 -0.200899 0.155083 0.130246 -1.003849 -0.527115 0.778212 -0.310471 -1.034706 -0.747317 0.298451 -0.195261 0.161908 1.822525 0.466546 0.247349 -0.382104 2.704026 -0.277776 0.069600 0.491314 0.864280 -0.035344 0.229328 -0.936741 -1.082140 -0.895840 1.222299 -0.433373 -0.748089 0.491717 -0.570245 -0.624481 1.594939 -0.386207 0.542038 0.391314 -0.260172 1.955674 0.373381 0.923087 -0.610781 1.691546 -0.905427 -0.067794 1.896261 -0.530575 0.376770 0.698670 -0.920783 -0.614736 -1.382045 0.451807 1.209120 -1.056846; 10 | 0.075080 1.797072 1.245982 -1.349338 -0.765172 0.756219 0.728642 -0.511172 -0.923489 0.415227 0.581667 0.317688 -0.248094 0.562148 0.703144 0.034973 0.874213 0.977894 1.158655 0.820410 -1.097344 -1.586847 0.977815 -0.450743 0.324667 -0.457816 -1.171687 -2.872751 0.089086 1.030714 0.198783 1.182573 0.691160 -0.759874 -0.322467 1.566724 0.397810 0.016649 -1.300982 1.554466 0.652636 0.109555 -1.102510 -0.973585 -2.148359 -0.593618 -0.233735 -0.964648 0.035014 -0.497446 0.344571 2.180484 -0.923004 -0.192673 -2.796024 0.284043 0.017260 1.557064 -1.518415 0.545437 0.704110 -0.911425 -0.198500 -1.581053 -0.396516 0.800734 -0.177618 -1.501329 0.271190 -0.002433 2.014134 -0.304158 -1.595978 0.706252 -0.628975 -1.554975 -0.187267 0.572228 0.321307 -0.112564 0.254409 1.605148 0.994768 0.161454 0.217314 -1.555108 1.231111 0.953356 -1.274473 -1.735660 -0.253230 -0.692971 1.221556 0.482598 -0.614274 0.240362 1.076292 -1.253778 0.780955 2.887723; 11 | 0.351607 0.264069 -0.638977 -1.270450 0.861735 0.400486 -2.377454 -0.002041 -0.070499 1.557814 -0.271354 1.516108 -0.726249 -1.063923 -0.052412 -1.807862 1.752402 -1.115348 -1.026279 -0.176027 -0.731301 -0.401485 1.170021 1.270378 0.390070 -1.610827 2.032930 1.685874 2.087099 -1.527623 1.590427 1.817472 -0.758618 0.874127 -0.368411 -1.048423 1.049786 -1.192361 1.574132 -0.597535 0.215671 -0.719038 -0.102971 0.212116 -1.028845 0.332322 1.449907 0.038756 -0.624674 -1.504397 -0.723291 0.437814 -0.384776 -1.299723 0.696732 0.959664 0.246340 -0.193544 -1.051071 1.320319 0.631939 -0.996090 -0.141405 1.049022 1.388807 -0.767269 1.874381 0.365373 -0.366360 0.396086 1.944031 0.555253 -1.067730 0.227857 1.394831 -0.140609 1.208557 -0.724410 0.866841 -0.964333 -0.307241 0.825892 1.834034 -0.628688 -0.179457 0.663594 0.994300 -1.930055 0.346546 0.806349 -0.174531 -0.859806 1.098288 0.811696 -0.334672 0.125017 1.269466 0.256435 -1.179900 -0.267744; 12 | -0.696513 0.871673 0.577350 0.984570 -0.056225 -1.341381 -0.273782 1.606511 0.147891 -2.444299 0.414191 0.749432 -0.445040 0.351589 2.018496 1.028193 -0.320051 -0.550021 1.153487 0.562474 1.404732 -0.770692 0.159311 0.898694 -0.405138 -2.669524 0.968481 0.027925 0.365118 0.964939 0.032192 -0.584302 -0.096972 0.761127 1.147895 0.422724 -0.340796 -0.131646 1.166040 -1.210568 -0.263896 0.420628 -1.059802 0.493442 -0.141582 0.558851 1.836132 0.765458 -0.539775 -0.095449 1.268193 1.333329 1.158181 0.306596 3.206908 2.087593 -0.854485 1.651301 0.049931 -0.404494 -0.992362 1.195143 0.411268 0.302689 1.364422 0.364420 0.172400 -0.198660 1.376960 -0.508693 -1.521529 -0.324247 -0.759919 -1.016992 -1.647691 0.244944 -0.638855 1.192196 1.291844 -2.057251 -0.417112 1.470390 -1.715910 -1.438824 0.025673 -0.609500 -0.803475 0.512845 -1.195235 -0.914801 0.978788 0.529038 -0.853014 0.327883 0.080345 -0.223605 0.487268 0.421229 1.001450 -0.488540]; 13 | 14 | Y = [0.419420 
-0.611729 -0.506138 -2.122378 -0.673263 -1.350292 0.202680 0.186106 1.408075 0.179925 -0.683631 0.450343 -0.201343 -0.906374 -0.179097 0.067372 1.177170 1.173296 -0.574005 -0.081630 1.662312 1.166705 -0.960461 -0.915962 0.427947 0.213963 0.261843 0.144555 -0.972946 -0.534127 -0.310909 -1.719190 -0.345134 -0.785496 -0.275569 -0.744296 2.680118 -0.583258 -2.068566 0.385524 0.610146 -0.226541 0.263481 -0.988875 -0.130638 -1.266094 -0.768533 1.100780 -0.328912 -1.555024 0.698124 1.361879 -1.159160 -1.450383 -1.304731 1.000335 0.125589 -0.260304 -1.212525 -0.265477 -1.474263 -2.366324 1.195417 1.966075 2.955089 -1.133640 -2.032843 -0.902634 -1.327697 0.323356 0.096060 -0.875772 -1.672760 -1.548104 -0.426525 1.189467 0.750603 -1.340946 -0.876102 0.982860 0.016264 -0.934128 0.660062 0.131692 1.855048 -0.835704 -1.685751 -0.632046 1.599021 -0.245918 1.132966 -0.997240 -0.242387 0.082218 0.836056 -2.938220 1.116575 0.750101 -1.146451 -0.040269; 15 | -0.742745 0.100402 -0.983137 -0.850378 0.989850 0.092861 -0.421487 0.667174 -1.476422 0.029963 0.813277 -0.119641 -1.236394 -0.515380 0.168926 0.184150 -1.298539 1.611950 1.362962 -1.223518 -0.256088 -0.659430 0.130287 1.490409 0.357348 0.100046 -0.440424 0.810685 1.150638 0.688182 0.177537 0.090665 -1.015168 -0.242882 2.150379 -0.200403 -0.384886 -1.039926 1.674866 -0.784288 0.478515 1.329307 0.647336 1.036852 0.015360 1.299368 1.038744 -0.393357 1.739255 0.853390 0.623186 -0.105752 1.466680 0.475028 0.443241 -0.010695 1.233429 1.302974 0.593773 1.787029 0.268053 -0.869979 -1.700570 -2.301185 1.874092 0.462473 0.564390 1.279284 0.786037 0.615560 -0.395321 0.665887 -0.027883 0.612520 -0.508498 -0.363852 0.547452 0.181441 -1.256036 1.333580 -1.287073 0.035241 -1.229566 1.644603 2.084384 -1.018327 0.358190 0.783785 0.990909 1.646456 -0.492347 1.105070 -0.445072 0.732257 -1.958656 -1.104897 -1.372281 -0.728689 1.365443 -0.281505; 16 | 0.572912 -1.112905 -0.437222 -0.424095 -0.577293 1.390198 1.218891 -0.429976 -0.809376 -0.711098 0.137012 0.299881 0.576187 1.181516 0.796552 -0.056014 -0.410115 2.318210 1.148564 -1.084040 -0.807258 0.385522 0.893410 2.148636 -0.513751 0.721376 -1.816264 -0.617132 0.042902 1.722587 -0.976696 1.660189 0.805136 -0.020406 -1.161508 -0.276941 -0.057147 -1.398089 -0.582561 0.384008 0.620631 -1.655171 0.814268 -0.057835 -0.673946 0.531241 0.639814 -0.222340 -1.587151 -0.779175 0.625826 -2.359716 0.255200 -0.586808 -0.400561 -0.832345 1.059862 -1.358675 0.597249 0.034741 1.765786 -0.699984 -0.585549 -0.994563 0.109977 -0.046205 1.156908 -0.346535 1.117178 -1.723877 -1.651466 0.937835 0.438835 0.193465 0.241914 -0.228302 -1.737480 0.030161 -0.269798 -0.926771 -1.553320 1.576796 -0.570628 -0.236055 -0.227784 -0.945054 -0.671933 0.240538 0.066988 1.561130 -0.336589 -1.148196 0.748184 -0.962436 -0.779537 -1.547502 2.074836 -0.648186 -0.632439 -1.227824; 17 | -0.143680 0.957172 1.640728 -1.029596 -1.278514 0.153271 -1.751493 -1.146584 -0.196892 0.456901 0.403788 0.643104 0.933544 0.200532 -0.464512 0.674326 -1.847284 -0.814667 -0.378804 -0.440653 -0.077400 -0.187585 0.348338 1.129853 -0.086701 0.091444 0.528719 -1.191801 0.664119 -1.273178 -0.765922 -0.653223 -1.287555 -1.440631 -0.001266 -0.975253 -1.728765 0.844010 -0.785796 0.839488 -1.770459 -0.060916 -0.709093 -0.823255 -0.525083 -0.719614 -0.661728 -0.259005 1.013978 0.844147 -1.408990 -0.777909 -0.146252 0.305144 -0.621204 -1.181208 -1.755407 0.359975 -0.294485 0.325478 -1.768180 0.859512 2.205565 0.106741 -1.372973 -1.298474 1.253641 0.081997 -0.540079 1.241964 -1.098188 
1.563100 0.800308 -1.093574 -0.408954 -0.824891 -0.296406 0.381845 -0.732456 1.154125 -0.059866 -1.532254 -0.682228 -0.706986 -0.265914 -0.865398 -1.021033 -1.470454 1.993145 -0.758432 0.059077 -0.473563 -2.271209 0.876035 1.224104 -1.434385 1.485758 0.138927 1.959653 1.159358; 18 | -0.156844 -0.577886 0.530367 -0.349036 0.281387 0.801381 2.945547 -1.873306 -1.127700 -1.077242 1.288723 -1.775968 1.131390 -0.364330 -0.814449 2.557545 -0.899062 0.853069 -2.380476 -1.522343 -1.595641 -1.598445 0.760433 -1.434479 -0.668682 -0.002400 -0.351920 -2.082037 1.577989 -1.126164 -2.372749 2.050284 2.145294 0.780599 -1.142249 0.397122 1.280767 0.248825 -0.594914 0.471760 -0.351909 -0.470341 -0.669992 1.956099 0.711471 -0.107439 -0.170603 -1.262057 1.207766 -0.643000 -1.170419 -0.059081 -0.108548 1.017417 1.200154 -0.766562 -0.554640 1.314540 -0.935959 0.565107 1.627511 -1.296415 -0.827220 1.375143 -1.354662 0.080468 0.309434 -1.756170 -0.920435 0.568060 -1.103339 -0.756307 -0.974407 0.977825 -1.021484 0.610675 -0.369597 1.463439 0.279208 2.135796 0.582646 -0.254342 0.254562 -0.588331 -0.021790 -0.808710 2.498869 0.334877 -0.633251 -0.903983 -0.991926 0.333565 -1.080591 -0.633172 0.576336 0.128015 -0.043598 -0.153634 0.431631 -1.274743]; 19 | 20 | X_test = [-0.186647 -0.636795 -0.242266 -1.000994 -1.094656 -0.088627 0.357200 1.236353 -0.657828 -1.081924; 21 | -2.001022 0.007438 0.545292 1.134810 1.258890 -0.459909 1.365078 -1.397270 -0.864847 0.965412; 22 | -1.028858 -0.002620 1.980639 0.758663 0.336024 -0.261001 -0.718739 -1.722115 -2.186815 0.701975; 23 | 0.545433 0.056516 0.111102 0.291716 1.496372 0.085050 0.415523 -1.234309 -0.785680 -1.487673; 24 | 0.224813 -1.022040 3.533658 2.245274 -0.665822 -0.009841 0.179097 0.494105 -0.905888 -0.197859]; 25 | 26 | Y_test = [-0.294687 -0.689915 -2.285898 -0.938975 0.035156 -0.430063 0.352267 -0.385081 -0.752931 -0.014699; 27 | -0.222314 0.699612 -0.264101 -0.767007 -0.183959 1.502923 -0.280606 1.081048 0.391797 -0.011185; 28 | 1.801455 0.772426 -0.945537 0.322200 0.057886 0.579556 0.961475 -1.396751 0.404377 -0.063508; 29 | -0.133797 0.497607 0.310190 0.612367 -0.926959 -0.457190 1.309079 -1.575387 -1.116767 -1.027934; 30 | -2.243783 0.391663 0.852659 0.069602 2.284313 -0.057675 -1.306810 -0.515741 -1.484789 0.988259]; 31 | 32 | d = [3.105698 9.303920 6.834464 8.535880 6.895867 2.084421 5.802307 6.078630 7.676743 7.889291 1.747867 5.421094 8.056460 5.403059 6.134915 9.260686 11.292909 6.465282 12.659127 6.716904 8.247420 7.677115 2.345364 10.289954 7.556104 9.927747 2.885653 8.667243 10.105910 8.164997 4.403754 10.905269 6.736946 7.881454 9.098149 5.616785 13.511874 8.607833 10.158668 7.828967 6.669338 10.942197 7.102851 12.512170 1.693926 5.316018 6.161766 7.008868 8.568092 13.728702 4.080557 10.282838 6.515821 11.142170 8.083361 4.659479 7.252958 11.903167 9.148000 7.844158 7.144369 12.485157 16.621630 13.365911 10.855162 4.169473 3.658437 6.554199 5.956399 6.189959 15.132870 8.958080 11.450199 6.767207 6.598192 8.818651 8.531837 5.173845 8.337579 10.310235 6.315191 1.352438 12.100806 2.871881 5.391262 5.899694 12.221590 4.330038 5.430671 8.585915 9.817138 8.901824 9.322942 3.233721 4.747448 5.238966 4.640416 5.379597 11.164867 10.616969]; 33 | 34 | d_test = [7.600672 4.423181 9.997974 8.315172 12.786013 7.426758 11.055029 8.688143 6.585704 4.253190]; 35 | -------------------------------------------------------------------------------- /w6-approximation_and_fitting/cens_data2.m: 
-------------------------------------------------------------------------------- 1 | % data for censored fitting problem. 2 | n = 20; % dimension of x's 3 | M = 25; % number of non-censored data points 4 | K = 100; % total number of points 5 | c_true=[-0.43;-1.7;0.13;0.29;-1.1;1.2;1.2;-0.038;0.33;0.17;-0.19;0.73;-0.59;2.2;-0.14;0.11;1.1;0.059;-0.096;-0.83]; 6 | 7 | X = [0.29 -1.6 4.3e-05 0.62 -1.2 0.13 -0.33 0.47 -0.46 0.64 -1 0.49 -0.54 1.1 -0.9 0.84 -0.65 -0.98 1.1 -0.072 1.4 -0.78 -0.89 -2.4 -1.4 0.083 -0.57 -0.072 -0.026 1.1 2.1 -0.51 0.72 -0.089 -0.37 0.53 -0.027 -0.51 -0.61 0.76 -1.4 0.76 0.2 -0.42 0.44 -1.6 -0.85 -1 0.49 0.72 -0.15 1.3 0.66 -0.26 1.7 -0.25 0.26 0.56 -0.8 -0.073 -0.3 0.18 -0.67 -1.4 -0.76 0.74 -0.33 -0.5 0.045 0.53 0.81 -0.8 0.25 0.31 -2.1 0.56 0.54 0.93 1.4 0.81 -0.4 -1.3 0.14 -0.023 -1.3 -1.8 -1.7 -1.2 0.8 -0.25 -0.3 -0.53 0.87 0.72 0.87 0.45 0.91 0.37 -2.3 -0.6; 8 | -1.3 0.26 -0.32 0.8 -2.2 0.66 -0.84 -0.9 0.37 -0.6 -0.18 -0.005 -1.3 -0.25 0.14 -0.72 -1.1 -0.69 2.4 0.28 1.3 -0.77 0.14 -0.22 0.61 0.77 -1.5 -2.4 -1.1 1.6 0.08 0.25 0.039 -1 -0.83 1 0.17 0.23 0.69 -0.69 -0.69 -1.7 0.26 -0.61 0.57 0.43 0.35 -0.66 -0.87 1.6 0.32 0.67 -1.6 -1.4 0.12 -1.3 -0.19 0.48 0.31 -0.99 -0.17 0.23 -0.9 -0.3 -0.6 0.81 -2 0.21 2.4 2.1 -1 -0.0083 -1.4 1.4 -0.66 -2 0.68 1.2 0.42 1.9 0.91 1.2 -0.016 0.11 -0.3 -1.5 0.48 0.3 0.88 -0.15 1.2 0.54 -0.8 -0.28 2.4 0.89 1.5 -0.55 1.4 -0.99; 9 | 0.71 -1.1 1.1 0.94 0.99 -1.2 0.5 0.036 0.73 0.55 1.5 -0.28 1.1 -1.5 -0.14 -0.72 -0.048 1.3 0.23 1.4 -0.91 -0.11 -0.24 0.058 -1.3 2.2 -0.05 -0.69 0.75 1.4 -0.94 0.37 1.5 0.94 0.29 -1.1 0.88 -0.6 0.02 0.68 0.33 1.5 2.1 0.72 -0.69 -0.74 0.11 0.56 0.08 -2.1 1.3 -0.28 -3 -1.3 0.65 1.2 -0.079 -0.68 1.1 -0.75 0.18 -1.2 -0.15 -0.57 0.81 -0.14 1.6 0.75 -0.31 0.35 1.3 0.63 0.97 0.33 1.5 -0.76 -0.59 -0.25 0.069 0.4 -0.14 1.5 -0.94 0.81 -2.6 0.82 -0.45 -0.73 -0.22 -1.2 1 0.68 -0.75 -1.4 0.11 1.1 -1.1 0.58 1.4 1.2; 10 | 1.6 1.4 -1.9 -0.99 -0.52 -0.46 1.5 -0.63 2.1 -1.1 -0.038 1.3 -0.71 0.0097 -1.2 -0.2 0.38 -0.91 -0.27 0.18 -2.3 -0.98 -0.075 -0.42 -0.66 0.33 0.55 -1.4 0.5 -0.76 0.64 0.18 -1.7 -1.1 -1.8 0.36 0.18 0.021 1.1 -1.1 -1 -1.6 -2.3 0.34 0.83 0.56 -1.1 -1.2 -0.52 -0.74 -2.2 -0.023 0.54 -0.89 2.1 1.5 0.7 0.28 -1.7 -0.031 0.42 0.7 0.95 -0.12 0.07 -0.1 0.23 -0.95 0.19 0.23 0.015 0.15 1.6 0.48 -0.33 -2.4 -0.26 -0.71 0.29 -0.86 1.3 1.8 -1.5 -1 0.78 -0.23 0.39 -1.1 0.3 -0.022 2.1 0.54 -0.75 0.46 0.026 -0.1 1.8 -2 -0.45 2.4; 11 | -0.69 -0.81 0.43 0.21 0.33 -0.26 -0.55 0.54 -1.4 0.086 1.2 1.9 -0.011 0.071 1.2 -0.02 -0.33 -0.41 0.7 -0.54 1.8 -0.96 -0.36 -0.2 -0.15 0.86 0.083 0.33 -0.52 0.44 1.7 -0.037 -1 -0.71 -1.6 -0.037 0.76 0.42 -1.3 0.9 0.29 1.1 0.34 0.88 -2.2 -1.4 -0.68 -0.78 -1.4 0.18 1.3 -0.91 -1 0.59 -0.34 0.24 -0.8 -1.3 -1.1 0.99 1.7 0.43 1.6 -0.39 -1.8 -0.8 0.65 0.61 0.95 1.3 0.22 2.6 -1.4 0.4 2.7 -0.66 1.5 -0.59 0.47 2.4 0.32 0.63 0.36 -1 0.6 -1.6 0.053 -1.4 -0.52 0.62 -0.66 -0.51 -0.31 1.1 0.97 1.5 0.15 0.52 -0.63 2.3; 12 | 0.86 0.53 0.9 0.24 0.23 -1.2 -0.85 0.55 -1 -2 -0.7 -0.52 -0.00082 0.32 -0.015 0.28 -0.5 -0.51 -0.49 1.6 0.39 -2.4 -2.1 -1.5 0.25 0.68 1.6 0.6 -0.56 0.91 0.59 -1.6 -0.76 -1.2 2 -1.2 0.51 1.2 0.48 -2.1 1.1 -1.1 0.29 0.28 1.1 0.46 -0.28 0.27 -0.38 0.53 -0.38 -1 0.91 1.8 0.73 -1.4 -0.8 -0.72 -0.19 -0.6 2 1.5 0.43 -0.84 1.8 0.49 -1.1 1.8 -0.53 -0.013 1.7 -1.3 -1.5 -0.073 -1.6 -0.11 1 -0.26 1.8 -0.84 -0.48 0.083 0.48 0.28 0.94 -0.32 -0.49 -0.59 -0.16 1.9 -1.1 -1.3 -1.5 -1 -0.0053 0.17 -0.28 1.6 1 2.3; 13 | 1.3 0.22 0.73 -1 0.021 -1.3 -0.25 -0.2 1 -0.49 0.0075 0.1 -0.25 0.5 0.54 1.1 -0.036 1.6 
1.9 0.83 0.02 -0.84 -0.14 -1.1 -0.077 0.55 -0.33 0.15 -0.75 -1.1 0.79 0.34 2.2 1.1 -0.072 -0.28 0.13 0.77 -1.6 0.28 0.24 0.39 0.66 -0.15 -0.0016 0.63 0.65 1.5 -0.46 -0.55 0.0025 0.37 1.6 1.3 0.29 0.66 -0.0076 -0.66 0.46 1.5 0.7 -0.51 -0.56 -1.7 0.65 1.2 0.2 0.089 -1.1 -1.3 -2.1 1 -0.075 1.3 -0.54 0.3 0.3 1.2 0.26 0.28 0.076 2.1 0.32 0.29 -1 -0.8 0.24 0.52 -0.098 0.082 0.95 -0.61 0.83 -2.8 1.4 0.8 1.7 0.73 -0.49 -0.27; 14 | -1.6 -0.92 0.58 -0.74 -1 0.93 0.66 -2.1 -0.39 0.46 -0.78 -0.81 0.4 1.3 -0.72 0.62 -0.17 0.081 1.1 0.23 -0.41 0.26 1.4 -0.82 1.7 1 0.8 -0.1 0.93 0.2 0.11 -0.13 0.43 -0.68 2.6 -0.16 0.28 -2.6 -1.4 -0.73 0.16 0.97 -0.58 -0.09 -1.6 0.38 -1.2 -1.1 -0.29 0.3 0.88 0.9 -0.98 -0.49 -0.85 -2.6 -0.73 0.2 -0.92 -0.81 -1.4 -0.0067 0.18 -0.45 -1.5 1.3 1.7 2.6 -1.6 -0.56 0.11 0.78 0.081 0.98 0.55 -0.58 -0.82 -1.5 1.5 0.82 -0.11 1.3 -1.9 -0.25 -0.068 0.69 0.72 -1.5 1 1.6 -0.4 -0.57 -0.61 1 1.8 -1.7 0.67 0.56 -0.4 0.5; 15 | -1.4 -2.2 0.04 1.1 -0.95 0.011 -0.85 0.13 -1.4 -0.32 0.59 0.68 -0.26 -0.55 -0.66 -1.8 -0.96 -1.1 -1.2 0.67 -1.5 -0.18 0.65 0.37 1.6 1.3 -0.78 -2.6 -0.25 0.76 -0.16 0.49 -0.44 -1.7 -0.24 -1.1 -0.98 0.29 0.29 -0.77 0.41 0.82 0.89 0.29 -1.2 -1 -0.6 0.63 -0.3 -1.2 0.58 1.3 1 -2.2 -2.5 -0.53 -1.5 -1.8 -0.65 0.65 0.36 -0.53 -0.77 -1.5 -0.38 -0.28 0.73 -0.68 1.2 0.76 -1.1 -0.83 -0.84 1.7 1.5 -3.1 -0.49 -0.39 0.32 1.2 1.4 -1.8 0.68 -0.22 0.082 -0.042 0.15 -0.087 0.43 -0.38 -0.38 0.086 0.96 0.077 0.89 0.17 -0.35 -0.81 1.4 -0.12; 16 | 0.57 -0.059 0.68 -0.13 -0.37 -0.65 -1.2 1.6 0.32 1.2 -0.25 -2.4 -1.7 0.26 0.31 0.7 1.3 -1.1 -0.67 -0.51 0.22 -0.17 -0.38 -0.59 0.63 0.044 -1.3 0.028 -0.15 -1.3 0.87 0.6 0.03 0.81 0.17 -2 -0.94 0.83 -0.14 0.15 1.2 0.037 0.17 1.2 0.21 -0.35 -0.48 -0.8 -1.6 -0.19 -1.6 -0.13 0.16 0.24 -2.4 3.2 0.87 -1.4 0.62 -1.3 -0.57 0.72 -0.94 -0.096 0.21 0.22 0.79 2.8 0.49 -0.91 -1.6 -0.59 -0.56 -0.41 -0.46 1.6 0.87 0.28 0.8 -0.064 0.71 -0.014 0.23 0.9 -1.8 0.32 0.13 -0.013 -0.026 -1.3 0.48 0.69 2 -1.9 1.1 -0.48 -0.26 -0.54 0.28 -0.0019; 17 | -0.4 -1 0.57 0.39 -1.2 0.81 -0.12 1 1.6 -0.63 0.48 0.99 -1 -0.013 0.11 0.81 0.44 1.7 1.3 0.86 -1.4 -0.12 -0.66 1.5 0.092 -0.31 0.67 -0.88 -1.3 -0.95 -0.19 -0.086 -0.32 1.4 0.92 -0.91 -0.013 -0.0081 -1.1 -0.34 1.4 -0.93 0.85 0.81 0.22 0.44 0.98 -0.31 1.1 -0.3 -1.5 0.61 -0.59 -0.74 -0.35 0.44 -0.27 0.2 -1.3 -0.87 -1 1.1 -1.4 0.91 -0.77 0.63 0.6 -0.017 1.6 1.4 0.64 0.066 -0.028 0.57 -0.5 -0.41 0.36 0.83 0.58 0.65 0.37 2.5 1.2 -0.64 -1.8 0.51 -1 -0.35 -0.38 -0.72 0.44 2.1 0.29 -0.61 0.4 -0.73 -0.64 1.3 1.9 -0.43; 18 | 0.69 0.61 -0.26 0.088 -1.1 0.23 -0.065 -1.6 0.71 -2.3 0.67 0.22 0.24 -0.58 1.8 0.64 1.3 1.9 0.39 0.27 -0.84 0.17 0.25 0.14 -0.81 0.23 -1.4 -0.27 0.31 0.78 0.075 0.33 0.98 0.67 -0.18 -0.0056 0.35 0.86 -0.29 0.97 -1 -0.11 0.96 -1.4 -1 -1.6 1.8 -0.6 1.3 0.96 0.57 2 1.6 -1.8 -0.61 -1.1 -1.6 1.5 1 -0.47 0.7 0.5 -1.9 2.4 -0.11 -0.55 -0.058 0.27 -0.45 0.25 -0.4 -0.012 -1.2 0.74 1.2 1.4 -0.08 -0.98 1.8 -1.8 -0.6 -0.67 0.13 -0.18 -0.66 -1 1.3 0.99 -0.24 -0.56 0.38 -0.0029 -0.39 1.2 0.19 -2.1 -0.91 -0.48 -0.8 -0.19; 19 | 0.82 0.51 -0.38 -0.64 1.5 -0.99 0.49 -0.079 2 -1.2 -0.078 0.26 -1.3 2.1 -0.28 1.3 -0.5 1.6 0.39 0.62 -0.21 -0.5 -0.38 -1.9 -0.46 1 -1.3 -0.33 2.7 -0.0063 -0.53 -0.34 0.018 0.14 -0.52 -1.7 -0.89 0.77 -0.58 -0.11 0.21 -0.8 1.3 0.12 -0.45 -0.7 1.4 1.3 -0.13 -0.53 -0.91 2.3 -0.52 0.45 -0.41 0.89 -0.39 0.37 0.86 0.22 0.48 2.8 -0.065 0.52 0.34 0.23 -1.1 -0.91 1 0.12 -0.4 -0.077 0.73 0.22 0.041 -1.4 0.75 -0.1 -0.94 0.06 -0.85 0.26 0.18 0.72 1.4 0.099 1.2 0.64 -1.2 0.62 1.1 -0.09 0.58 -0.58 -0.69 -1.8 0.72 -1.8 -0.56 
0.99; 20 | 0.71 1.7 -0.3 -0.56 0.056 1.3 -0.6 -0.68 0.5 1.1 0.89 1.2 -0.35 -0.26 2.2 0.33 -1.1 -1.3 -1.7 -1 0.76 -0.71 -0.53 -0.45 -1.4 1.2 -0.61 -1.2 0.29 0.52 -0.69 -0.32 0.82 -0.86 1.4 1.3 0.81 1.3 -0.9 1 0.59 -1.7 -0.064 -0.22 1.4 -1.1 0.91 0.86 -0.74 -0.9 -1.6 -0.37 1.2 0.58 -1.4 -0.28 -0.14 -0.44 -0.64 1.9 -0.19 -0.16 0.67 0.41 1 0.36 2.1 -2 2 0.38 0.084 -1.6 0.06 1.3 0.75 0.9 -1.8 0.13 -0.92 -0.76 0.66 -0.37 -0.61 0.3 2.1 -0.12 -2.6 2.9 0.76 -1.3 0.86 -0.25 -1.1 -0.26 0.011 -0.052 -0.0073 -0.93 1.7 0.47; 21 | 1.3 0.59 -1.5 0.44 -1.2 0.29 -0.15 -1 1.9 -0.11 2.3 -0.27 -0.94 -1.4 1.5 -0.67 0.81 -0.21 0.23 1.5 0.38 0.51 0.055 -0.65 -0.37 -0.54 -1.5 0.58 -1.4 1.4 -0.27 -0.38 0.7 -0.75 -0.87 -0.6 0.11 1.2 0.25 -0.48 -0.26 -0.9 1.3 0.57 -0.46 1 0.33 -2.1 0.21 -0.89 -0.36 2.2 1.6 0.86 0.23 1 -2.3 -0.049 0.66 0.11 -0.38 0.43 0.21 1.1 -1.4 0.52 -1.4 -0.32 0.6 -0.07 -0.44 1.7 0.15 0.63 1.2 0.54 1.2 0.063 0.38 -1.7 1.5 1.3 -1 1.5 0.17 0.69 -2.1 -1.2 1.2 -0.12 -1.2 -0.87 0.061 0.0047 -1.1 -0.086 -2.8 0.92 -0.64 -1.4; 22 | 0.67 -0.64 -0.23 -0.95 -0.041 1.5 -0.43 -1.2 -0.34 0.38 0.52 -0.13 -1.2 1.8 -1.9 -0.15 0.041 -0.2 0.69 0.43 -1.3 -0.42 1.3 0.1 -0.47 0.91 0.56 0.24 0.25 0.48 -1.2 -0.95 -0.23 1.2 0.81 -2.1 2.7 0.96 -1.5 0.069 2.5 0.59 0.23 -0.3 0.033 1.7 0.07 -0.36 -0.4 0.28 -0.4 -0.16 -2.1 -0.27 0.21 -0.36 -1.4 0.078 1.3 -0.41 -0.89 -2 -0.0081 0.43 -1 -0.62 0.46 0.59 0.018 -0.58 -0.56 -0.47 1.6 -1.1 0.3 -0.47 -0.061 0.37 0.91 1.1 -0.81 -0.65 -0.3 -0.044 -1.7 1.9 0.39 0.16 -1 -1.1 0.4 0.42 0.012 -0.039 1.9 -1.2 -0.15 0.041 -1.4 0.27; 23 | 1.2 0.38 0.12 0.78 -1.1 1.1 -0.079 0.29 -1.1 0.94 -0.012 -1.3 -1 0.33 -1.7 -2.4 -0.76 0.31 -0.64 -1.9 1.5 0.23 -2.5 -0.22 1.8 -0.17 -0.28 -0.35 -1.4 -0.79 0.25 0.23 -0.11 1.2 -0.51 0.11 0.41 -1.7 0.31 0.4 0.86 0.55 -1.4 1.1 0.8 0.71 -1.5 0.55 0.065 -0.75 -1.2 -0.7 2.9 -0.42 -0.72 1.3 -1.8 2 0.31 0.51 -1.8 -0.55 0.02 1.3 -0.64 1.3 0.39 0.83 -1.6 0.47 0.88 0.095 -0.78 -0.45 1.1 2.4 -0.39 -0.1 0.15 1.5 -1.3 0.062 1.5 -0.03 0.28 0.33 -0.86 0.79 1.7 -2.8 -0.58 -0.13 -0.17 -0.51 -0.17 -0.75 0.58 0.41 -0.76 2.6; 24 | -1.2 -1 0.31 0.57 -1.3 -0.68 1.5 -0.43 -0.21 -2.1 0.91 -1.7 -0.4 -1.1 -0.57 0.47 -0.089 -0.57 -1 0.47 0.033 -0.96 0.58 -0.28 0.75 -0.34 -1.3 0.89 0.15 0.75 0.1 1.2 0.13 -0.61 0.74 1.5 -1.3 -0.99 -2 1.1 -0.85 -0.42 -0.15 -0.18 0.9 -0.75 -0.42 -1.6 -1.8 1.6 -1.1 0.56 1.4 -0.21 0.76 1 1.1 -0.073 0.86 -1.2 -1.6 -1.9 -0.56 -0.19 0.17 0.97 2.1 -1.7 1.2 1.3 -0.81 0.29 1.6 -0.73 0.77 2 0.61 -0.7 -0.2 0.24 1.7 -0.74 1.8 -0.38 0.39 -0.095 -1.2 -0.58 -0.49 0.25 -0.98 0.54 -0.69 -1.2 -0.73 0.95 0.75 0.6 0.16 -0.054; 25 | -0.02 -0.02 1.4 -0.82 -0.26 -1.3 -0.61 0.056 1.2 -0.64 0.056 -0.7 0.17 0.62 -0.19 0.12 -2 -0.98 -0.19 1.3 1.9 -0.15 -1 -0.73 0.065 0.54 -0.89 1.6 -1.7 -0.17 -0.041 -0.58 -0.8 0.81 0.85 0.053 0.38 0.69 0.53 0.62 0.81 0.062 -0.5 -1.5 0.14 0.23 -0.021 -0.21 1.7 0.57 0.29 -0.05 1.1 -0.17 -1.6 0.21 -0.14 0.94 0.13 -0.096 -1.2 -0.11 1.9 0.13 1.3 -2.4 -0.32 -1.9 0.68 1.6 -0.26 0.92 1.1 0.35 -1.3 0.8 0.64 -0.39 1.5 -1.1 0.1 -0.18 -1.3 -0.55 -0.099 0.031 2.6 0.53 0.17 -0.86 0.12 0.88 -0.99 0.71 -0.99 -0.18 -0.17 1.9 0.38 0.47; 26 | -0.16 -0.048 -0.35 -0.27 0.95 -0.073 -1.3 -0.37 -1.1 -0.7 -1.1 0.28 -0.12 1.3 0.0089 -0.59 1.1 -0.45 -1.1 0.64 -1.2 0.74 0.94 -0.065 -0.29 0.93 -0.99 -1.1 0.72 -0.82 -2.2 -0.5 -0.24 0.22 -0.83 0.16 0.5 -0.97 0.34 -0.29 0.7 0.46 -1.7 1.4 -1.6 -0.22 0.23 -0.43 0.33 0.32 -1.9 1.2 -0.77 0.22 -1.1 -0.3 1.1 -0.08 0.017 0.45 -0.42 -1.3 -0.22 -0.66 1.9 -1.1 1.5 -0.44 -0.78 -0.7 0.49 0.51 -0.17 -0.51 -0.51 0.031 1 0.016 -0.62 2.4 -0.8 1.1 
0.39 0.93 0.18 -0.61 -0.9 1.7 0.35 1.1 0.068 -1.3 -0.05 0.73 0.89 -0.066 -0.58 0.71 0.18 -2.1;]; 27 | 28 | y = X'*c_true + 0.1*(sqrt(n))... 29 | *[-0.8;-0.46;0.19;0.89;-1.6;-0.32;-0.7;-0.74;0.37;1.4;0.46;0.66;1.1;0.98;-1.3;-0.023;0.13;2.4;0.9;0.076;0.36;-2.1;-2.3;-0.37;1.3;0.56;-0.18;-0.036;1.9;1.3;-0.36;-0.33;0.083;0.43;-1.2;-2.7;-0.54;2.2;-0.61;1.4;1.2;0.74;-0.12;-0.031;-1;-1;0.63;0.87;2.1;-0.59;0.59;1.5;2.1;-0.75;-1.5;-0.21;0.49;-0.32;0.82;-1.4;1.1;0.98;0.49;1.3;0.59;0.45;-1.4;-0.92;-1.4;-1.5;0.097;-0.23;-0.32;0.63;0.039;-0.016;0.45;-0.52;-1.1;-0.048;-0.5;1.6;-0.24;-0.96;-0.15;1.7;1.6;-1.1;0.097;0.25;2.2;0.31;2;-0.12;-0.27;1.4;-0.074;-0.36;-0.78;-0.25]; 30 | 31 | % Reorder measurements, then censor 32 | [y, sort_ind] = sort(y); 33 | X = X(:,sort_ind); 34 | D = (y(M)+y(M+1))/2; 35 | y = y(1:M); 36 | sd = sqrt(var(y)); 37 | 38 | 39 | X= X'; 40 | 41 | cvx_begin 42 | variable c(n) 43 | variable cen(K-M) 44 | minimize( norm(y - X(1:M,:)*c ,2) + norm( cen - X(M+1:K,:)*c ,2) ) 45 | subject to 46 | cen >= D 47 | cvx_end 48 | 49 | res = norm(c_true - c,2 )/norm(c_true,2); 50 | 51 | cvx_begin 52 | variable c(n) 53 | minimize( norm(y - X(1:M,:)*c ,2)) 54 | cvx_end 55 | 56 | res_ls = norm(c_true - c,2 )/norm(c_true,2); 57 | -------------------------------------------------------------------------------- /w6-approximation_and_fitting/censored_data.m: -------------------------------------------------------------------------------- 1 | % data for censored fitting problem. 2 | n = 20; % dimension of x's 3 | M = 25; % number of non-censored data points 4 | K = 100; % total number of points 5 | c_true=[-0.43;-1.7;0.13;0.29;-1.1;1.2;1.2;-0.038;0.33;0.17;-0.19;0.73;-0.59;2.2;-0.14;0.11;1.1;0.059;-0.096;-0.83]; 6 | 7 | X = [0.29 -1.6 4.3e-05 0.62 -1.2 0.13 -0.33 0.47 -0.46 0.64 -1 0.49 -0.54 1.1 -0.9 0.84 -0.65 -0.98 1.1 -0.072 1.4 -0.78 -0.89 -2.4 -1.4 0.083 -0.57 -0.072 -0.026 1.1 2.1 -0.51 0.72 -0.089 -0.37 0.53 -0.027 -0.51 -0.61 0.76 -1.4 0.76 0.2 -0.42 0.44 -1.6 -0.85 -1 0.49 0.72 -0.15 1.3 0.66 -0.26 1.7 -0.25 0.26 0.56 -0.8 -0.073 -0.3 0.18 -0.67 -1.4 -0.76 0.74 -0.33 -0.5 0.045 0.53 0.81 -0.8 0.25 0.31 -2.1 0.56 0.54 0.93 1.4 0.81 -0.4 -1.3 0.14 -0.023 -1.3 -1.8 -1.7 -1.2 0.8 -0.25 -0.3 -0.53 0.87 0.72 0.87 0.45 0.91 0.37 -2.3 -0.6; 8 | -1.3 0.26 -0.32 0.8 -2.2 0.66 -0.84 -0.9 0.37 -0.6 -0.18 -0.005 -1.3 -0.25 0.14 -0.72 -1.1 -0.69 2.4 0.28 1.3 -0.77 0.14 -0.22 0.61 0.77 -1.5 -2.4 -1.1 1.6 0.08 0.25 0.039 -1 -0.83 1 0.17 0.23 0.69 -0.69 -0.69 -1.7 0.26 -0.61 0.57 0.43 0.35 -0.66 -0.87 1.6 0.32 0.67 -1.6 -1.4 0.12 -1.3 -0.19 0.48 0.31 -0.99 -0.17 0.23 -0.9 -0.3 -0.6 0.81 -2 0.21 2.4 2.1 -1 -0.0083 -1.4 1.4 -0.66 -2 0.68 1.2 0.42 1.9 0.91 1.2 -0.016 0.11 -0.3 -1.5 0.48 0.3 0.88 -0.15 1.2 0.54 -0.8 -0.28 2.4 0.89 1.5 -0.55 1.4 -0.99; 9 | 0.71 -1.1 1.1 0.94 0.99 -1.2 0.5 0.036 0.73 0.55 1.5 -0.28 1.1 -1.5 -0.14 -0.72 -0.048 1.3 0.23 1.4 -0.91 -0.11 -0.24 0.058 -1.3 2.2 -0.05 -0.69 0.75 1.4 -0.94 0.37 1.5 0.94 0.29 -1.1 0.88 -0.6 0.02 0.68 0.33 1.5 2.1 0.72 -0.69 -0.74 0.11 0.56 0.08 -2.1 1.3 -0.28 -3 -1.3 0.65 1.2 -0.079 -0.68 1.1 -0.75 0.18 -1.2 -0.15 -0.57 0.81 -0.14 1.6 0.75 -0.31 0.35 1.3 0.63 0.97 0.33 1.5 -0.76 -0.59 -0.25 0.069 0.4 -0.14 1.5 -0.94 0.81 -2.6 0.82 -0.45 -0.73 -0.22 -1.2 1 0.68 -0.75 -1.4 0.11 1.1 -1.1 0.58 1.4 1.2; 10 | 1.6 1.4 -1.9 -0.99 -0.52 -0.46 1.5 -0.63 2.1 -1.1 -0.038 1.3 -0.71 0.0097 -1.2 -0.2 0.38 -0.91 -0.27 0.18 -2.3 -0.98 -0.075 -0.42 -0.66 0.33 0.55 -1.4 0.5 -0.76 0.64 0.18 -1.7 -1.1 -1.8 0.36 0.18 0.021 1.1 -1.1 -1 -1.6 -2.3 0.34 0.83 0.56 -1.1 -1.2 -0.52 -0.74 -2.2 
-0.023 0.54 -0.89 2.1 1.5 0.7 0.28 -1.7 -0.031 0.42 0.7 0.95 -0.12 0.07 -0.1 0.23 -0.95 0.19 0.23 0.015 0.15 1.6 0.48 -0.33 -2.4 -0.26 -0.71 0.29 -0.86 1.3 1.8 -1.5 -1 0.78 -0.23 0.39 -1.1 0.3 -0.022 2.1 0.54 -0.75 0.46 0.026 -0.1 1.8 -2 -0.45 2.4; 11 | -0.69 -0.81 0.43 0.21 0.33 -0.26 -0.55 0.54 -1.4 0.086 1.2 1.9 -0.011 0.071 1.2 -0.02 -0.33 -0.41 0.7 -0.54 1.8 -0.96 -0.36 -0.2 -0.15 0.86 0.083 0.33 -0.52 0.44 1.7 -0.037 -1 -0.71 -1.6 -0.037 0.76 0.42 -1.3 0.9 0.29 1.1 0.34 0.88 -2.2 -1.4 -0.68 -0.78 -1.4 0.18 1.3 -0.91 -1 0.59 -0.34 0.24 -0.8 -1.3 -1.1 0.99 1.7 0.43 1.6 -0.39 -1.8 -0.8 0.65 0.61 0.95 1.3 0.22 2.6 -1.4 0.4 2.7 -0.66 1.5 -0.59 0.47 2.4 0.32 0.63 0.36 -1 0.6 -1.6 0.053 -1.4 -0.52 0.62 -0.66 -0.51 -0.31 1.1 0.97 1.5 0.15 0.52 -0.63 2.3; 12 | 0.86 0.53 0.9 0.24 0.23 -1.2 -0.85 0.55 -1 -2 -0.7 -0.52 -0.00082 0.32 -0.015 0.28 -0.5 -0.51 -0.49 1.6 0.39 -2.4 -2.1 -1.5 0.25 0.68 1.6 0.6 -0.56 0.91 0.59 -1.6 -0.76 -1.2 2 -1.2 0.51 1.2 0.48 -2.1 1.1 -1.1 0.29 0.28 1.1 0.46 -0.28 0.27 -0.38 0.53 -0.38 -1 0.91 1.8 0.73 -1.4 -0.8 -0.72 -0.19 -0.6 2 1.5 0.43 -0.84 1.8 0.49 -1.1 1.8 -0.53 -0.013 1.7 -1.3 -1.5 -0.073 -1.6 -0.11 1 -0.26 1.8 -0.84 -0.48 0.083 0.48 0.28 0.94 -0.32 -0.49 -0.59 -0.16 1.9 -1.1 -1.3 -1.5 -1 -0.0053 0.17 -0.28 1.6 1 2.3; 13 | 1.3 0.22 0.73 -1 0.021 -1.3 -0.25 -0.2 1 -0.49 0.0075 0.1 -0.25 0.5 0.54 1.1 -0.036 1.6 1.9 0.83 0.02 -0.84 -0.14 -1.1 -0.077 0.55 -0.33 0.15 -0.75 -1.1 0.79 0.34 2.2 1.1 -0.072 -0.28 0.13 0.77 -1.6 0.28 0.24 0.39 0.66 -0.15 -0.0016 0.63 0.65 1.5 -0.46 -0.55 0.0025 0.37 1.6 1.3 0.29 0.66 -0.0076 -0.66 0.46 1.5 0.7 -0.51 -0.56 -1.7 0.65 1.2 0.2 0.089 -1.1 -1.3 -2.1 1 -0.075 1.3 -0.54 0.3 0.3 1.2 0.26 0.28 0.076 2.1 0.32 0.29 -1 -0.8 0.24 0.52 -0.098 0.082 0.95 -0.61 0.83 -2.8 1.4 0.8 1.7 0.73 -0.49 -0.27; 14 | -1.6 -0.92 0.58 -0.74 -1 0.93 0.66 -2.1 -0.39 0.46 -0.78 -0.81 0.4 1.3 -0.72 0.62 -0.17 0.081 1.1 0.23 -0.41 0.26 1.4 -0.82 1.7 1 0.8 -0.1 0.93 0.2 0.11 -0.13 0.43 -0.68 2.6 -0.16 0.28 -2.6 -1.4 -0.73 0.16 0.97 -0.58 -0.09 -1.6 0.38 -1.2 -1.1 -0.29 0.3 0.88 0.9 -0.98 -0.49 -0.85 -2.6 -0.73 0.2 -0.92 -0.81 -1.4 -0.0067 0.18 -0.45 -1.5 1.3 1.7 2.6 -1.6 -0.56 0.11 0.78 0.081 0.98 0.55 -0.58 -0.82 -1.5 1.5 0.82 -0.11 1.3 -1.9 -0.25 -0.068 0.69 0.72 -1.5 1 1.6 -0.4 -0.57 -0.61 1 1.8 -1.7 0.67 0.56 -0.4 0.5; 15 | -1.4 -2.2 0.04 1.1 -0.95 0.011 -0.85 0.13 -1.4 -0.32 0.59 0.68 -0.26 -0.55 -0.66 -1.8 -0.96 -1.1 -1.2 0.67 -1.5 -0.18 0.65 0.37 1.6 1.3 -0.78 -2.6 -0.25 0.76 -0.16 0.49 -0.44 -1.7 -0.24 -1.1 -0.98 0.29 0.29 -0.77 0.41 0.82 0.89 0.29 -1.2 -1 -0.6 0.63 -0.3 -1.2 0.58 1.3 1 -2.2 -2.5 -0.53 -1.5 -1.8 -0.65 0.65 0.36 -0.53 -0.77 -1.5 -0.38 -0.28 0.73 -0.68 1.2 0.76 -1.1 -0.83 -0.84 1.7 1.5 -3.1 -0.49 -0.39 0.32 1.2 1.4 -1.8 0.68 -0.22 0.082 -0.042 0.15 -0.087 0.43 -0.38 -0.38 0.086 0.96 0.077 0.89 0.17 -0.35 -0.81 1.4 -0.12; 16 | 0.57 -0.059 0.68 -0.13 -0.37 -0.65 -1.2 1.6 0.32 1.2 -0.25 -2.4 -1.7 0.26 0.31 0.7 1.3 -1.1 -0.67 -0.51 0.22 -0.17 -0.38 -0.59 0.63 0.044 -1.3 0.028 -0.15 -1.3 0.87 0.6 0.03 0.81 0.17 -2 -0.94 0.83 -0.14 0.15 1.2 0.037 0.17 1.2 0.21 -0.35 -0.48 -0.8 -1.6 -0.19 -1.6 -0.13 0.16 0.24 -2.4 3.2 0.87 -1.4 0.62 -1.3 -0.57 0.72 -0.94 -0.096 0.21 0.22 0.79 2.8 0.49 -0.91 -1.6 -0.59 -0.56 -0.41 -0.46 1.6 0.87 0.28 0.8 -0.064 0.71 -0.014 0.23 0.9 -1.8 0.32 0.13 -0.013 -0.026 -1.3 0.48 0.69 2 -1.9 1.1 -0.48 -0.26 -0.54 0.28 -0.0019; 17 | -0.4 -1 0.57 0.39 -1.2 0.81 -0.12 1 1.6 -0.63 0.48 0.99 -1 -0.013 0.11 0.81 0.44 1.7 1.3 0.86 -1.4 -0.12 -0.66 1.5 0.092 -0.31 0.67 -0.88 -1.3 -0.95 -0.19 -0.086 -0.32 1.4 0.92 -0.91 -0.013 
-0.0081 -1.1 -0.34 1.4 -0.93 0.85 0.81 0.22 0.44 0.98 -0.31 1.1 -0.3 -1.5 0.61 -0.59 -0.74 -0.35 0.44 -0.27 0.2 -1.3 -0.87 -1 1.1 -1.4 0.91 -0.77 0.63 0.6 -0.017 1.6 1.4 0.64 0.066 -0.028 0.57 -0.5 -0.41 0.36 0.83 0.58 0.65 0.37 2.5 1.2 -0.64 -1.8 0.51 -1 -0.35 -0.38 -0.72 0.44 2.1 0.29 -0.61 0.4 -0.73 -0.64 1.3 1.9 -0.43; 18 | 0.69 0.61 -0.26 0.088 -1.1 0.23 -0.065 -1.6 0.71 -2.3 0.67 0.22 0.24 -0.58 1.8 0.64 1.3 1.9 0.39 0.27 -0.84 0.17 0.25 0.14 -0.81 0.23 -1.4 -0.27 0.31 0.78 0.075 0.33 0.98 0.67 -0.18 -0.0056 0.35 0.86 -0.29 0.97 -1 -0.11 0.96 -1.4 -1 -1.6 1.8 -0.6 1.3 0.96 0.57 2 1.6 -1.8 -0.61 -1.1 -1.6 1.5 1 -0.47 0.7 0.5 -1.9 2.4 -0.11 -0.55 -0.058 0.27 -0.45 0.25 -0.4 -0.012 -1.2 0.74 1.2 1.4 -0.08 -0.98 1.8 -1.8 -0.6 -0.67 0.13 -0.18 -0.66 -1 1.3 0.99 -0.24 -0.56 0.38 -0.0029 -0.39 1.2 0.19 -2.1 -0.91 -0.48 -0.8 -0.19; 19 | 0.82 0.51 -0.38 -0.64 1.5 -0.99 0.49 -0.079 2 -1.2 -0.078 0.26 -1.3 2.1 -0.28 1.3 -0.5 1.6 0.39 0.62 -0.21 -0.5 -0.38 -1.9 -0.46 1 -1.3 -0.33 2.7 -0.0063 -0.53 -0.34 0.018 0.14 -0.52 -1.7 -0.89 0.77 -0.58 -0.11 0.21 -0.8 1.3 0.12 -0.45 -0.7 1.4 1.3 -0.13 -0.53 -0.91 2.3 -0.52 0.45 -0.41 0.89 -0.39 0.37 0.86 0.22 0.48 2.8 -0.065 0.52 0.34 0.23 -1.1 -0.91 1 0.12 -0.4 -0.077 0.73 0.22 0.041 -1.4 0.75 -0.1 -0.94 0.06 -0.85 0.26 0.18 0.72 1.4 0.099 1.2 0.64 -1.2 0.62 1.1 -0.09 0.58 -0.58 -0.69 -1.8 0.72 -1.8 -0.56 0.99; 20 | 0.71 1.7 -0.3 -0.56 0.056 1.3 -0.6 -0.68 0.5 1.1 0.89 1.2 -0.35 -0.26 2.2 0.33 -1.1 -1.3 -1.7 -1 0.76 -0.71 -0.53 -0.45 -1.4 1.2 -0.61 -1.2 0.29 0.52 -0.69 -0.32 0.82 -0.86 1.4 1.3 0.81 1.3 -0.9 1 0.59 -1.7 -0.064 -0.22 1.4 -1.1 0.91 0.86 -0.74 -0.9 -1.6 -0.37 1.2 0.58 -1.4 -0.28 -0.14 -0.44 -0.64 1.9 -0.19 -0.16 0.67 0.41 1 0.36 2.1 -2 2 0.38 0.084 -1.6 0.06 1.3 0.75 0.9 -1.8 0.13 -0.92 -0.76 0.66 -0.37 -0.61 0.3 2.1 -0.12 -2.6 2.9 0.76 -1.3 0.86 -0.25 -1.1 -0.26 0.011 -0.052 -0.0073 -0.93 1.7 0.47; 21 | 1.3 0.59 -1.5 0.44 -1.2 0.29 -0.15 -1 1.9 -0.11 2.3 -0.27 -0.94 -1.4 1.5 -0.67 0.81 -0.21 0.23 1.5 0.38 0.51 0.055 -0.65 -0.37 -0.54 -1.5 0.58 -1.4 1.4 -0.27 -0.38 0.7 -0.75 -0.87 -0.6 0.11 1.2 0.25 -0.48 -0.26 -0.9 1.3 0.57 -0.46 1 0.33 -2.1 0.21 -0.89 -0.36 2.2 1.6 0.86 0.23 1 -2.3 -0.049 0.66 0.11 -0.38 0.43 0.21 1.1 -1.4 0.52 -1.4 -0.32 0.6 -0.07 -0.44 1.7 0.15 0.63 1.2 0.54 1.2 0.063 0.38 -1.7 1.5 1.3 -1 1.5 0.17 0.69 -2.1 -1.2 1.2 -0.12 -1.2 -0.87 0.061 0.0047 -1.1 -0.086 -2.8 0.92 -0.64 -1.4; 22 | 0.67 -0.64 -0.23 -0.95 -0.041 1.5 -0.43 -1.2 -0.34 0.38 0.52 -0.13 -1.2 1.8 -1.9 -0.15 0.041 -0.2 0.69 0.43 -1.3 -0.42 1.3 0.1 -0.47 0.91 0.56 0.24 0.25 0.48 -1.2 -0.95 -0.23 1.2 0.81 -2.1 2.7 0.96 -1.5 0.069 2.5 0.59 0.23 -0.3 0.033 1.7 0.07 -0.36 -0.4 0.28 -0.4 -0.16 -2.1 -0.27 0.21 -0.36 -1.4 0.078 1.3 -0.41 -0.89 -2 -0.0081 0.43 -1 -0.62 0.46 0.59 0.018 -0.58 -0.56 -0.47 1.6 -1.1 0.3 -0.47 -0.061 0.37 0.91 1.1 -0.81 -0.65 -0.3 -0.044 -1.7 1.9 0.39 0.16 -1 -1.1 0.4 0.42 0.012 -0.039 1.9 -1.2 -0.15 0.041 -1.4 0.27; 23 | 1.2 0.38 0.12 0.78 -1.1 1.1 -0.079 0.29 -1.1 0.94 -0.012 -1.3 -1 0.33 -1.7 -2.4 -0.76 0.31 -0.64 -1.9 1.5 0.23 -2.5 -0.22 1.8 -0.17 -0.28 -0.35 -1.4 -0.79 0.25 0.23 -0.11 1.2 -0.51 0.11 0.41 -1.7 0.31 0.4 0.86 0.55 -1.4 1.1 0.8 0.71 -1.5 0.55 0.065 -0.75 -1.2 -0.7 2.9 -0.42 -0.72 1.3 -1.8 2 0.31 0.51 -1.8 -0.55 0.02 1.3 -0.64 1.3 0.39 0.83 -1.6 0.47 0.88 0.095 -0.78 -0.45 1.1 2.4 -0.39 -0.1 0.15 1.5 -1.3 0.062 1.5 -0.03 0.28 0.33 -0.86 0.79 1.7 -2.8 -0.58 -0.13 -0.17 -0.51 -0.17 -0.75 0.58 0.41 -0.76 2.6; 24 | -1.2 -1 0.31 0.57 -1.3 -0.68 1.5 -0.43 -0.21 -2.1 0.91 -1.7 -0.4 -1.1 -0.57 0.47 -0.089 -0.57 -1 0.47 0.033 -0.96 
0.58 -0.28 0.75 -0.34 -1.3 0.89 0.15 0.75 0.1 1.2 0.13 -0.61 0.74 1.5 -1.3 -0.99 -2 1.1 -0.85 -0.42 -0.15 -0.18 0.9 -0.75 -0.42 -1.6 -1.8 1.6 -1.1 0.56 1.4 -0.21 0.76 1 1.1 -0.073 0.86 -1.2 -1.6 -1.9 -0.56 -0.19 0.17 0.97 2.1 -1.7 1.2 1.3 -0.81 0.29 1.6 -0.73 0.77 2 0.61 -0.7 -0.2 0.24 1.7 -0.74 1.8 -0.38 0.39 -0.095 -1.2 -0.58 -0.49 0.25 -0.98 0.54 -0.69 -1.2 -0.73 0.95 0.75 0.6 0.16 -0.054; 25 | -0.02 -0.02 1.4 -0.82 -0.26 -1.3 -0.61 0.056 1.2 -0.64 0.056 -0.7 0.17 0.62 -0.19 0.12 -2 -0.98 -0.19 1.3 1.9 -0.15 -1 -0.73 0.065 0.54 -0.89 1.6 -1.7 -0.17 -0.041 -0.58 -0.8 0.81 0.85 0.053 0.38 0.69 0.53 0.62 0.81 0.062 -0.5 -1.5 0.14 0.23 -0.021 -0.21 1.7 0.57 0.29 -0.05 1.1 -0.17 -1.6 0.21 -0.14 0.94 0.13 -0.096 -1.2 -0.11 1.9 0.13 1.3 -2.4 -0.32 -1.9 0.68 1.6 -0.26 0.92 1.1 0.35 -1.3 0.8 0.64 -0.39 1.5 -1.1 0.1 -0.18 -1.3 -0.55 -0.099 0.031 2.6 0.53 0.17 -0.86 0.12 0.88 -0.99 0.71 -0.99 -0.18 -0.17 1.9 0.38 0.47; 26 | -0.16 -0.048 -0.35 -0.27 0.95 -0.073 -1.3 -0.37 -1.1 -0.7 -1.1 0.28 -0.12 1.3 0.0089 -0.59 1.1 -0.45 -1.1 0.64 -1.2 0.74 0.94 -0.065 -0.29 0.93 -0.99 -1.1 0.72 -0.82 -2.2 -0.5 -0.24 0.22 -0.83 0.16 0.5 -0.97 0.34 -0.29 0.7 0.46 -1.7 1.4 -1.6 -0.22 0.23 -0.43 0.33 0.32 -1.9 1.2 -0.77 0.22 -1.1 -0.3 1.1 -0.08 0.017 0.45 -0.42 -1.3 -0.22 -0.66 1.9 -1.1 1.5 -0.44 -0.78 -0.7 0.49 0.51 -0.17 -0.51 -0.51 0.031 1 0.016 -0.62 2.4 -0.8 1.1 0.39 0.93 0.18 -0.61 -0.9 1.7 0.35 1.1 0.068 -1.3 -0.05 0.73 0.89 -0.066 -0.58 0.71 0.18 -2.1;]; 27 | 28 | y = X'*c_true + 0.1*(sqrt(n))... 29 | *[-0.8;-0.46;0.19;0.89;-1.6;-0.32;-0.7;-0.74;0.37;1.4;0.46;0.66;1.1;0.98;-1.3;-0.023;0.13;2.4;0.9;0.076;0.36;-2.1;-2.3;-0.37;1.3;0.56;-0.18;-0.036;1.9;1.3;-0.36;-0.33;0.083;0.43;-1.2;-2.7;-0.54;2.2;-0.61;1.4;1.2;0.74;-0.12;-0.031;-1;-1;0.63;0.87;2.1;-0.59;0.59;1.5;2.1;-0.75;-1.5;-0.21;0.49;-0.32;0.82;-1.4;1.1;0.98;0.49;1.3;0.59;0.45;-1.4;-0.92;-1.4;-1.5;0.097;-0.23;-0.32;0.63;0.039;-0.016;0.45;-0.52;-1.1;-0.048;-0.5;1.6;-0.24;-0.96;-0.15;1.7;1.6;-1.1;0.097;0.25;2.2;0.31;2;-0.12;-0.27;1.4;-0.074;-0.36;-0.78;-0.25]; 30 | 31 | % Reorder measurements, then censor 32 | [y, sort_ind] = sort(y); 33 | sd = std(y(M+1:K)); 34 | X = X(:,sort_ind); 35 | D = (y(M)+y(M+1))/2; 36 | y = y(1:M); 37 | X = X'; 38 | 39 | 40 | % plain least squares, using only the M uncensored measurements 41 | cvx_begin 42 | variable c_ls(n) 43 | minimize(sum( (X(1:M,:)*c_ls - y).^2 )) 44 | cvx_end 45 | pure_ls = norm(c_true-c_ls,2)/norm(c_true,2) 46 | 47 | 48 | % fit accounting for the censored data 49 | cvx_begin 50 | variable c_hat(n) 51 | variable z(K-M) 52 | minimize( sum( (X(M+1:K,:)*c_hat - z).^2) + sum((X(1:M,:)*c_hat - y).^2)) 53 | subject to 54 | z >= D 55 | cvx_end 56 | 57 | censored_ls = norm(c_true-c_hat,2)/norm(c_true,2) 58 | -------------------------------------------------------------------------------- /w8-geometric_problems/max_vol_box_norng.m: -------------------------------------------------------------------------------- 1 | m=70; n=40; 2 | 3 | A=[1.2 -1.5 0.29 -1.1 0.47 -0.53 1.9 -0.59 0.4 -0.38 0.73 -0.49 0.17 0.66 -0.3 0.43 -0.17 -0.74 1.7 -1.7 0.25 1.1 -0.49 -0.033 -0.5 0.33 -0.68 0.6 -0.47 -1.8 -0.38 1.3 -1.2 0.71 0.22 -0.75 0.65 -0.64 1.6 0.42; 4 | 0.56 -0.69 1.1 -0.94 0.64 -1.6 -0.32 -0.032 -0.43 -1.7 -1 -0.85 -0.25 -0.18 1 1 0.13 0.14 -0.34 0.11 0.5 0.75 -1.3 -0.41 -1.3 0.23 -1.8 -0.84 0.95 0.62 0.019 0.44 0.53 -0.6 0.77 0.72 -0.93 0.22 -1.8 -0.084; 5 | 0.34 -0.022 -0.33 0.088 1.4 0.82 -0.25 -1.7 -1.5 0.64 -0.73 -1.4 -1.3 -0.85 -1.4 -0.39 0.085 -1.7 -1.6 0.76 0.0044 -1 -1.4 -1.5 0.6 -0.72 -0.16 -1.5 0.71 0.61 -0.12 -2.2 0.48 -0.029 0.13 -0.83 1.6 -0.13 0.7 0.54; 6 |
1.4 0.18 -0.074 -0.7 0.043 0.45 -0.31 0.81 0.66 1.2 0.23 -0.48 -0.36 1.6 0.27 -0.18 -0.42 -0.63 0.41 -0.89 -0.55 1.8 -1.3 0.52 0.37 0.014 -0.63 0.079 -1.3 -1 -0.73 -0.034 -1.7 0.95 -0.39 0.71 0.95 -0.88 -0.92 -1.8; 7 | -0.49 -0.53 -0.16 0.3 -0.025 -1.3 2.1 0.5 -0.27 0.91 -1.3 -0.86 -0.36 -0.56 -0.13 2.1 -0.71 0.29 1 -0.085 -1.1 0.24 0.054 0.38 -0.082 -0.88 -0.19 1.2 0.36 0.056 0.95 -0.16 0.11 0.33 -0.38 1.3 1.1 -0.34 1.7 -0.14; 8 | -0.067 -1.3 0.83 -0.064 -0.56 0.46 0.47 2.4 -0.32 -0.37 1 -0.41 0.74 -1.9 -0.095 1 -1.8 -0.78 1.6 0.32 -1 -1.9 2.1 -0.83 0.14 -1.4 0.46 -0.21 -1.3 -1.4 -0.39 0.99 0.17 -0.33 1.6 0.87 -0.25 -0.69 -0.88 -0.34; 9 | -1.9 -0.21 -2.8 0.64 -1.1 0.21 -0.21 0.84 0.56 -1 -0.74 -1.3 0.91 -0.53 -0.53 -0.00074 0.99 2.5 -1.2 -0.4 0.21 2.7 0.83 -0.97 2.4 -2.2 -0.96 -0.077 0.31 0.39 0.9 0.14 0.64 -1.7 -0.48 -0.87 1.1 0.8 -0.5 -0.27; 10 | -0.34 0.34 1.5 -0.36 -0.6 0.61 -1.6 0.71 -0.27 -0.38 -0.066 0.084 0.26 -0.72 -0.47 -1.4 0.69 -0.3 -0.43 -0.53 0.92 -0.33 1.3 1.5 -0.86 0.86 -0.99 -0.95 -1.4 -1.6 -1.5 0.24 0.85 -0.53 1.1 0.9 0.27 0.56 1.1 0.077; 11 | -0.034 -2.4 0.27 0.25 2 -0.76 -0.82 -1 -0.21 1.5 0.33 -0.64 0.86 0.15 0.028 0.31 0.82 -0.13 1.5 0.76 -1.2 0.39 1.3 -0.98 -2 1.5 -0.9 -0.091 -1 2.8 0.74 0.66 -0.39 0.51 -0.37 -2.5 -0.68 -0.28 -2 1.3; 12 | -0.44 -1.5 0.035 0.27 -0.75 1 -0.16 -0.11 0.25 -0.25 -0.85 -0.78 0.24 0.37 -0.91 0.19 -0.83 1.7 -0.82 0.96 0.27 -0.13 0.4 1.1 0.19 0.13 0.7 -0.75 -1.2 -0.82 -1.1 1.3 -1.4 0.13 0.44 -0.59 1 -0.18 -1.9 -1.2; 13 | -0.16 -2.6 0.44 -1.2 -1.1 0.85 1.6 -0.31 -0.45 -0.17 0.38 0.73 0.52 0.31 -1 1.3 0.63 -0.0079 -0.94 -0.3 -0.51 0.73 0.092 -1.8 -0.85 1.5 0.2 0.3 1.2 -0.52 0.63 -0.78 0.038 0.074 -1.6 -0.59 -0.36 -0.39 -0.0077 -0.91; 14 | -1.4 0.74 -0.98 0.99 1 0.56 0.74 -0.22 1.1 -0.2 0.12 0.012 0.87 -0.088 -0.61 -1.6 -1.9 0.85 -1.4 0.35 0.39 -0.22 -1.3 -0.14 -0.83 -1.5 0.18 0.86 2.2 -0.7 -0.35 -0.72 0.13 -0.26 2.8 -0.84 0.15 0.088 -0.41 -0.3; 15 | -0.39 0.3 -0.2 0.19 -0.86 0.68 -1.3 -1.2 -0.49 0.9 -0.28 1.5 -0.35 0.97 0.62 -0.36 0.081 -0.32 0.8 1.7 1.7 -0.42 -0.22 1 -0.18 1.9 0.39 -1.3 -1.5 -0.36 0.12 2.4 -0.12 0.46 -1.2 -0.76 -0.36 0.51 0.18 -0.55; 16 | 1.6 1.5 -0.27 0.6 0.19 0.41 -0.34 0.69 0.88 0.41 -0.049 -1.8 -1.5 0.46 -0.47 0.65 0.87 1.8 0.084 -1 -0.045 -0.047 -1.8 -2.1 -0.77 -0.88 -1.2 1.4 -0.23 0.39 -0.38 2.3 -0.47 -0.081 -1.4 -0.22 1.7 -0.37 0.19 -0.58; 17 | -0.38 -0.72 -0.42 1.2 0.11 2.2 -0.58 -0.26 0.25 -0.76 -0.025 -0.67 1.4 0.82 -1.5 1 1.2 -0.22 0.12 -1.3 0.52 -1.6 0.2 -0.59 0.4 -0.089 -1.5 -0.14 0.54 -0.2 -0.75 2.8 0.44 1.5 2.1 0.94 0.77 0.75 -1.1 1.1; 18 | 1.7 -0.71 0.49 -0.11 -0.41 1.9 -1.1 -0.01 -0.26 -1.3 -1.3 -0.72 0.51 -1.8 1.2 0.18 0.98 1.1 0.49 0.11 -0.84 1.1 -1 -1 -1.8 -0.6 -0.97 -0.3 -0.46 -2 -0.95 0.42 0.53 -1.1 -0.46 0.19 -0.8 -1.2 -0.98 1.7; 19 | 1 -1.9 -0.4 -1.6 1 0.75 -0.17 0.82 2.9 -0.76 -0.71 -0.1 -0.93 0.92 -0.96 0.8 0.56 -0.36 0.73 0.68 -1 1.1 -0.84 0.96 0.19 0.7 0.67 1.2 -1.8 0.11 0.041 -0.15 -0.57 2.4 0.057 0.49 -0.57 1.3 0.59 -1; 20 | 0.063 -0.29 -0.14 0.21 -1.2 -1.6 1.1 -0.18 0.95 -1.3 0.21 -0.16 -0.85 -1.9 0.51 0.32 1.4 -1.4 -0.16 0.82 -0.39 -1.3 -0.41 -1.1 0.76 1.3 -1.4 -1.1 0.36 -0.98 0.13 -1 -0.56 0.94 -1.1 -2.6 0.28 0.34 1.2 0.13; 21 | -1 0.019 -0.1 1.1 -1.3 -0.64 1.3 -0.73 -1.6 0.72 0.39 0.24 -1.4 -0.52 -0.2 -1.8 -1.3 0.67 0.69 -1.9 -0.74 -1.2 -0.52 -0.025 0.022 0.097 0.04 3 0.86 -0.96 -1.3 1.4 -1.9 -1.5 -0.4 -0.62 0.49 0.85 -3.8 -0.41; 22 | -1.4 -0.34 0.2 1.4 -0.21 -0.58 0.46 0.33 -0.027 0.69 -0.13 0.76 0.98 1 0.27 0.12 -0.19 0.2 -0.37 1.1 0.9 -1.2 0.26 0.6 -1.7 0.073 -0.51 -0.24 1.2 0.75 -0.2 0.66 
-0.63 -0.34 -0.16 -0.4 1.1 0.64 1.1 -1.2; 23 | 1.4 -1.8 -0.12 -0.26 -1.3 -0.45 -0.96 -0.17 -0.88 -0.18 -0.94 0.86 0.022 -0.065 -1.3 -0.93 0.47 0.59 -0.27 0.021 0.92 0.22 -1 -0.32 0.12 -0.49 -0.65 -0.1 -1.3 0.095 1.4 -0.82 -0.47 -0.33 -0.056 0.54 -1 -1.3 -0.47 1.1; 24 | -0.66 0.35 -0.16 1.3 0.81 -0.17 -1 0.3 -0.23 1.3 0.96 -0.9 0.92 -0.1 -0.53 -0.55 0.33 1.3 -1.6 -2.3 -0.28 -1.4 -0.1 -0.45 0.11 -0.83 -1 -0.47 0.1 0.12 -0.7 1.9 -0.61 0.24 1.1 -1.2 0.41 -0.67 1 0.18; 25 | -1.6 -1.3 -0.49 0.19 1.2 -0.42 -0.23 -1.3 0.93 0.96 0.62 0.12 -0.19 0.44 -0.28 0.5 -0.46 -0.34 -1.2 -0.42 -0.028 0.36 -0.064 -1.8 -0.82 0.8 0.9 -0.8 3.1 0.72 -0.21 -0.82 0.49 -1.2 -1.8 0.012 -0.37 0.18 0.5 -2.7; 26 | 0.14 0.77 -0.52 -0.06 -1.2 -0.63 0.53 -1.1 -0.066 -0.37 -0.25 -0.22 -0.082 -0.62 -1.6 0.039 -0.85 0.3 -0.3 0.21 -0.17 0.63 0.7 -0.79 0.33 1.4 -1.8 -0.25 -0.95 0.15 -0.69 -0.49 -0.3 0.7 -1.1 -1.2 -0.96 -1.7 -1.2 0.23; 27 | -2.6 0.075 -0.16 -0.53 -0.36 -0.18 0.66 0.47 0.69 -0.86 -1.1 -0.45 0.65 0.25 0.028 0.0015 1.5 0.58 -1.5 0.43 0.3 -1.3 0.76 -1.1 0.61 -0.28 0.46 -0.11 1.4 -0.92 -0.49 2 1.2 0.77 1.1 0.84 -0.017 1.4 0.25 -0.75; 28 | -0.55 2.5 0.022 0.24 -1 0.22 2.1 0.96 -0.49 0.75 0.61 0.87 0.66 0.083 0.36 0.017 0.56 -0.63 1.7 0.12 -0.37 0.071 -1.2 -0.37 -0.96 -1.2 -0.23 1.2 -0.87 1.1 0.21 -1.9 1.9 0.1 0.31 0.69 1.3 0.98 0.38 0.86; 29 | 1 1.1 -1.1 0.82 -0.76 0.54 -1.4 -3 -0.62 0.74 0.16 -0.11 1.4 -0.65 0.39 1.7 -1.9 0.52 -2 0.18 -0.82 -2.3 2.8 2 0.6 0.09 -1.5 0.45 2.4 -0.25 2 2.3 -0.52 -0.0025 -1.9 -0.76 -0.29 0.53 -2.4 -0.066; 30 | 2 1 -0.046 1.7 1.7 -1.4 -0.082 1.3 -0.2 0.96 -0.7 0.93 -0.45 0.54 0.051 1.1 -0.83 -1 -1.4 0.089 -0.74 0.1 0.26 -0.5 -1.2 -0.044 1.1 -0.0037 -1.1 -0.84 -1.2 -1.3 -1.5 -1.1 1.6 0.24 1.2 -1.5 -0.74 -1.3; 31 | 2.2 1.7 -0.56 0.94 -0.46 -0.24 0.2 -1.2 -0.1 0.19 -0.5 0.069 0.26 0.81 -0.83 -0.22 0.76 -0.1 0.26 0.54 1.7 -1.6 0.16 -0.019 0.3 -0.2 0.83 -0.57 0.6 0.73 0.54 -0.11 -0.52 0.32 0.57 -0.36 1.3 -1.4 0.72 -0.59; 32 | 1 1.3 0.4 -0.0041 0.51 0.36 -1.7 0.4 1.5 -0.35 0.0038 0.24 0.12 0.76 0.84 0.75 -1.2 -0.31 0.18 -1.9 -0.58 -1.2 3.8 0.073 -1.2 -0.52 -0.56 0.12 -0.028 1.2 -1.4 -0.42 0.65 -1.3 -0.57 -0.3 2 0.32 1 -0.051; 33 | -0.16 -0.9 0.32 0.66 -1.2 -0.67 -0.53 0.77 -1.6 -0.62 0.4 -0.33 -1.4 -0.21 -0.86 -0.19 -0.35 -1.1 -0.24 0.88 -0.85 -0.57 0.95 -1.1 0.19 -0.45 -0.95 0.34 2.3 0.25 -2 0.0073 -0.83 -0.25 0.058 0.49 0.58 -0.34 0.18 -0.83; 34 | 1.1 0.66 0.18 -0.3 -0.35 -0.92 0.69 0.63 1.3 1 -0.11 0.23 -0.53 0.97 0.65 -0.57 -1.8 2.5 1.8 -0.2 -0.4 0.66 0.25 0.11 -2.2 0.15 -0.34 -0.51 0.96 0.84 1 -1.9 -0.22 -0.35 0.4 0.76 -0.27 0.17 -0.15 0.83; 35 | 0.13 0.075 -0.7 -1.1 -0.072 1.1 0.037 0.23 -0.0054 -1.3 -0.55 1.3 -1.7 -0.026 -2.2 -0.65 -1.1 2 0.58 0.63 0.72 0.16 -0.18 2.1 -0.43 -0.97 0.62 0.28 1.3 0.1 2.3 -0.073 -0.3 -3.1 -1.3 0.37 2 1.4 -2.2 0.77; 36 | -0.018 0.19 0.66 0.0003 0.63 -1.5 -0.56 0.31 -0.28 -0.048 0.7 1.2 0.15 -0.41 1.1 -0.88 -0.66 0.71 -0.55 0.83 2.2 0.64 -0.25 1.2 0.88 0.24 0.93 -0.91 -1 -0.27 -0.41 2.2 0.36 1.2 0.076 0.39 -0.59 -0.46 -0.54 0.99; 37 | 0.43 -0.52 -0.12 -0.15 0.45 -0.34 1.2 0.25 -0.68 0.97 -0.84 2.3 2 1.1 -1.3 -0.77 0.65 1.5 -0.6 1 0.38 -0.76 -0.014 3 -2 0.82 -0.068 0.67 1.1 -1.3 0.27 -0.83 -2.1 -0.6 -0.99 -1.4 0.79 0.22 0.13 1.3; 38 | -1.4 1 -0.49 0.21 0.18 -0.87 -0.55 0.012 -1 1.8 -0.47 0.71 0.46 -0.31 1 -2 0.5 -0.19 0.36 -0.42 1.5 -0.91 -1.3 0.45 1.9 -0.1 -0.11 -0.92 0.89 0.64 0.17 0.89 -0.17 -0.17 -1.2 1.1 0.6 0.99 0.28 0.14; 39 | 1.7 0.94 1.4 1.7 -1 -1.8 -1.5 1.5 1 -0.6 0.27 -0.27 -0.77 0.31 0.34 1.4 -0.36 -2 1.2 1.5 -1.5 -0.2 0.29 -0.47 -0.27 0.018 1.2 1.1 
-2.1 -1.4 -0.83 -0.28 0.48 0.25 1.2 -1.3 -0.31 0.41 0.059 2.2; 40 | 0.66 -1.1 0.63 -1.7 -0.38 1.2 -0.87 1 -0.52 -0.84 -0.19 -0.58 1 0.36 -0.27 -0.16 0.97 -1.1 -0.89 -1.1 -0.67 0.33 0.42 1.5 0.15 0.21 1.1 -1.9 -0.2 -1.9 1.4 0.031 -1.5 -0.4 -0.57 -0.9 -0.68 0.1 -1.3 1; 41 | -0.71 -0.65 0.12 1.1 1 -0.39 1.2 0.18 -0.23 -0.22 -0.72 0.044 0.17 1.8 -1.2 -0.17 -0.55 -0.5 0.16 0.17 0.71 0.49 0.59 0.6 -0.77 -0.57 1.1 -0.76 0.52 -2.5 0.69 0.97 1.5 1.3 -0.0059 -0.01 -0.36 -1.1 2.3 -1.5; 42 | 1.6 1.7 0.76 -1.5 0.42 0.3 -0.66 0.27 -0.065 1.6 -0.77 1.2 -0.093 -1.1 0.68 -0.89 0.16 2.2 -1.6 0.58 -1.7 1.1 0.29 0.15 0.35 0.076 0.66 -1.1 -0.19 -0.75 -0.23 2.7 -0.83 -0.74 1.3 0.13 -0.42 -0.029 0.59 0.013; 43 | -2 0.28 -1.3 0.17 0.062 -0.099 0.66 -0.87 0.49 0.087 1.6 -0.25 1.2 0.059 -0.19 0.32 0.41 0.21 -1.1 -0.55 -0.69 -1.4 1.4 1 0.35 -0.27 0.42 0.11 -1.4 -0.88 -0.44 2.7 0.56 0.29 -0.34 2 -0.65 1.3 -0.35 -0.76; 44 | 0.81 -0.083 -0.098 -0.37 0.45 -0.81 -0.22 -1.6 1 0.029 -1.3 0.12 1.9 -0.27 -1.4 0.71 0.0037 -0.42 -0.7 -0.76 -0.27 -0.72 0.34 0.96 0.36 0.58 -1.7 -1.3 0.42 0.26 -0.66 -2.9 -1.8 -0.92 -0.68 0.72 1.2 0.98 0.64 0.078; 45 | 0.24 0.37 -0.32 0.57 0.55 0.74 0.74 1.2 0.28 0.37 -1.2 0.32 -0.6 0.89 1.3 -0.63 -0.84 2.2 0.15 0.98 0.65 0.31 -0.92 -0.25 -1.8 -0.28 -0.3 0.81 0.95 -2 1.1 0.93 -2.1 -0.88 -0.21 -0.39 -0.0034 -0.04 0.8 0.23; 46 | 0.32 -0.81 -1.7 0.51 0.44 1 0.65 0.49 -1.6 -0.16 0.59 -0.31 -0.85 0.32 1.2 0.3 -0.8 -0.17 0.3 -1.6 -0.34 -0.43 -1.2 1.2 -1.2 -0.33 1 1.9 1.7 0.55 -2.4 -1.7 0.56 -0.2 0.81 1.3 -1.3 1 -0.57 0.72; 47 | 0.52 1.9 0.38 0.47 0.28 0.8 0.039 0.28 -1.2 -1 1.8 -0.7 -1.1 -0.47 0.98 -0.73 1 -0.37 -0.78 -0.88 0.34 -1.4 -0.74 0.47 1.2 0.12 0.53 0.79 -0.93 1.6 1.9 -1.8 -0.16 2.1 0.2 -1.6 -0.54 -0.98 0.15 0.75; 48 | 0.15 1.9 0.12 0.67 0.25 0.63 0.58 0.071 -0.09 -0.099 -0.13 -0.1 0.14 1.2 -1.4 -2.6 -1.3 0.5 0.52 0.2 1.6 -0.31 0.5 0.2 0.72 -1.6 0.38 -0.38 -0.15 0.31 1.3 0.82 0.059 1.3 0.29 0.37 -1.2 -0.94 0.84 0.036; 49 | -0.43 -2.5 -0.45 -1.6 0.87 -2.3 -0.36 0.27 -4.1 -0.93 -2.3 1.3 -1.5 -1.3 0.5 1.7 1.3 0.15 1.1 0.19 -0.48 0.52 0.46 0.69 0.32 -1.2 -0.72 -1.9 -0.23 -0.49 0.81 -0.97 -0.0034 -0.036 1.1 -0.22 0.56 0.6 -0.44 0.93; 50 | 0.68 -0.4 -0.71 -1.7 -0.6 -0.24 0.61 -1.1 -0.21 0.37 0.047 -2.2 0.33 0.13 0.18 -0.95 0.079 -1 -0.14 -0.73 0.18 -0.22 1.6 0.47 1.3 -0.94 -1.1 -0.69 1.6 -0.84 -0.12 0.22 -0.68 -0.096 0.4 -0.22 0.93 -1 -0.57 0.42; 51 | 0.84 0.11 0.47 0.0061 -0.073 -0.81 1.1 -0.26 -0.37 1.5 -1.9 0.0028 -1.1 0.85 0.52 -0.4 -0.05 0.71 0.17 1.7 0.28 0.0051 0.85 -0.39 -2.1 -0.34 -0.58 0.29 -2.8 0.22 0.16 -0.39 -0.77 -0.043 2.2 0.065 1.5 -1.5 0.35 0.23; 52 | 0.24 0.52 -1.9 -0.43 -0.76 0.38 -0.55 -0.74 -0.28 -0.57 -1.1 -0.7 1.1 1 -1.2 -0.076 0.95 0.39 0.012 -1.3 0.56 -1.2 -1.6 -0.37 2.4 -0.93 -1.1 -1.1 0.3 0.47 -1.1 -0.8 0.32 -0.54 0.16 0.54 -0.71 0.57 0.39 -0.068; 53 | 0.32 -0.59 -0.24 -0.83 -1.2 -0.19 -0.69 0.6 0.86 -1.1 -0.51 2.3 -0.95 1.2 -0.86 0.9 -1.6 1 -1.1 0.13 -0.59 2.1 0.95 -1.5 0.41 0.55 0.12 0.43 -2.1 -0.59 -2.4 1.1 0.79 -0.49 1.3 -0.26 0.14 -1.3 -0.55 -0.36; 54 | 0.73 -1.1 -0.71 0.11 -1.7 -0.58 -0.45 -0.41 -0.42 1.2 -1.9 -0.64 -1.6 -0.4 0.69 -0.87 -0.33 0.86 1.4 -0.69 -1.3 -1.3 0.84 -1.7 -0.25 0.21 -1.4 -0.14 -0.4 -1.7 0.43 -1.5 0.72 -0.19 0.26 -0.91 -0.96 -0.08 -1.3 -0.17; 55 | 0.099 -1.2 0.23 0.44 -0.41 0.14 0.74 -1.9 0.8 0.74 1.7 -0.31 -0.96 0.4 0.81 2.6 -2.5 -0.58 1 -0.47 -0.13 0.37 -1.1 -0.28 -0.13 -0.044 1.6 -0.13 -0.26 -1.1 0.29 0.22 0.21 1 0.56 -1.6 -0.28 2.2 -0.23 -0.56; 56 | 0.056 0.087 0.098 -0.078 -1.6 -0.59 -2.4 -0.21 1.8 0.93 1.3 -0.81 -0.65 -1.5 0.59 -0.51 -0.66 
0.22 0.28 -0.24 0.69 0.27 -1.3 -0.89 -0.64 -0.36 -0.2 0.054 0.59 -0.35 -1.1 -0.082 1.2 0.3 0.18 0.06 1.6 1.3 0.41 -0.4; 57 | -0.61 0.2 0.044 -0.26 0.33 0.73 -0.74 -0.96 0.35 0.48 -1.8 -0.77 1.7 0.63 0.38 -0.16 -1.9 0.61 0.19 -1.2 -1 0.041 0.45 1.2 1.7 -0.67 -0.89 0.39 -1.9 -1.4 -0.58 0.14 1.7 0.095 0.6 -0.13 0.53 -0.2 -0.82 0.37; 58 | -0.27 1.7 -0.55 0.96 0.28 -0.8 1.9 0.81 -1 -0.84 1.4 1.3 -0.074 -0.89 3.1 -0.76 0.2 -1.1 -0.11 0.54 0.23 -0.15 -1.9 1.9 -0.1 1.3 -2.3 -1.2 0.15 0.34 3 1.5 -0.41 -0.75 1.5 -2.5 0.82 -0.75 0.26 -0.035; 59 | -0.031 0.71 1.9 0.86 -0.26 -1.1 0.58 -2.1 1.4 1.8 -1.4 -1.9 -0.98 0.068 -0.024 -0.49 -1.6 -1.4 1.1 0.96 1.7 0.67 -0.89 -1.3 0.71 1.2 0.2 0.15 -0.56 1.4 -0.88 -0.57 -1.4 0.76 1 -0.02 -0.67 -1 0.94 0.25; 60 | -1.3 0.012 0.18 1.7 0.86 0.17 -0.064 0.27 0.058 0.2 -0.73 -0.1 -0.43 0.15 -0.12 2.1 0.26 1 2.6 -0.16 0.76 -1.1 -1.4 0.69 -0.56 -0.26 1.5 0.063 -0.7 0.59 0.62 -0.84 0.74 1.5 -0.66 -1.8 1.4 0.53 -0.92 0.44; 61 | 1.4 1.6 -0.86 0.71 -0.0056 -2 0.39 1.5 0.55 1.2 -0.61 -1.3 0.99 -0.045 1.8 0.85 0.26 0.039 -1.5 -0.69 -0.29 0.98 0.27 -0.23 1.3 0.46 1.1 -0.47 -1.1 0.42 0.58 0.71 1 -0.28 1.5 -1.8 -0.77 -0.2 0.74 -0.58; 62 | 0.54 -1.1 -1 0.5 1.3 1.1 0.03 -0.93 1.1 0.91 -1.3 -0.016 2.2 -0.45 -0.85 -0.82 0.4 1.1 0.47 -0.46 0.5 0.4 -1.5 0.86 -1.4 -0.39 -1.9 0.64 -0.6 0.013 0.71 2.1 -1.4 -0.57 -0.41 -0.034 -2.5 0.35 -0.2 0.15; 63 | 0.032 0.41 -0.34 -1.1 -0.8 0.26 -1 1 -1.5 0.1 0.2 1.1 1.6 -0.62 0.66 -0.4 -1.2 0.055 -0.16 -0.28 2.1 1.5 -0.3 1 -0.5 0.24 0.7 1 0.064 -1 0.65 -0.69 0.87 0.44 0.063 0.23 0.44 -0.55 -1.1 -0.46; 64 | -0.88 0.3 -1.4 -0.015 0.53 0.39 2.7 -0.82 2.2 -0.017 2.2 -0.87 0.82 -0.43 -1.4 0.63 0.22 0.24 -0.91 0.53 0.45 -0.01 1.3 1.2 -0.28 -0.21 1.1 0.14 -0.86 -0.93 1.5 -1.1 -0.73 0.4 -0.86 0.23 -0.89 1.3 0.31 -0.5; 65 | 1.8 0.85 -0.32 1.3 -0.24 0.16 0.22 0.6 0.25 1.2 -0.26 -2.2 0.06 0.71 0.27 -0.16 -0.83 0.7 0.45 1.6 -0.44 1.6 -0.18 -0.28 -1.6 1.6 1.4 0.16 -1.4 0.61 -0.64 0.27 1.5 0.42 1.8 0.4 -1.2 -0.046 -0.99 0.65; 66 | 1.5 0.9 -2.1 -0.58 0.6 -1.1 -2.6 0.44 -0.21 -0.44 1.8 0.092 0.53 -0.85 1.9 1.4 -0.6 -1.4 0.31 0.43 -1.5 0.35 -0.87 -0.4 -0.49 -1.6 1.8 -1.5 -1.5 -1.9 2.1 1.4 0.63 1.3 1.6 -2.3 -0.27 -0.55 1.1 0.13; 67 | 0.42 1.8 0.92 -0.59 -1.9 0.034 -1.9 0.95 -1.1 0.97 1.2 -1.4 -0.74 0.5 1.3 -0.66 1 0.44 0.87 1.8 -0.42 -0.11 0.65 0.71 -0.021 0.078 -1 -0.34 1.1 -0.32 -2 -0.36 -0.44 0.041 -0.37 0.59 -0.34 0.41 0.7 1.7; 68 | -0.45 -0.051 -0.74 0.35 0.69 -1.1 0.12 0.3 1.5 -0.12 1.3 1.6 -1.2 -0.41 -0.31 -0.099 -0.14 -1.3 0.43 -0.72 -0.52 0.49 0.24 -0.89 -0.59 -0.64 -0.65 0.33 0.037 0.52 1.1 -0.62 -0.57 -0.036 0.79 -2.6 0.72 -0.0043 0.82 -1; 69 | 1.2 1.6 0.51 -0.11 0.67 -0.58 0.32 0.23 -0.35 -1.2 0.72 1.8 0.81 0.83 -0.11 0.67 0.41 0.038 1.1 1.2 0.65 0.047 -0.32 -0.1 1.9 2.4 0.69 -2.2 -0.91 -0.68 2.4 0.99 -0.29 -0.33 0.92 -0.53 -0.6 -0.034 0.3 1.1; 70 | -0.29 0.1 -0.17 -1.3 -1 -0.42 0.52 -0.75 1.4 2 -1.3 -1.8 0.72 1.9 0.0061 0.12 0.67 -1.7 0.3 0.14 -0.15 0.9 -0.51 -0.56 -0.055 0.57 0.86 -1.9 -0.01 -0.16 -0.45 0.68 -1.1 -1 0.19 -1.2 -0.85 0.18 -1.3 -0.67; 71 | 3.8 1.3 0.062 -1.6 0.59 0.21 0.78 1.9 0.6 -0.12 -0.39 -0.17 1.2 -1 1.2 0.064 -1.3 0.13 -0.081 0.09 0.32 -1.4 0.056 -0.5 0.12 0.36 0.58 -0.067 1.1 -0.6 1 -2.2 -1.6 -0.0027 0.33 -0.83 0.42 -1.1 0.32 -0.75; 72 | -0.48 1.6 -0.47 -1.2 0.29 1.6 -0.16 -0.35 -1.3 -0.51 -0.51 -0.84 0.56 0.62 0.68 -0.11 -0.14 0.44 0.039 -0.41 -0.73 -0.97 -0.38 -0.27 -0.41 -0.54 0.48 -0.75 -0.97 0.01 -1.2 -0.26 0.83 -1.7 0.0063 -0.87 -2.3 0.61 2.1 0.57;]; 73 | 74 | 
b=[4.9e+002;2.3e+002;1.4e+002;6e+002;2.1e+002;29;8.3e+002;1.4e+002;14;2e+002;68;5.9e+002;1.7e+002;7e+002;1.7e+002;7.3e+002;4.6e+002;27;4.3e+002;6e+002;6e+002;2.9e+002;6.9e+002;62;1.1e+003;2.3e+002;4.3e+002;8.6e+002;9.5e+002;4.4e+002;68;4.8e+002;55;7.5;1.8e+002;6e+002;7.5e+002;2.9e+002;3e+002;6.6e+002;8.7e+002;3.5e+002;1e+002;1.4e+002;2.2e+002;66;1.8e+002;2.9e+002;3.6e+002;1e+002;1.3e+002;3.1e+002;43;23;2.6e+002;1.2e+002;13;5.6e+002;5.9e+002;2.3e+002;14;3.8e+002;7.7e+002;6.2e+002;1.8e+002;2e+002;5.1e+002;1.2e+002;1.6e+003;2.1e+002;]; -------------------------------------------------------------------------------- /w10-unconstrained/ranking/rank_aggr_data.m: -------------------------------------------------------------------------------- 1 | % Data file for rank aggregation problem 2 | 3 | % The data was generated using the following code: 4 | % rand('state',0); 5 | % numobjects=50; 6 | % numpref=1000; 7 | % 8 | % true_ranking=randperm(numobjects); %object ids sorted according to their rank 9 | % map_object_to_ranking(true_ranking)=[1:numobjects]; %ranks sorted according to object id 10 | % objects=randint(numpref,2,numobjects)+1; %pairs of objects in the preferences 11 | % 12 | % %remove pairs with the same object 13 | % pos=find(objects(:,1)==objects(:,2)); 14 | % while(~isempty(pos)) 15 | % objects(pos,:)=randint(length(pos),2,numobjects)+1; 16 | % pos=find(objects(:,1)==objects(:,2)); 17 | % end 18 | % 19 | % %coherent preferences 20 | % for i=1:numpref*.9 21 | % ranking1=map_object_to_ranking(objects(i,1)); 22 | % ranking2=map_object_to_ranking(objects(i,2)); 23 | % if ranking1 < ranking2 24 | % preferences(i,:)=[objects(i,2) objects(i,1)]; 25 | % elseif ranking1 > ranking2 26 | % preferences(i,:)=[objects(i,1) objects(i,2)]; 27 | % end 28 | % end 29 | % %incoherent measurements 30 | % for j=i+1:numpref 31 | % ranking1=map_object_to_ranking(objects(j,1)); 32 | % ranking2=map_object_to_ranking(objects(j,2)); 33 | % if ranking1 < ranking2 34 | % preferences(j,:)=[objects(j,1) objects(j,2)]; 35 | % elseif ranking1 > ranking2 36 | % preferences(j,:)=[objects(j,2) objects(j,1)]; 37 | % end 38 | % end 39 | % 40 | % %scramble measurements 41 | % preferences=preferences(randperm(numpref),:); 42 | 43 | n=50; 44 | m=1000; 45 | 46 | % one preference per row 47 | % first column is i; second column is j 48 | preferences =[ 49 | 40 7 50 | 35 22 51 | 46 50 52 | 32 8 53 | 47 26 54 | 5 26 55 | 28 10 56 | 41 30 57 | 34 21 58 | 46 33 59 | 3 44 60 | 22 50 61 | 46 8 62 | 34 39 63 | 32 48 64 | 1 39 65 | 1 28 66 | 41 31 67 | 11 21 68 | 37 42 69 | 26 5 70 | 32 30 71 | 2 27 72 | 43 46 73 | 34 47 74 | 45 44 75 | 37 9 76 | 14 23 77 | 23 2 78 | 27 31 79 | 37 33 80 | 38 31 81 | 5 10 82 | 16 39 83 | 17 50 84 | 27 8 85 | 1 49 86 | 2 15 87 | 44 48 88 | 35 30 89 | 41 10 90 | 32 23 91 | 48 50 92 | 37 4 93 | 35 36 94 | 33 16 95 | 19 38 96 | 1 38 97 | 34 47 98 | 22 25 99 | 27 49 100 | 36 19 101 | 46 30 102 | 45 22 103 | 41 24 104 | 19 21 105 | 10 48 106 | 41 9 107 | 1 41 108 | 40 48 109 | 1 48 110 | 36 31 111 | 15 21 112 | 38 10 113 | 37 49 114 | 50 8 115 | 1 35 116 | 41 35 117 | 14 15 118 | 6 33 119 | 44 26 120 | 47 49 121 | 41 31 122 | 11 2 123 | 20 47 124 | 5 11 125 | 34 7 126 | 3 4 127 | 38 46 128 | 1 26 129 | 47 10 130 | 7 26 131 | 37 34 132 | 28 46 133 | 11 16 134 | 9 28 135 | 18 33 136 | 5 12 137 | 35 49 138 | 18 25 139 | 37 21 140 | 7 21 141 | 10 19 142 | 1 32 143 | 9 26 144 | 47 2 145 | 13 30 146 | 20 21 147 | 18 12 148 | 32 28 149 | 2 5 150 | 35 21 151 | 12 7 152 | 46 48 153 | 45 31 154 | 24 36 155 | 34 3 156 | 
47 21 157 | 20 18 158 | 9 29 159 | 37 24 160 | 10 21 161 | 26 40 162 | 38 30 163 | 23 15 164 | 14 28 165 | 5 42 166 | 7 31 167 | 4 36 168 | 40 24 169 | 10 31 170 | 5 43 171 | 41 45 172 | 37 26 173 | 41 27 174 | 7 33 175 | 5 30 176 | 39 31 177 | 41 27 178 | 17 31 179 | 47 29 180 | 49 27 181 | 36 26 182 | 2 42 183 | 17 26 184 | 13 49 185 | 9 15 186 | 35 29 187 | 41 3 188 | 32 42 189 | 5 27 190 | 12 11 191 | 17 34 192 | 29 39 193 | 19 8 194 | 5 50 195 | 17 12 196 | 46 30 197 | 20 40 198 | 1 17 199 | 1 45 200 | 20 11 201 | 28 9 202 | 11 15 203 | 14 41 204 | 17 2 205 | 23 16 206 | 23 14 207 | 45 4 208 | 9 8 209 | 20 38 210 | 23 11 211 | 13 28 212 | 28 39 213 | 34 14 214 | 40 11 215 | 3 30 216 | 10 49 217 | 32 27 218 | 7 26 219 | 20 4 220 | 33 10 221 | 37 9 222 | 25 49 223 | 3 27 224 | 23 27 225 | 28 10 226 | 22 31 227 | 16 50 228 | 18 14 229 | 3 30 230 | 1 3 231 | 34 35 232 | 35 21 233 | 13 25 234 | 48 21 235 | 37 32 236 | 28 24 237 | 43 44 238 | 36 42 239 | 27 10 240 | 48 29 241 | 1 10 242 | 14 26 243 | 1 11 244 | 48 2 245 | 12 28 246 | 32 33 247 | 12 44 248 | 1 33 249 | 6 42 250 | 37 38 251 | 49 31 252 | 1 24 253 | 49 29 254 | 7 24 255 | 43 16 256 | 20 42 257 | 33 21 258 | 13 9 259 | 45 11 260 | 29 26 261 | 9 40 262 | 14 8 263 | 22 15 264 | 20 22 265 | 32 48 266 | 20 45 267 | 5 39 268 | 34 41 269 | 47 49 270 | 19 39 271 | 5 29 272 | 49 44 273 | 13 2 274 | 48 34 275 | 17 38 276 | 12 4 277 | 46 2 278 | 13 24 279 | 14 24 280 | 6 29 281 | 24 12 282 | 32 38 283 | 45 22 284 | 44 37 285 | 7 16 286 | 17 44 287 | 9 38 288 | 2 30 289 | 6 25 290 | 13 25 291 | 27 13 292 | 7 50 293 | 24 13 294 | 7 10 295 | 25 5 296 | 37 10 297 | 38 22 298 | 13 23 299 | 38 33 300 | 47 15 301 | 28 16 302 | 34 12 303 | 1 40 304 | 5 21 305 | 13 50 306 | 4 5 307 | 43 4 308 | 45 1 309 | 45 32 310 | 6 20 311 | 39 27 312 | 32 35 313 | 18 9 314 | 41 8 315 | 33 49 316 | 28 7 317 | 12 48 318 | 41 29 319 | 36 30 320 | 45 4 321 | 41 32 322 | 4 48 323 | 46 33 324 | 41 8 325 | 19 16 326 | 6 28 327 | 13 10 328 | 4 40 329 | 25 16 330 | 14 8 331 | 34 42 332 | 1 19 333 | 30 31 334 | 14 50 335 | 42 12 336 | 11 33 337 | 35 33 338 | 36 39 339 | 3 21 340 | 35 50 341 | 23 26 342 | 23 44 343 | 37 32 344 | 38 2 345 | 10 21 346 | 5 10 347 | 38 2 348 | 3 50 349 | 20 31 350 | 43 20 351 | 3 29 352 | 4 27 353 | 22 8 354 | 40 29 355 | 11 8 356 | 7 31 357 | 11 42 358 | 40 36 359 | 1 15 360 | 20 7 361 | 13 2 362 | 42 36 363 | 17 49 364 | 1 22 365 | 23 12 366 | 47 13 367 | 32 43 368 | 33 37 369 | 34 33 370 | 45 6 371 | 36 50 372 | 23 4 373 | 28 43 374 | 45 31 375 | 20 43 376 | 36 39 377 | 26 3 378 | 50 42 379 | 25 1 380 | 16 22 381 | 9 16 382 | 4 22 383 | 14 4 384 | 4 29 385 | 6 3 386 | 17 48 387 | 34 28 388 | 5 45 389 | 12 16 390 | 7 29 391 | 19 24 392 | 16 27 393 | 40 44 394 | 8 50 395 | 18 32 396 | 18 20 397 | 46 22 398 | 9 47 399 | 44 27 400 | 32 13 401 | 17 19 402 | 3 19 403 | 12 28 404 | 13 28 405 | 43 44 406 | 41 10 407 | 12 24 408 | 38 7 409 | 13 41 410 | 9 26 411 | 7 29 412 | 33 32 413 | 3 33 414 | 37 12 415 | 8 28 416 | 3 7 417 | 4 48 418 | 43 26 419 | 43 35 420 | 31 5 421 | 41 33 422 | 40 11 423 | 9 12 424 | 45 23 425 | 13 29 426 | 10 39 427 | 32 24 428 | 20 6 429 | 39 24 430 | 11 21 431 | 30 31 432 | 2 26 433 | 28 16 434 | 41 9 435 | 21 45 436 | 17 37 437 | 7 30 438 | 46 25 439 | 25 1 440 | 18 37 441 | 6 2 442 | 47 8 443 | 19 16 444 | 9 2 445 | 49 39 446 | 2 47 447 | 13 19 448 | 34 23 449 | 45 25 450 | 1 28 451 | 49 33 452 | 16 29 453 | 17 23 454 | 48 29 455 | 37 50 456 | 37 29 457 | 26 49 458 | 14 8 459 | 35 36 460 | 46 31 461 | 38 15 462 | 47 
46 463 | 32 4 464 | 30 50 465 | 34 15 466 | 33 14 467 | 22 15 468 | 37 45 469 | 40 21 470 | 47 38 471 | 32 48 472 | 45 14 473 | 38 8 474 | 23 3 475 | 40 22 476 | 14 44 477 | 37 47 478 | 5 44 479 | 22 42 480 | 38 20 481 | 22 39 482 | 14 19 483 | 9 16 484 | 5 33 485 | 3 37 486 | 35 39 487 | 13 25 488 | 12 41 489 | 34 3 490 | 20 14 491 | 33 49 492 | 23 19 493 | 1 39 494 | 33 25 495 | 9 23 496 | 1 50 497 | 7 31 498 | 1 12 499 | 40 15 500 | 37 19 501 | 10 24 502 | 14 16 503 | 11 8 504 | 14 4 505 | 43 36 506 | 34 44 507 | 3 7 508 | 33 8 509 | 46 40 510 | 16 27 511 | 28 2 512 | 17 31 513 | 48 15 514 | 1 2 515 | 29 39 516 | 26 7 517 | 3 27 518 | 36 21 519 | 12 35 520 | 13 10 521 | 45 36 522 | 32 31 523 | 13 31 524 | 20 7 525 | 34 47 526 | 39 49 527 | 50 39 528 | 2 25 529 | 5 22 530 | 32 16 531 | 10 19 532 | 34 49 533 | 19 49 534 | 47 19 535 | 37 29 536 | 20 15 537 | 35 31 538 | 45 49 539 | 40 29 540 | 46 16 541 | 5 29 542 | 49 25 543 | 46 50 544 | 6 24 545 | 6 35 546 | 26 30 547 | 34 13 548 | 12 39 549 | 18 24 550 | 1 32 551 | 1 36 552 | 28 35 553 | 47 44 554 | 45 8 555 | 43 35 556 | 18 35 557 | 13 35 558 | 19 15 559 | 14 27 560 | 21 5 561 | 20 4 562 | 33 26 563 | 34 42 564 | 38 46 565 | 44 15 566 | 1 41 567 | 18 11 568 | 12 7 569 | 40 10 570 | 17 13 571 | 14 33 572 | 19 16 573 | 48 42 574 | 5 12 575 | 17 11 576 | 4 25 577 | 22 24 578 | 9 27 579 | 13 15 580 | 40 50 581 | 28 49 582 | 4 17 583 | 9 29 584 | 45 21 585 | 40 2 586 | 26 39 587 | 40 2 588 | 37 49 589 | 2 12 590 | 4 27 591 | 22 8 592 | 45 50 593 | 33 19 594 | 46 15 595 | 3 30 596 | 44 15 597 | 37 36 598 | 23 32 599 | 5 40 600 | 23 15 601 | 20 28 602 | 36 8 603 | 41 44 604 | 48 24 605 | 20 41 606 | 45 35 607 | 45 47 608 | 49 12 609 | 32 39 610 | 10 21 611 | 1 41 612 | 6 47 613 | 9 50 614 | 18 12 615 | 23 46 616 | 18 49 617 | 6 16 618 | 40 50 619 | 23 16 620 | 6 16 621 | 19 48 622 | 46 19 623 | 42 8 624 | 13 27 625 | 44 31 626 | 13 12 627 | 40 48 628 | 1 13 629 | 6 42 630 | 20 32 631 | 40 22 632 | 17 4 633 | 28 50 634 | 26 42 635 | 16 39 636 | 37 39 637 | 25 36 638 | 44 24 639 | 17 49 640 | 1 44 641 | 46 43 642 | 41 18 643 | 21 13 644 | 4 24 645 | 3 24 646 | 40 16 647 | 3 15 648 | 33 42 649 | 43 19 650 | 43 42 651 | 19 2 652 | 49 42 653 | 40 11 654 | 11 3 655 | 37 40 656 | 45 18 657 | 1 36 658 | 19 2 659 | 12 48 660 | 50 39 661 | 37 48 662 | 10 21 663 | 29 26 664 | 47 38 665 | 41 16 666 | 45 42 667 | 23 29 668 | 40 16 669 | 45 50 670 | 44 15 671 | 16 7 672 | 7 21 673 | 5 39 674 | 14 10 675 | 36 39 676 | 17 40 677 | 19 27 678 | 20 25 679 | 5 48 680 | 43 40 681 | 17 5 682 | 35 26 683 | 16 42 684 | 17 9 685 | 33 39 686 | 1 46 687 | 16 27 688 | 36 17 689 | 18 16 690 | 5 23 691 | 36 49 692 | 6 7 693 | 45 48 694 | 48 18 695 | 23 26 696 | 14 25 697 | 42 23 698 | 44 42 699 | 37 41 700 | 37 24 701 | 25 24 702 | 11 3 703 | 43 50 704 | 19 30 705 | 12 8 706 | 44 31 707 | 27 15 708 | 33 15 709 | 3 21 710 | 32 44 711 | 1 25 712 | 34 10 713 | 3 2 714 | 47 44 715 | 37 44 716 | 18 42 717 | 16 50 718 | 13 36 719 | 20 8 720 | 44 48 721 | 19 2 722 | 37 41 723 | 18 8 724 | 28 44 725 | 13 32 726 | 35 25 727 | 6 32 728 | 11 49 729 | 9 39 730 | 9 33 731 | 22 49 732 | 36 11 733 | 26 25 734 | 18 31 735 | 47 42 736 | 5 45 737 | 9 47 738 | 48 50 739 | 34 48 740 | 20 19 741 | 35 26 742 | 1 42 743 | 5 42 744 | 1 35 745 | 45 36 746 | 42 48 747 | 35 19 748 | 25 43 749 | 5 40 750 | 18 21 751 | 25 1 752 | 41 21 753 | 28 48 754 | 37 43 755 | 9 32 756 | 45 21 757 | 48 2 758 | 25 21 759 | 1 49 760 | 19 26 761 | 7 27 762 | 27 3 763 | 1 32 764 | 10 15 765 | 7 43 766 | 21 42 767 | 
7 30 768 | 5 49 769 | 6 10 770 | 49 30 771 | 17 45 772 | 18 37 773 | 41 42 774 | 19 49 775 | 11 10 776 | 16 21 777 | 34 4 778 | 49 42 779 | 11 29 780 | 30 23 781 | 9 14 782 | 17 35 783 | 19 31 784 | 16 22 785 | 38 21 786 | 17 33 787 | 46 33 788 | 17 4 789 | 30 24 790 | 3 23 791 | 12 11 792 | 13 20 793 | 35 33 794 | 34 50 795 | 32 8 796 | 12 50 797 | 10 31 798 | 35 30 799 | 37 30 800 | 20 26 801 | 11 24 802 | 14 10 803 | 7 17 804 | 49 31 805 | 1 21 806 | 12 19 807 | 19 29 808 | 23 36 809 | 15 26 810 | 10 31 811 | 21 38 812 | 12 7 813 | 35 21 814 | 3 48 815 | 3 30 816 | 12 38 817 | 2 39 818 | 17 22 819 | 37 33 820 | 7 36 821 | 21 34 822 | 17 24 823 | 48 25 824 | 20 50 825 | 35 42 826 | 43 15 827 | 3 22 828 | 5 42 829 | 18 46 830 | 16 40 831 | 13 20 832 | 22 24 833 | 33 29 834 | 5 26 835 | 34 36 836 | 35 29 837 | 1 10 838 | 23 11 839 | 32 42 840 | 1 8 841 | 22 31 842 | 12 33 843 | 16 41 844 | 19 31 845 | 28 16 846 | 14 30 847 | 1 24 848 | 11 48 849 | 20 31 850 | 50 15 851 | 15 8 852 | 18 11 853 | 12 15 854 | 13 33 855 | 43 30 856 | 32 36 857 | 5 41 858 | 20 24 859 | 4 49 860 | 28 50 861 | 43 36 862 | 37 6 863 | 17 41 864 | 37 39 865 | 13 7 866 | 35 21 867 | 34 11 868 | 23 4 869 | 2 9 870 | 41 23 871 | 48 25 872 | 36 24 873 | 47 46 874 | 44 25 875 | 49 32 876 | 9 23 877 | 3 31 878 | 29 27 879 | 33 48 880 | 32 1 881 | 39 27 882 | 39 50 883 | 7 15 884 | 7 49 885 | 35 21 886 | 21 42 887 | 5 41 888 | 10 22 889 | 5 3 890 | 32 43 891 | 41 30 892 | 45 34 893 | 16 44 894 | 9 42 895 | 7 25 896 | 34 23 897 | 9 3 898 | 18 10 899 | 37 45 900 | 44 30 901 | 33 34 902 | 34 29 903 | 34 5 904 | 4 29 905 | 46 15 906 | 41 8 907 | 11 3 908 | 34 36 909 | 47 48 910 | 34 26 911 | 23 21 912 | 7 31 913 | 1 27 914 | 28 29 915 | 37 19 916 | 6 35 917 | 39 34 918 | 47 25 919 | 17 29 920 | 19 22 921 | 48 22 922 | 46 24 923 | 19 25 924 | 7 36 925 | 19 31 926 | 34 32 927 | 17 47 928 | 20 32 929 | 10 22 930 | 47 19 931 | 3 35 932 | 39 22 933 | 49 41 934 | 1 32 935 | 14 44 936 | 1 42 937 | 41 44 938 | 14 40 939 | 9 15 940 | 6 11 941 | 41 42 942 | 42 34 943 | 9 44 944 | 12 19 945 | 6 42 946 | 43 24 947 | 32 21 948 | 16 39 949 | 23 29 950 | 23 16 951 | 9 25 952 | 46 27 953 | 18 20 954 | 18 17 955 | 40 46 956 | 14 24 957 | 1 9 958 | 1 25 959 | 49 15 960 | 48 21 961 | 28 38 962 | 35 39 963 | 32 33 964 | 9 10 965 | 3 35 966 | 16 50 967 | 4 24 968 | 44 17 969 | 15 31 970 | 34 21 971 | 33 27 972 | 48 32 973 | 47 50 974 | 23 15 975 | 47 26 976 | 37 39 977 | 9 19 978 | 5 25 979 | 13 25 980 | 32 22 981 | 4 10 982 | 3 44 983 | 31 18 984 | 28 19 985 | 11 28 986 | 19 25 987 | 35 30 988 | 23 48 989 | 9 16 990 | 25 8 991 | 14 22 992 | 1 41 993 | 34 3 994 | 44 30 995 | 23 9 996 | 32 2 997 | 1 11 998 | 28 37 999 | 14 38 1000 | 38 42 1001 | 32 42 1002 | 33 25 1003 | 8 24 1004 | 3 4 1005 | 4 39 1006 | 29 25 1007 | 18 20 1008 | 41 47 1009 | 34 15 1010 | 9 4 1011 | 6 22 1012 | 23 48 1013 | 14 48 1014 | 41 49 1015 | 6 32 1016 | 30 50 1017 | 16 45 1018 | 12 36 1019 | 44 50 1020 | 12 16 1021 | 7 48 1022 | 42 8 1023 | 30 42 1024 | 47 43 1025 | 19 30 1026 | 6 26 1027 | 15 24 1028 | 30 15 1029 | 37 4 1030 | 3 38 1031 | 14 26 1032 | 41 11 1033 | 4 42 1034 | 12 32 1035 | 44 20 1036 | 8 24 1037 | 45 29 1038 | 46 25 1039 | 13 50 1040 | 7 19 1041 | 49 30 1042 | 47 4 1043 | 45 19 1044 | 10 48 1045 | 7 48 1046 | 16 27 1047 | 40 21 1048 | 14 47]; 1049 | --------------------------------------------------------------------------------
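The comment block at the top of rank_aggr_data.m explains how the 1000 preference pairs were generated from a hidden true ranking of the 50 objects (roughly 90% of the pairs agree with it, the rest are flipped). The repository's solver for this data is ranking.m, which is not included in this excerpt; purely as a hedged illustration of one way such data could be fit, the CVX sketch below assigns each object a score and penalizes violated preferences with a quadratic hinge. The variable names r, i, j, and est_order are hypothetical and not taken from the repository.

% Hedged sketch only -- not the repository's ranking.m.
% Score each object so that, for every recorded preference "i beats j",
% r(i) >= r(j) + 1 holds as nearly as possible (quadratic penalty on violations).
rank_aggr_data;                                % defines n, m, preferences
i = preferences(:,1); j = preferences(:,2);    % row k: object i(k) preferred to object j(k)
cvx_begin
    variable r(n)
    % square_pos(u) = max(u,0)^2, so only violated margins are penalized
    minimize( sum( square_pos( 1 - (r(i) - r(j)) ) ) )
cvx_end
[~, est_order] = sort(r, 'descend');           % objects sorted by estimated score

The quadratic hinge is one arbitrary choice; an l1 hinge (sum of pos(...)) would give a different trade-off between many small violations and a few large ones.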