Question:

Please fill in the parts marked with "@" in the following MATLAB script:

clearvars
clc
addpath('../Generation')
addpath('../Basic_blocks')
addpath('../Algorithms')

% Loading scenarios
% ===========================
scenario = 2;
[data_class, set_up] = scenarios_classification(scenario);

% Definition of the problem
% ===================================
loss_logistic_L2 = @(N,U,x,y,lambda) (1/N*sum(log(1+exp(-diag(y)*(U*x)))) + lambda/2*(x'*x));
grad_logistic_L2 = @(N,U,x,y,lambda) (1/N*(sum((diag(-y)*U)./kron(1+exp(diag(y)*(U*x)),ones(1,length(x)))))' + lambda*x);
hess_logistic_L2 = @calculation_Hessian_logistic;
grad_logistic_proximal = @(N,U,x,y,ro,a) (1/N*(sum((diag(-y)*U)./kron(1+exp(diag(y)*(U*x)),ones(1,length(x)))))' + ro*(x-a));
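The Hessian handle points to calculation_Hessian_logistic, which lives in the course codebase and is not shown in the question. As a minimal sketch of what such a function could compute, assuming it returns the Hessian of the L2-regularized logistic loss with the same (N,U,x,y,lambda) signature as the other handles (the body below is an assumption, not the original file):

function H = calculation_Hessian_logistic(N,U,x,y,lambda)
% Hypothetical sketch: Hessian of the L2-regularized logistic loss.
% With p_i = 1/(1+exp(-u_i'*x)), each data term contributes
% p_i*(1-p_i)*u_i*u_i'; the label y_i drops out because p*(1-p) is
% invariant to the sign of the margin, so y is accepted but unused here.
p = 1./(1+exp(-U*x));                       % N x 1 vector of probabilities
W = diag(p.*(1-p));                         % diagonal weight matrix
H = (1/N)*(U'*W*U) + lambda*eye(length(x)); % data term plus L2 term
end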

% Solution of the empirical risk using CVX
% =========================================
x_L2_cvx = solver_cvx(set_up,@(N,A,x,y,lambda) loss_logistic_L2(N,A,x,y,lambda));
loss_opt = loss_logistic_L2(set_up.Niter_train,set_up.Utrain(:,1:set_up.M+1),x_L2_cvx,set_up.ytrain(:,1),set_up.Lambda);
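solver_cvx is provided in ../Algorithms and is not reproduced in the question. A rough, hypothetical sketch of what it might do is given below, assuming CVX is installed; because CVX's DCP rules do not accept log(1+exp(.)) written directly, the sketch hard-codes the logistic term via log_sum_exp instead of calling the generic loss handle, so it is an illustration rather than the original routine:

function x_opt = solver_cvx(set_up, loss)
% Hypothetical sketch: solve the L2-regularized logistic problem with CVX.
% Note: the generic loss handle is not used in this sketch; the logistic
% term log(1+exp(z)) is expressed as log_sum_exp([0; z]) to stay DCP-compliant.
N = set_up.Niter_train;
U = set_up.Utrain(:,1:set_up.M+1);
y = set_up.ytrain(:,1);
n = set_up.M+1;
cvx_begin quiet
    variable xv(n)
    z = -y.*(U*xv);                        % margins, affine in xv
    obj = set_up.Lambda/2*sum_square(xv);  % L2 regularizer
    for i = 1:N
        obj = obj + (1/N)*log_sum_exp([0; z(i)]);   % log(1+exp(z_i))
    end
    minimize(obj)
cvx_end
x_opt = xv;
end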

% Gradient descent
out_gd = grad_FOM(set_up,@(N,A,x,y,lambda) grad_logistic_L2(N,A,x,y,lambda));
loss_grad = eval_loss(out_gd,set_up,@(N,A,x,y,lambda) loss_logistic_L2(N,A,x,y,lambda));
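grad_FOM and eval_loss also come from the course codebase. The sketches below show one plausible shape for them, assuming grad_FOM runs plain fixed-step gradient descent and stores every iterate column-wise, and that eval_loss turns those iterates into a 1 x Niter_train row of loss values; the step-size field name set_up.mu is an assumption:

function out = grad_FOM(set_up, grad)
% Hypothetical sketch: fixed-step gradient descent storing every iterate.
N  = set_up.Niter_train;
U  = set_up.Utrain(:,1:set_up.M+1);
y  = set_up.ytrain(:,1);
mu = set_up.mu;                        % assumed step-size field
x  = zeros(set_up.M+1,1);
out = zeros(set_up.M+1,N);
for k = 1:N
    x = x - mu*grad(N,U,x,y,set_up.Lambda);   % gradient step
    out(:,k) = x;                             % keep iterate for the learning curve
end
end

function losses = eval_loss(out, set_up, loss)
% Hypothetical sketch: evaluate the loss at every stored iterate (1 x Niter_train).
N = set_up.Niter_train;
U = set_up.Utrain(:,1:set_up.M+1);
y = set_up.ytrain(:,1);
losses = zeros(1,N);
for k = 1:N
    losses(k) = loss(N,U,out(:,k),y,set_up.Lambda);
end
end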

% Newton algorithm
out_hess = grad_SOM(set_up,@(N,A,x,y,lambda) grad_logistic_L2(N,A,x,y,lambda),@(N,A,x,y,lambda) hess_logistic_L2(N,A,x,y,lambda));
loss_hess = eval_loss(out_hess,set_up,@(N,A,x,y,lambda) loss_logistic_L2(N,A,x,y,lambda));
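In the same spirit, grad_SOM presumably takes both a gradient and a Hessian handle and performs Newton steps; a minimal sketch under that assumption:

function out = grad_SOM(set_up, grad, hess)
% Hypothetical sketch: Newton's method, x <- x - H\g, storing every iterate.
N = set_up.Niter_train;
U = set_up.Utrain(:,1:set_up.M+1);
y = set_up.ytrain(:,1);
x = zeros(set_up.M+1,1);
out = zeros(set_up.M+1,N);
for k = 1:N
    g = grad(N,U,x,y,set_up.Lambda);
    H = hess(N,U,x,y,set_up.Lambda);
    x = x - H\g;                       % full Newton step
    out(:,k) = x;
end
end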

% BFGS algorithm
out_BFGS = BFGS(set_up,@(N,A,x,y,lambda) grad_logistic_L2(N,A,x,y,lambda));
loss_BFGS = eval_loss(out_BFGS,set_up,@(N,A,x,y,lambda) loss_logistic_L2(N,A,x,y,lambda));
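Finally, the BFGS routine only receives the gradient handle, so it must build its own curvature model. A hedged sketch using the standard inverse-Hessian BFGS update with a fixed step size; the original routine may well use a line search instead:

function out = BFGS(set_up, grad)
% Hypothetical sketch: BFGS with an inverse-Hessian approximation B.
N  = set_up.Niter_train;
U  = set_up.Utrain(:,1:set_up.M+1);
y  = set_up.ytrain(:,1);
mu = set_up.mu;                        % assumed step-size field
n  = set_up.M+1;
x  = zeros(n,1);
B  = eye(n);                           % inverse Hessian approximation
g  = grad(N,U,x,y,set_up.Lambda);
out = zeros(n,N);
for k = 1:N
    x_new = x - mu*B*g;                % quasi-Newton step
    g_new = grad(N,U,x_new,y,set_up.Lambda);
    s = x_new - x;
    t = g_new - g;
    if s'*t > 0                        % curvature condition keeps B positive definite
        rho = 1/(t'*s);
        B = (eye(n)-rho*(s*t'))*B*(eye(n)-rho*(t*s')) + rho*(s*s');
    end
    x = x_new;
    g = g_new;
    out(:,k) = x;
end
end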

% Plot of learning curves
figure(1)
plot(1:set_up.Niter_train,10*log10(sum((loss_grad-loss_opt*ones(1,set_up.Niter_train)).^2,1)),'b','LineWidth',3), hold on
plot(1:set_up.Niter_train,10*log10(sum((loss_hess-loss_opt*ones(1,set_up.Niter_train)).^2,1)),'r','LineWidth',3),
plot(1:set_up.Niter_train,10*log10(sum((loss_BFGS-loss_opt*ones(1,set_up.Niter_train)).^2,1)),'g','LineWidth',3), hold off
legend('Gradient','Hess','BFGS'), grid
xlabel('Iterations')
ylabel('MSE')
title('Logistic-L2. Different implementations')

% Let's make a zoom
% Plot of learning curves
figure(2)
show = 30;
plot(1:show,10*log10(sum((loss_grad(1:show)-loss_opt*ones(1,show)).^2,1)),'b','LineWidth',3), hold on
plot(1:show,10*log10(sum((loss_hess(1:show)-loss_opt*ones(1,show)).^2,1)),'r','LineWidth',3),
plot(1:show,10*log10(sum((loss_BFGS(1:show)-loss_opt*ones(1,show)).^2,1)),'g','LineWidth',3), hold off
grid
xlabel('Iterations')
ylabel('MSE')
title('Logistic Algorithm (Zoom)')
legend('Gradient','Newton','BFGS')
