-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: DGD_DIMINISHING.m
46 lines (36 loc) · 1.6 KB
/
DGD_DIMINISHING.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
function [xminuxbar, sq_grad,time] = DGD_DIMINISHING(W_aug, x, n,N,gc,lambda,aalpha, features, labels,bs, iter_num)
% DGD_DIMINISHING  Decentralized gradient descent with a diminishing step size.
%
% Inputs:
%   W_aug    - (N*n x N*n) augmented mixing matrix applied to the stacked iterates
%   x        - (N*n x iter_num) stacked per-node iterates; column t-1 is read,
%              column t is written each iteration
%   n        - dimension of each node's local variable
%   N        - number of nodes
%   gc       - function handle: gc(x_i, lambda, aalpha, feature, label, bs, N)
%              returning an (n x 1) per-sample gradient contribution
%   lambda, aalpha - regularization/model parameters forwarded to gc
%   features - (d x N*bs) sample matrix; node ii owns columns (ii-1)*bs+1 : ii*bs
%   labels   - (N*bs x 1) labels aligned with the columns of features
%   bs       - samples per node (local batch size)
%   iter_num - number of iterations (loop runs t = 2..iter_num)
%
% Outputs:
%   xminuxbar - per-iteration consensus error: sum_k ||x_k - x_avg||^2
%   sq_grad   - per-iteration squared norm of the summed gradient at x_avg
%   time      - cumulative wall-clock time per iteration (seconds)
fprintf('Starting DGD_DIMINISHING\n');
sq_grad = zeros(iter_num, 1);
xminuxbar = zeros(iter_num, 1);
time = zeros(iter_num, 1);
upd = textprogressbar(iter_num);
eta1 = 4; % base step size; tuned in the range [1, 4]
for t = 2:iter_num
    % Progress-bar rendering is kept OUTSIDE the timed region so that
    % time(t) measures only the algorithm's work (fixes timing pollution).
    upd(t);
    tic;
    % Diminishing step size; the +24 offset damps the first iterations.
    % NOTE(review): offset 24 appears to be a hand-tuned constant — confirm.
    eta = eta1 / sqrt(t+24);
    % Each node evaluates its local gradient at its own block of x(:, t-1).
    gradient = local_stacked_gradient(x(:, t-1), true, n, N, gc, lambda, aalpha, features, labels, bs);
    % Combined consensus + gradient step: mix the post-gradient iterates.
    x(:, t) = W_aug* (x(:, t-1) - eta * gradient) ;
    xs = reshape(x(:,t), [n, N]);
    x_avg = sum(xs, 2) / N;
    for k = 1:N
        xminuxbar(t) = xminuxbar(t)+(norm(xs(:, k)-x_avg))^2;
    end
    % Re-evaluate every node's local gradient at the network average x_avg
    % to monitor stationarity of the averaged iterate.
    temp_grad = local_stacked_gradient(x_avg, false, n, N, gc, lambda, aalpha, features, labels, bs);
    g = reshape(temp_grad, [n, N]);
    sq_grad(t) = sum(sum(g, 2).^2);
    t_temp = toc;
    time(t) = time(t - 1) + t_temp;
end
end

function grad = local_stacked_gradient(point, per_node, n, N, gc, lambda, aalpha, features, labels, bs)
% LOCAL_STACKED_GRADIENT  Accumulate each node's batch gradient into a stacked
% (N*n x 1) vector. When per_node is true, `point` is the stacked (N*n x 1)
% iterate and node ii evaluates at its own slice; when false, `point` is a
% single (n x 1) vector (e.g. the network average) shared by all nodes.
% Node ii owns samples (ii-1)*bs+1 : ii*bs. Accumulation order matches the
% original inline loops exactly.
grad = zeros(N*n, 1);
for ii = 1:N
    rows = (ii-1)*n+1 : ii*n;
    if per_node
        eval_pt = point(rows);
    else
        eval_pt = point;
    end
    for jj = (ii-1)*bs+1 : ii*bs
        grad(rows) = grad(rows) + gc(eval_pt, lambda, aalpha, features(:,jj), labels(jj), bs, N);
    end
end
end