-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathRCD.R
160 lines (126 loc) · 4.02 KB
/
RCD.R
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
# This .R code file consists of:
# 1. RCDM (Algorithm 1): Coordinate Descent method with randomized / cyclic rules and fixed step size
# for solving quadratic form objective function
# 2. Gradient Descent as baseline model
# Authors: STA 243 Final Project Group Members:
# Han Chen, Ninghui Li, Chenghan Sun
#####Randomized Coordinate Descent Method #####
RCDM = function(A, b, xs, xk = NULL, cr = NULL, iter_k = NULL,
alpha=0.001, tol=10^-2, maxIter=10^7, rule="random") {
# Coordinate Descent with a fixed step size for the least-squares
# objective f(x) = 0.5 * ||A x - b||_2^2 (Algorithm 1).
# Params:
# A: input matrix; b: target vector; xs: the true parameters
# xk: initial value of the optimization problem (default: zero vector)
# cr: kept for backward compatibility; recomputed internally
# iter_k: coordinate index; initialized below according to `rule`
# alpha: fixed step size
# tol: tolerance on the objective gap f(xk) - f(xs)
# maxIter: maximum number of iterations
# rule: coordinate selection rule for iter_k, "random" or "cyclic"
# Returns: list with k (iteration count), cr (relative error path),
# error (absolute error path), fx (objective-gap path)
# columns of A
n = ncol(A)
# set k as the counter
k = 1
# denote cr = norm(xk-xs)/norm(xs) as the criterion
cr = c(1)
# initialize xk as the estimates' vector
# (base R `matrix(0, ...)` replaces pracma::zeros — no hidden dependency)
if (is.null(xk)){
xk = matrix(0, n, 1)
}
# Define the objective function: squared-error between y and yhat
quadratic_obj = function(y, yhat){
fun_val = 0.5*norm((y - yhat), "2")^2
return(fun_val)
}
# initialize objective function vector
fstar = quadratic_obj(A%*%xs, b) # true (optimal) function value
fx = c(quadratic_obj(A%*%xk, b) - fstar) # objective-gap trajectory
error = c() # initialize absolute-error vector
# pick the first coordinate; an invalid rule is an error
# (the original used `break` here, which is itself an error outside a loop)
if (rule == "random") {
iter_k = sample(n, 1)
} else if (rule == "cyclic") {
iter_k = 1
} else {
stop("Need to specify variants of CD method.")
}
# t(A) is loop-invariant — hoist it out of the main loop
A1 = t(A)
# main loop: iterate until the objective gap falls below tol
while (fx[k] >= tol) {
# residual, then the iter_k-th coordinate of the gradient
u = A%*%xk - b
gd_k = A1[iter_k, ]%*%u
# coordinate update with fixed step size
xk[iter_k] = xk[iter_k] - alpha*gd_k
# update criterion cr
cr[k+1] = norm(xk-xs, "2") / norm(xs, "2")
# update k
k = k+1
# update estimated function value and error
fx = c(fx, quadratic_obj(A%*%xk, b) - fstar)
error = c(error, norm((xk - xs), "2"))
# update iter_k based on random / cyclic rules
# (base `%%` replaces pracma::mod)
if (rule == "cyclic") {
iter_k = iter_k %% n + 1
} else if (rule == "random") {
iter_k = sample(n, 1)
}
# set algorithm iter bound
if (k > maxIter) {
print(paste("Algorithm unfinished by reaching the maximum iterations."))
break
}
}
return(list(k = k, cr = cr, error = error, fx = fx))
}
#### Gradient Descent ####
GD = function(A, b, xs, xk = NULL, cr = NULL, alpha=0.001, tol=10^-2, maxIter=10^7) {
# Gradient Descent baseline with a fixed step size for the least-squares
# objective f(x) = 0.5 * ||A x - b||_2^2.
# Params:
# A: input matrix; b: target vector; xs: the true parameters
# xk: initial value of the optimization problem (default: zero vector)
# cr: kept for backward compatibility; recomputed internally
# alpha: fixed step size
# tol: tolerance on the objective gap f(xk) - f(xs)
# maxIter: maximum number of iterations
# Returns: list with k (iteration count), cr (relative error path),
# error (absolute error path), fx (objective-gap path)
# columns of A
n = ncol(A)
# set k as the counter
k = 1
# denote cr = norm(xk-xs)/norm(xs) as the criterion
cr = c(1)
# initialize xk as the estimates' vector
# (base R `matrix(0, ...)` replaces pracma::zeros — no hidden dependency)
if (is.null(xk)){
xk = matrix(0, n, 1)
}
# Define the objective function: squared-error between y and yhat
quadratic_obj = function(y, yhat){
fun_val = 0.5*norm((y - yhat), "2")^2
return(fun_val)
}
# initialize objective function vector
fstar = quadratic_obj(A%*%xs, b) # true (optimal) function value
fx = c(quadratic_obj(A%*%xk, b) - fstar) # objective-gap trajectory
error = c() # initialize absolute-error vector
# t(A) is loop-invariant — hoist it out of the main loop
A1 = t(A)
# main loop: iterate until the objective gap falls below tol
while (fx[k] >= tol) {
# residual, then the full gradient A^T (A xk - b)
u = A%*%xk - b
gd_k = A1%*%u
# full gradient step with fixed step size
xk = xk - alpha * gd_k
# update criterion cr
cr[k+1] = norm(xk-xs, "2") / norm(xs, "2")
# update k
k = k+1
# update estimated function value and error
fx = c(fx, quadratic_obj(A%*%xk, b) - fstar)
error = c(error, norm((xk - xs), "2"))
# set algorithm iter bound
if (k > maxIter) {
print(paste("Algorithm unfinished by reaching the maximum iterations."))
break
}
}
return(list(k = k, cr = cr, error = error, fx = fx))
}