Skip to content

Commit 5b4fb28

Browse files
committed
0.5 release
1 parent 460ad89 commit 5b4fb28

4 files changed

Lines changed: 22 additions & 28 deletions

File tree

copt/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
__version__ = '0.4.0-dev'
1+
__version__ = '0.5.0'
22

33
from .proxgrad import minimize_PGD, minimize_APGD
44
from .splitting import minimize_TOS, minimize_PDHG

copt/datasets.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -294,7 +294,7 @@ def load_kdd12(md5_check=True, verbose=0):
294294
if not os.path.exists(DATA_DIR):
295295
os.makedirs(DATA_DIR)
296296
if not os.path.exists(file_path):
297-
print('URL dataset is not present in data folder. Downloading it ...')
297+
print('KDD12 dataset is not present in data folder. Downloading it ...')
298298
url = 'https://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/binary/kdd12.bz2'
299299
urllib.request.urlretrieve(url, file_path)
300300
print('Finished downloading')

copt/frank_wolfe.py

Lines changed: 13 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -8,25 +8,6 @@
88
from . import utils
99

1010

11-
def _backtrack(
12-
f_t, f_grad, x_t, d_t, g_t, L_t,
13-
gamma_max=1, ratio_increase=2., ratio_decrease=0.999,
14-
max_iter=100):
15-
# could be included inside minimize_FW
16-
d2_t = splinalg.norm(d_t) ** 2
17-
for i in range(max_iter):
18-
step_size = min(g_t / (d2_t * L_t), gamma_max)
19-
rhs = f_t - step_size * g_t + 0.5 * (step_size**2) * L_t * d2_t
20-
f_next, grad_next = f_grad(x_t + step_size * d_t)
21-
if f_next <= rhs:
22-
if i == 0:
23-
L_t *= ratio_decrease
24-
break
25-
else:
26-
L_t *= ratio_increase
27-
return step_size, L_t, f_next, grad_next
28-
29-
3011
def minimize_FW(
3112
f_grad, lmo, x0, L=None, max_iter=1000, tol=1e-12,
3213
backtracking=True, callback=None, verbose=0):
@@ -55,11 +36,21 @@ def minimize_FW(
5536
g_t = g_t[0]
5637
if g_t <= tol:
5738
break
39+
d2_t = splinalg.norm(d_t) ** 2
5840
if backtracking:
59-
step_size, L_t, f_next, grad_next = _backtrack(
60-
f_t, f_grad, x_t, d_t, g_t, L_t)
41+
ratio_decrease = 0.999
42+
ratio_increase = 2
43+
for i in range(max_iter):
44+
step_size = min(g_t / (d2_t * L_t), 1)
45+
rhs = f_t - step_size * g_t + 0.5 * (step_size**2) * L_t * d2_t
46+
f_next, grad_next = f_grad(x_t + step_size * d_t)
47+
if f_next <= rhs + 1e-6:
48+
if i == 0:
49+
L_t *= ratio_decrease
50+
break
51+
else:
52+
L_t *= ratio_increase
6153
else:
62-
d2_t = splinalg.norm(d_t) ** 2
6354
step_size = min(g_t / (d2_t * L_t), 1)
6455
f_next, grad_next = f_grad(x_t + step_size * d_t)
6556
x_t += step_size * d_t

copt/utils.py

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -32,13 +32,15 @@ def __call__(self, x):
3232
def init_lipschitz(f_grad, x0):
3333
L0 = 1e-3
3434
f0, grad0 = f_grad(x0)
35-
if sparse.issparse(grad0):
35+
if sparse.issparse(grad0) and not sparse.issparse(x0):
3636
x0 = sparse.csc_matrix(x0).T
37-
elif sparse.issparse(x0):
37+
elif sparse.issparse(x0) and not sparse.issparse(grad0):
3838
grad0 = sparse.csc_matrix(grad0).T
3939
x_tilde = x0 - (1./L0)*grad0
4040
f_tilde = f_grad(x_tilde)[0]
41-
while f_tilde > f0:
41+
for _ in range(100):
42+
if f_tilde <= f0:
43+
break
4244
L0 *= 10
4345
x_tilde = x0 - (1./L0)*grad0
4446
f_tilde = f_grad(x_tilde)[0]
@@ -182,6 +184,7 @@ def lipschitz(self):
182184
s = splinalg.svds(self.A, k=1, return_singular_vectors=False)[0]
183185
return (s * s) / self.A.shape[0] + self.alpha
184186

187+
185188
class HuberLoss:
186189
"""Huber loss"""
187190
def __init__(self, A, b, alpha=0, delta=1):
@@ -205,7 +208,7 @@ def f_grad(self, x, return_gradient=True):
205208
grad = self.A[idx].T.dot(z[idx]) / self.A.shape[0] + self.alpha * x.T
206209
grad = np.asarray(grad)
207210
grad += self.A[~idx].T.dot(self.delta * np.sign(z[~idx]))/ self.A.shape[0]
208-
return loss, grad
211+
return loss, np.asarray(grad).ravel()
209212

210213
def lipschitz(self):
211214
s = splinalg.svds(self.A, k=1, return_singular_vectors=False)[0]

0 commit comments

Comments (0)