-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathconjugategradient.py
More file actions
71 lines (60 loc) · 2.42 KB
/
conjugategradient.py
File metadata and controls
71 lines (60 loc) · 2.42 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
from multilevelmethod import *
from math import sqrt
from scipy.sparse import diags
def PCG(graph, b=0, x0=0, smoother=None, c_factor=2, max_iter=1000,
        epsilon=1e-6):
    """
    Preconditioned Conjugate Gradient Method.

    :param graph: The weighted or unweighted vertex-vertex adjacency matrix
        in COO format (or any scipy sparse format, although testing should be
        done to confirm)
    :param b: known right hand side of equation Ax = b; if none given
        (scalar-0 sentinel), a random vector of the correct size is used
    :param x0: initial guess; if none given (scalar-0 sentinel), a random
        vector of the correct size is made and used
    :param smoother: smoother method from iterativemethods.py to use as the
        preconditioner M, or the string 'diag' for Jacobi (diagonal)
        preconditioning.  Defaults to fgs (resolved lazily so the module
        attribute is only required when actually used).
    :param c_factor: coarsening factor to determine when to stop coarsening.
        It is the ratio of the number of original vertices divided by the
        number in the resulting coarse graph.  (Not used directly here;
        kept for interface compatibility with the multilevel methods.)
    :param max_iter: maximum number of iterations
    :param epsilon: relative tolerance; iteration stops once the
        preconditioned residual product delta falls to epsilon * delta0
    :return: x (the approximate solution), curr_iter (number of iterations
        used), delta0 (initial preconditioned residual dot product), and
        delta (final preconditioned residual dot product)
    """
    # Resolve the default smoother lazily.  Using `smoother=fgs` as the
    # def-time default evaluates the name at import; `None` is backward
    # compatible and defers the lookup until it is actually needed.
    if smoother is None:
        smoother = fgs
    # going to just assume the special laplacian was passed in for now
    A = graph.copy()
    # The scalar-0 sentinel tests below use np.isscalar first: a bare
    # `b == 0` / `x0 == 0` raises "truth value of an array is ambiguous"
    # whenever a numpy array is passed in, which is the normal case.
    if np.isscalar(b) and b == 0:
        # no right hand side given -> random vector, as documented
        b = np.random.rand(A.shape[1])
    if np.isscalar(x0) and x0 == 0:
        # no initial guess given -> random vector, as documented
        x0 = np.random.rand(A.shape[1])
    x = x0
    # initial residual of Ax = b
    r = b - A @ x
    # Precondition the *residual*, r_hat = M^{-1} r.  (Preconditioning b
    # instead, as the original first step did, disagrees with the update
    # inside the loop and is wrong whenever x0 != 0.)
    d = None  # Jacobi preconditioner matrix, built once if requested
    if smoother == 'diag':
        # CSC conversion up front: spsolve would otherwise re-convert the
        # DIA matrix on every call.
        d = diags(A.diagonal()).tocsc()
        r_hat = spsolve(d, r)
    else:
        r_hat = smoother(A, r, x, max_iter=1)[0]
    delta = delta0 = r_hat.dot(r)
    p = r_hat
    curr_iter = 0
    while delta > epsilon * delta0 and curr_iter < max_iter:
        g = A @ p
        alpha = delta / (p.dot(g))
        x = x + alpha * p
        r = r - alpha * g
        # apply the preconditioner to the new residual
        if smoother == 'diag':
            r_hat = spsolve(d, r)
        else:
            r_hat = smoother(A, r, x, max_iter=1)[0]
        delta_old = delta
        delta = r_hat.dot(r)
        beta = delta / delta_old
        p = r_hat + beta * p
        curr_iter += 1
    return x, curr_iter, delta0, delta