deconvolution_1d.py
"""Example of a deconvolution problem with different solvers (CPU)."""
import numpy as np
import matplotlib.pyplot as plt
import scipy.signal
import odl
class Convolution(odl.Operator):
    """Operator convolving its input with a fixed kernel."""

    def __init__(self, kernel, adjkernel=None):
        self.kernel = kernel
        # The adjoint of convolution with ``kernel`` is convolution with
        # the reversed kernel.
        self.adjkernel = (adjkernel if adjkernel is not None
                          else kernel.space.element(kernel[::-1].copy()))
        self.norm = float(np.sum(np.abs(self.kernel)))
        super(Convolution, self).__init__(
            domain=kernel.space, range=kernel.space, linear=True)

    def _call(self, x):
        return scipy.signal.convolve(x, self.kernel, mode='same')

    @property
    def adjoint(self):
        return Convolution(self.adjkernel, self.kernel)

    def opnorm(self):
        # Upper bound on the operator norm: the 1-norm of the kernel.
        return self.norm


# Discretization
discr_space = odl.uniform_discr(0, 10, 500, impl='numpy')

# Complicated functions to check performance
kernel = discr_space.element(lambda x: np.exp(x / 2) * np.cos(x * 1.172))
phantom = discr_space.element(lambda x: x ** 2 * np.sin(x) ** 2 * (x > 5))

# Create operator
conv = Convolution(kernel)

# Damping parameter for Landweber
iterations = 100
omega = 1 / conv.opnorm() ** 2
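# Note: Landweber converges for step sizes 0 < omega < 2 / ||A||^2;
# conv.opnorm() is the kernel's 1-norm, an upper bound on the true
# operator norm, so this omega lies safely inside that range.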


# Display callback
def callback(x):
    plt.plot(conv(x))
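
# Each solver calls ``callback`` once per iteration with the current
# iterate, so conv(x) is drawn on top of the right-hand side (phantom)
# and shows how the data fit improves.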


# Test CGN (conjugate gradient on the normal equations)
plt.figure()
plt.plot(phantom)
odl.solvers.conjugate_gradient_normal(conv, discr_space.zero(), phantom,
                                      iterations, callback)

# Landweber
plt.figure()
plt.plot(phantom)
odl.solvers.landweber(conv, discr_space.zero(), phantom,
                      iterations, omega, callback)

plt.show()
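
# A possible alternative to the hand-written callback above (a sketch,
# assuming the installed ODL version provides the built-in callbacks
# odl.solvers.CallbackPrintIteration and odl.solvers.CallbackShow):
#
#     callback = (odl.solvers.CallbackPrintIteration() &
#                 odl.solvers.CallbackShow())
#
# This would print the iteration number and display each iterate.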