scikit-image

Форк
0
/
benchmark_morphology.py 
290 строк · 10.9 Кб
1
"""Benchmarks for `skimage.morphology`.
2

3
See "Writing benchmarks" in the asv docs for more information.
4
"""
5

6
import numpy as np
7
from numpy.lib import NumpyVersion as Version
8
import scipy.ndimage
9

10
import skimage
11
from skimage import color, data, morphology, util
12

13

14
class Skeletonize3d:
15
    def setup(self, *args):
16
        try:
17
            # use a separate skeletonize_3d function on older scikit-image
18
            if Version(skimage.__version__) < Version('0.16.0'):
19
                self.skeletonize = morphology.skeletonize_3d
20
            else:
21
                self.skeletonize = morphology.skeletonize
22
        except AttributeError:
23
            raise NotImplementedError("3d skeletonize unavailable")
24

25
        # we stack the horse data 5 times to get an example volume
26
        self.image = np.stack(5 * [util.invert(data.horse())])
27

28
    def time_skeletonize(self):
29
        self.skeletonize(self.image)
30

31
    def peakmem_reference(self, *args):
32
        """Provide reference for memory measurement with empty benchmark.
33

34
        Peakmem benchmarks measure the maximum amount of RAM used by a
35
        function. However, this maximum also includes the memory used
36
        during the setup routine (as of asv 0.2.1; see [1]_).
37
        Measuring an empty peakmem function might allow us to disambiguate
38
        between the memory used by setup and the memory used by target (see
39
        other ``peakmem_`` functions below).
40

41
        References
42
        ----------
43
        .. [1]: https://asv.readthedocs.io/en/stable/writing_benchmarks.html#peak-memory
44
        """
45
        pass
46

47
    def peakmem_skeletonize(self):
48
        self.skeletonize(self.image)
49

50

51
# For binary morphology all functions ultimately are based on a single erosion
52
# function in the scipy.ndimage C code, so only benchmark binary_erosion here.
53

54

55
class BinaryMorphology2D:
56
    # skip rectangle as roughly equivalent to square
57
    param_names = ["shape", "footprint", "radius", "decomposition"]
58
    params = [
59
        ((512, 512),),
60
        ("square", "diamond", "octagon", "disk", "ellipse", "star"),
61
        (1, 3, 5, 15, 25, 40),
62
        (None, "sequence", "separable", "crosses"),
63
    ]
64

65
    def setup(self, shape, footprint, radius, decomposition):
66
        rng = np.random.default_rng(123)
67
        # Make an image that is mostly True, with random isolated False areas
68
        # (so it will not become fully False for any of the footprints).
69
        self.image = rng.standard_normal(shape) < 3.5
70
        fp_func = getattr(morphology, footprint)
71
        allow_sequence = ("rectangle", "square", "diamond", "octagon", "disk")
72
        allow_separable = ("rectangle", "square")
73
        allow_crosses = ("disk", "ellipse")
74
        allow_decomp = tuple(
75
            set(allow_sequence) | set(allow_separable) | set(allow_crosses)
76
        )
77
        footprint_kwargs = {}
78
        if decomposition == "sequence" and footprint not in allow_sequence:
79
            raise NotImplementedError("decomposition unimplemented")
80
        elif decomposition == "separable" and footprint not in allow_separable:
81
            raise NotImplementedError("separable decomposition unavailable")
82
        elif decomposition == "crosses" and footprint not in allow_crosses:
83
            raise NotImplementedError("separable decomposition unavailable")
84
        if footprint in allow_decomp:
85
            footprint_kwargs["decomposition"] = decomposition
86
        if footprint in ["rectangle", "square"]:
87
            size = 2 * radius + 1
88
            self.footprint = fp_func(size, **footprint_kwargs)
89
        elif footprint in ["diamond", "disk"]:
90
            self.footprint = fp_func(radius, **footprint_kwargs)
91
        elif footprint == "star":
92
            # set a so bounding box size is approximately 2*radius + 1
93
            # size will be 2*a + 1 + 2*floor(a / 2)
94
            a = max((2 * radius) // 3, 1)
95
            self.footprint = fp_func(a, **footprint_kwargs)
96
        elif footprint == "octagon":
97
            # overall size is m + 2 * n
98
            # so choose m = n so that overall size is ~ 2*radius + 1
99
            m = n = max((2 * radius) // 3, 1)
100
            self.footprint = fp_func(m, n, **footprint_kwargs)
101
        elif footprint == "ellipse":
102
            if radius > 1:
103
                # make somewhat elliptical
104
                self.footprint = fp_func(radius - 1, radius + 1, **footprint_kwargs)
105
            else:
106
                self.footprint = fp_func(radius, radius, **footprint_kwargs)
107

108
    def time_erosion(self, shape, footprint, radius, *args):
109
        morphology.binary_erosion(self.image, self.footprint)
110

111

112
class BinaryMorphology3D:
113
    # skip rectangle as roughly equivalent to square
114
    param_names = ["shape", "footprint", "radius", "decomposition"]
115
    params = [
116
        ((128, 128, 128),),
117
        ("ball", "cube", "octahedron"),
118
        (1, 3, 5, 10),
119
        (None, "sequence", "separable"),
120
    ]
121

122
    def setup(self, shape, footprint, radius, decomposition):
123
        rng = np.random.default_rng(123)
124
        # make an image that is mostly True, with a few isolated False areas
125
        self.image = rng.standard_normal(shape) > -3
126
        fp_func = getattr(morphology, footprint)
127
        allow_decomp = ("cube", "octahedron", "ball")
128
        allow_separable = ("cube",)
129
        if decomposition == "separable" and footprint != "cube":
130
            raise NotImplementedError("separable unavailable")
131
        footprint_kwargs = {}
132
        if decomposition is not None and footprint not in allow_decomp:
133
            raise NotImplementedError("decomposition unimplemented")
134
        elif decomposition == "separable" and footprint not in allow_separable:
135
            raise NotImplementedError("separable decomposition unavailable")
136
        if footprint in allow_decomp:
137
            footprint_kwargs["decomposition"] = decomposition
138
        if footprint == "cube":
139
            size = 2 * radius + 1
140
            self.footprint = fp_func(size, **footprint_kwargs)
141
        elif footprint in ["ball", "octahedron"]:
142
            self.footprint = fp_func(radius, **footprint_kwargs)
143

144
    def time_erosion(self, shape, footprint, radius, *args):
145
        morphology.binary_erosion(self.image, self.footprint)
146

147

148
class IsotropicMorphology2D:
149
    # skip rectangle as roughly equivalent to square
150
    param_names = ["shape", "radius"]
151
    params = [
152
        ((512, 512),),
153
        (1, 3, 5, 15, 25, 40),
154
    ]
155

156
    def setup(self, shape, radius):
157
        rng = np.random.default_rng(123)
158
        # Make an image that is mostly True, with random isolated False areas
159
        # (so it will not become fully False for any of the footprints).
160
        self.image = rng.standard_normal(shape) < 3.5
161

162
    def time_erosion(self, shape, radius, *args):
163
        morphology.isotropic_erosion(self.image, radius)
164

165

166
# Repeat the same footprint tests for grayscale morphology
167
# just need to call morphology.erosion instead of morphology.binary_erosion
168

169

170
class GrayMorphology2D(BinaryMorphology2D):
171
    def time_erosion(self, shape, footprint, radius, *args):
172
        morphology.erosion(self.image, self.footprint)
173

174

175
class GrayMorphology3D(BinaryMorphology3D):
176
    def time_erosion(self, shape, footprint, radius, *args):
177
        morphology.erosion(self.image, self.footprint)
178

179

180
class GrayReconstruction:
181
    # skip rectangle as roughly equivalent to square
182
    param_names = ["shape", "dtype"]
183
    params = [
184
        ((10, 10), (64, 64), (1200, 1200), (96, 96, 96)),
185
        (np.uint8, np.float32, np.float64),
186
    ]
187

188
    def setup(self, shape, dtype):
189
        rng = np.random.default_rng(123)
190
        # make an image that is mostly True, with a few isolated False areas
191
        rvals = rng.integers(1, 255, size=shape).astype(dtype=dtype)
192

193
        roi1 = tuple(slice(s // 4, s // 2) for s in rvals.shape)
194
        roi2 = tuple(slice(s // 2 + 1, (3 * s) // 4) for s in rvals.shape)
195
        seed = np.full(rvals.shape, 1, dtype=dtype)
196
        seed[roi1] = rvals[roi1]
197
        seed[roi2] = rvals[roi2]
198

199
        # create a mask with a couple of square regions set to seed maximum
200
        mask = np.full(seed.shape, 1, dtype=dtype)
201
        mask[roi1] = 255
202
        mask[roi2] = 255
203

204
        self.seed = seed
205
        self.mask = mask
206

207
    def time_reconstruction(self, shape, dtype):
208
        morphology.reconstruction(self.seed, self.mask)
209

210
    def peakmem_reference(self, *args):
211
        """Provide reference for memory measurement with empty benchmark.
212

213
        Peakmem benchmarks measure the maximum amount of RAM used by a
214
        function. However, this maximum also includes the memory used
215
        during the setup routine (as of asv 0.2.1; see [1]_).
216
        Measuring an empty peakmem function might allow us to disambiguate
217
        between the memory used by setup and the memory used by target (see
218
        other ``peakmem_`` functions below).
219

220
        References
221
        ----------
222
        .. [1]: https://asv.readthedocs.io/en/stable/writing_benchmarks.html#peak-memory  # noqa
223
        """
224
        pass
225

226
    def peakmem_reconstruction(self, shape, dtype):
227
        morphology.reconstruction(self.seed, self.mask)
228

229

230
class LocalMaxima:
231
    param_names = ["connectivity", "allow_borders"]
232
    params = [(1, 2), (False, True)]
233

234
    def setup(self, *args):
235
        # Natural image with small extrema
236
        self.image = data.moon()
237

238
    def time_2d(self, connectivity, allow_borders):
239
        morphology.local_maxima(
240
            self.image, connectivity=connectivity, allow_borders=allow_borders
241
        )
242

243
    def peakmem_reference(self, *args):
244
        """Provide reference for memory measurement with empty benchmark.
245

246
        .. [1] https://asv.readthedocs.io/en/stable/writing_benchmarks.html#peak-memory
247
        """
248
        pass
249

250
    def peakmem_2d(self, connectivity, allow_borders):
251
        morphology.local_maxima(
252
            self.image, connectivity=connectivity, allow_borders=allow_borders
253
        )
254

255

256
class RemoveObjectsByDistance:
257

258
    param_names = ["min_distance"]
259
    params = [5, 100]
260

261
    def setup(self, *args):
262
        image = data.hubble_deep_field()
263
        image = color.rgb2gray(image)
264
        objects = image > 0.18  # Chosen with threshold_li
265
        self.labels, _ = scipy.ndimage.label(objects)
266

267
    def time_remove_near_objects(self, min_distance):
268
        morphology.remove_objects_by_distance(self.labels, min_distance=min_distance)
269

270
    def peakmem_reference(self, *args):
271
        """Provide reference for memory measurement with empty benchmark.
272

273
        Peakmem benchmarks measure the maximum amount of RAM used by a
274
        function. However, this maximum also includes the memory used
275
        during the setup routine (as of asv 0.2.1; see [1]_).
276
        Measuring an empty peakmem function might allow us to disambiguate
277
        between the memory used by setup and the memory used by target (see
278
        other ``peakmem_`` functions below).
279

280
        References
281
        ----------
282
        .. [1]: https://asv.readthedocs.io/en/stable/writing_benchmarks.html#peak-memory
283
        """
284
        pass
285

286
    def peakmem_remove_near_objects(self, min_distance):
287
        morphology.remove_objects_by_distance(
288
            self.labels,
289
            min_distance=min_distance,
290
        )
291

Использование cookies

Мы используем файлы cookie в соответствии с Политикой конфиденциальности и Политикой использования cookies.

Нажимая кнопку «Принимаю», Вы даете АО «СберТех» согласие на обработку Ваших персональных данных в целях совершенствования нашего веб-сайта и Сервиса GitVerse, а также повышения удобства их использования.

Запретить использование cookies Вы можете самостоятельно в настройках Вашего браузера.