scikit-image
343 lines · 11.7 KB
1import numpy as np
2from scipy.ndimage import maximum_filter, minimum_filter, convolve
3
4from ..transform import integral_image
5from .corner import structure_tensor
6from ..morphology import octagon, star
7from .censure_cy import _censure_dob_loop
8from ..feature.util import (
9FeatureDetector,
10_prepare_grayscale_input_2D,
11_mask_border_keypoints,
12)
13from .._shared.utils import check_nD
14
# The paper (Reference [1]) mentions the sizes of the Octagon shaped filter
# kernel for the first seven scales only. The sizes of the later scales
# have been extrapolated based on the following statement in the paper.
# "These octagons scale linearly and were experimentally chosen to correspond
# to the seven DOBs described in the previous section."
# Each entry is an (m, n) pair passed to ``morphology.octagon`` for the outer
# octagon of the bi-level filter at scale index i + 1 (see _octagon_kernel).
OCTAGON_OUTER_SHAPE = [
    (5, 2),
    (5, 3),
    (7, 3),
    (9, 4),
    (9, 7),
    (13, 7),
    (15, 10),
    (15, 11),
    (15, 12),
    (17, 13),
    (17, 14),
]
# (m, n) pairs for the inner octagon at the matching scale index.
OCTAGON_INNER_SHAPE = [
    (3, 0),
    (3, 1),
    (3, 2),
    (5, 2),
    (5, 3),
    (5, 4),
    (5, 5),
    (7, 5),
    (7, 6),
    (9, 6),
    (9, 7),
]

# The sizes for the STAR shaped filter kernel for different scales have been
# taken from the OpenCV implementation.
STAR_SHAPE = [1, 2, 3, 4, 6, 8, 11, 12, 16, 22, 23, 32, 45, 46, 64, 90, 128]
# Each entry is a pair of indices into STAR_SHAPE: (outer star size index,
# inner star size index) for the bi-level star filter at that scale.
STAR_FILTER_SHAPE = [
    (1, 0),
    (3, 1),
    (4, 2),
    (5, 3),
    (7, 4),
    (8, 5),
    (9, 6),
    (11, 8),
    (13, 10),
    (14, 11),
    (15, 12),
    (16, 14),
]
64
65
def _filter_image(image, min_scale, max_scale, mode):
    """Stack the bi-level filter responses of ``image`` for every scale.

    Parameters
    ----------
    image : 2D ndarray
        Grayscale input image.
    min_scale, max_scale : int
        Scale range; one response slice is computed for each scale in
        ``[min_scale, max_scale]``.
    mode : {'dob', 'octagon', 'star'}
        Lower-cased bi-level filter type (lower-cased and validated by
        ``CENSURE.__init__``).

    Returns
    -------
    response : (rows, cols, max_scale - min_scale + 1) ndarray of float64
        Filter response stack, one 2D slice per scale.
    """
    response = np.zeros(
        (image.shape[0], image.shape[1], max_scale - min_scale + 1), dtype=np.float64
    )

    if mode == 'dob':
        # make response[:, :, i] contiguous memory block
        # (the Cython loop writes each scale slice; rearranging the strides
        # lays the slices out contiguously without copying)
        item_size = response.itemsize
        response.strides = (
            item_size * response.shape[1],
            item_size,
            item_size * response.shape[0] * response.shape[1],
        )

        # The DOB (difference-of-boxes) response is computed from the
        # integral image in the Cython helper.
        integral_img = integral_image(image)

        for i in range(max_scale - min_scale + 1):
            n = min_scale + i

            # Constant multipliers for the outer region and the inner region
            # of the bi-level filters with the constraint of keeping the
            # DC bias 0.
            inner_weight = 1.0 / (2 * n + 1) ** 2
            outer_weight = 1.0 / (12 * n**2 + 4 * n)

            _censure_dob_loop(
                n, integral_img, response[:, :, i], inner_weight, outer_weight
            )

    # NOTE : For the Octagon shaped filter, we implemented and evaluated the
    # slanted integral image based image filtering but the performance was
    # more or less equal to image filtering using
    # scipy.ndimage.filters.convolve(). Hence we have decided to use the
    # later for a much cleaner implementation.
    elif mode == 'octagon':
        # TODO : Decide the shapes of Octagon filters for scales > 7
        for i in range(max_scale - min_scale + 1):
            # Shape tables are indexed from scale 1, hence the ``- 1``.
            mo, no = OCTAGON_OUTER_SHAPE[min_scale + i - 1]
            mi, ni = OCTAGON_INNER_SHAPE[min_scale + i - 1]
            response[:, :, i] = convolve(image, _octagon_kernel(mo, no, mi, ni))

    elif mode == 'star':
        for i in range(max_scale - min_scale + 1):
            # STAR_FILTER_SHAPE holds (outer, inner) indices into STAR_SHAPE.
            m = STAR_SHAPE[STAR_FILTER_SHAPE[min_scale + i - 1][0]]
            n = STAR_SHAPE[STAR_FILTER_SHAPE[min_scale + i - 1][1]]
            response[:, :, i] = convolve(image, _star_kernel(m, n))

    return response
115
116
def _octagon_kernel(mo, no, mi, ni):
    """Build the zero-DC bi-level octagon kernel.

    The kernel is uniformly positive over the ring between the outer
    octagon ``octagon(mo, no)`` and the inner octagon ``octagon(mi, ni)``,
    and uniformly negative inside the inner octagon, with weights chosen
    so the kernel sums to zero.
    """
    outer_side = mo + 2 * no
    inner_side = mi + 2 * ni

    # Pixel counts of the two octagons (square minus the cut corners).
    outer_area = outer_side**2 - 2 * no * (no + 1)
    inner_area = inner_side**2 - 2 * ni * (ni + 1)

    ring_weight = 1.0 / (outer_area - inner_area)
    core_weight = 1.0 / inner_area

    # Embed the inner octagon centred inside the outer octagon's bounding box.
    margin = (outer_side - inner_side) // 2
    inner_mask = np.zeros((outer_side, outer_side))
    inner_mask[margin:-margin, margin:-margin] = octagon(mi, ni)

    return ring_weight * octagon(mo, no) - (ring_weight + core_weight) * inner_mask
128
129
def _star_kernel(m, n):
    """Build the zero-DC bi-level star kernel.

    Positive and uniform over the region between the outer star ``star(m)``
    and the inner star ``star(n)``, negative and uniform inside the inner
    star; the weights balance so the kernel sums to zero.
    """
    margin = m + m // 2 - n - n // 2
    outer_mask = star(m)
    inner_mask = np.zeros_like(outer_mask)
    inner_mask[margin:-margin, margin:-margin] = star(n)

    # Normalise each region by its pixel count to keep the DC response zero.
    ring_weight = 1.0 / (np.sum(outer_mask - inner_mask))
    core_weight = 1.0 / np.sum(inner_mask)

    return ring_weight * outer_mask - (ring_weight + core_weight) * inner_mask
139
140
def _suppress_lines(feature_mask, image, sigma, line_threshold):
    """Clear mask entries whose structure tensor indicates an edge.

    Points where ``trace(A)**2 > line_threshold * det(A)`` (a high ratio of
    principal curvatures) are set to ``False`` in ``feature_mask`` in place.
    """
    Arr, Arc, Acc = structure_tensor(image, sigma, order='rc')
    trace = Arr + Acc
    det = Arr * Acc - Arc**2
    feature_mask[trace**2 > line_threshold * det] = False
144
145
class CENSURE(FeatureDetector):
    """CENSURE keypoint detector.

    Parameters
    ----------
    min_scale : int, optional
        Minimum scale to extract keypoints from.
    max_scale : int, optional
        Maximum scale to extract keypoints from. The keypoints will be
        extracted from all the scales except the first and the last i.e.
        from the scales in the range [min_scale + 1, max_scale - 1]. The filter
        sizes for different scales is such that the two adjacent scales
        comprise of an octave.
    mode : {'DoB', 'Octagon', 'STAR'}, optional
        Type of bi-level filter used to get the scales of the input image.
        Possible values are 'DoB', 'Octagon' and 'STAR'. The three modes
        represent the shape of the bi-level filters i.e. box(square), octagon
        and star respectively. For instance, a bi-level octagon filter consists
        of a smaller inner octagon and a larger outer octagon with the filter
        weights being uniformly negative in both the inner octagon while
        uniformly positive in the difference region. Use STAR and Octagon for
        better features and DoB for better performance.
    non_max_threshold : float, optional
        Threshold value used to suppress maximas and minimas with a weak
        magnitude response obtained after Non-Maximal Suppression.
    line_threshold : float, optional
        Threshold for rejecting interest points which have ratio of principal
        curvatures greater than this value.

    Attributes
    ----------
    keypoints : (N, 2) array
        Keypoint coordinates as ``(row, col)``.
    scales : (N,) array
        Corresponding scales.

    References
    ----------
    .. [1] Motilal Agrawal, Kurt Konolige and Morten Rufus Blas
           "CENSURE: Center Surround Extremas for Realtime Feature
           Detection and Matching",
           https://link.springer.com/chapter/10.1007/978-3-540-88693-8_8
           :DOI:`10.1007/978-3-540-88693-8_8`

    .. [2] Adam Schmidt, Marek Kraft, Michal Fularz and Zuzanna Domagala
           "Comparative Assessment of Point Feature Detectors and
           Descriptors in the Context of Robot Navigation"
           http://yadda.icm.edu.pl/yadda/element/bwmeta1.element.baztech-268aaf28-0faf-4872-a4df-7e2e61cb364c/c/Schmidt_comparative.pdf
           :DOI:`10.1.1.465.1117`

    Examples
    --------
    >>> from skimage.data import astronaut
    >>> from skimage.color import rgb2gray
    >>> from skimage.feature import CENSURE
    >>> img = rgb2gray(astronaut()[100:300, 100:300])
    >>> censure = CENSURE()
    >>> censure.detect(img)
    >>> censure.keypoints
    array([[  4, 148],
           [ 12,  73],
           [ 21, 176],
           [ 91,  22],
           [ 93,  56],
           [ 94,  22],
           [ 95,  54],
           [100,  51],
           [103,  51],
           [106,  67],
           [108,  15],
           [117,  20],
           [122,  60],
           [125,  37],
           [129,  37],
           [133,  76],
           [145,  44],
           [146,  94],
           [150, 114],
           [153,  33],
           [154, 156],
           [155, 151],
           [184,  63]])
    >>> censure.scales
    array([2, 6, 6, 2, 4, 3, 2, 3, 2, 6, 3, 2, 2, 3, 2, 2, 2, 3, 2, 2, 4, 2,
           2])

    """

    def __init__(
        self,
        min_scale=1,
        max_scale=7,
        mode='DoB',
        non_max_threshold=0.15,
        line_threshold=10,
    ):
        # Mode is stored lower-cased; _filter_image and detect() rely on this.
        mode = mode.lower()
        if mode not in ('dob', 'octagon', 'star'):
            raise ValueError("`mode` must be one of 'DoB', 'Octagon', 'STAR'.")

        # At least 3 scales are required because keypoints are only taken
        # from the interior scales [min_scale + 1, max_scale - 1].
        if min_scale < 1 or max_scale < 1 or max_scale - min_scale < 2:
            raise ValueError(
                'The scales must be >= 1 and the number of ' 'scales should be >= 3.'
            )

        self.min_scale = min_scale
        self.max_scale = max_scale
        self.mode = mode
        self.non_max_threshold = non_max_threshold
        self.line_threshold = line_threshold

        # Filled in by detect(); None until then.
        self.keypoints = None
        self.scales = None

    def detect(self, image):
        """Detect CENSURE keypoints along with the corresponding scale.

        Parameters
        ----------
        image : 2D ndarray
            Input image.

        """

        # (1) First we generate the required scales on the input grayscale
        # image using a bi-level filter and stack them up in `filter_response`.

        # (2) We then perform Non-Maximal suppression in 3 x 3 x 3 window on
        # the filter_response to suppress points that are neither minima or
        # maxima in 3 x 3 x 3 neighborhood. We obtain a boolean ndarray
        # `feature_mask` containing all the minimas and maximas in
        # `filter_response` as True.
        # (3) Then we suppress all the points in the `feature_mask` for which
        # the corresponding point in the image at a particular scale has the
        # ratio of principal curvatures greater than `line_threshold`.
        # (4) Finally, we remove the border keypoints and return the keypoints
        # along with its corresponding scale.

        check_nD(image, 2)

        num_scales = self.max_scale - self.min_scale

        image = np.ascontiguousarray(_prepare_grayscale_input_2D(image))

        # Generating all the scales
        filter_response = _filter_image(
            image, self.min_scale, self.max_scale, self.mode
        )

        # Suppressing points that are neither minima or maxima in their
        # 3 x 3 x 3 neighborhood to zero
        minimas = minimum_filter(filter_response, (3, 3, 3)) == filter_response
        maximas = maximum_filter(filter_response, (3, 3, 3)) == filter_response

        feature_mask = minimas | maximas
        # Drop extrema whose response magnitude is too weak.
        feature_mask[filter_response < self.non_max_threshold] = False

        for i in range(1, num_scales):
            # sigma = (window_size - 1) / 6.0, so the window covers > 99% of
            # the kernel's distribution
            # window_size = 7 + 2 * (min_scale - 1 + i)
            # Hence sigma = 1 + (min_scale - 1 + i)/ 3.0
            _suppress_lines(
                feature_mask[:, :, i],
                image,
                (1 + (self.min_scale + i - 1) / 3.0),
                self.line_threshold,
            )

        # Keep only the interior scales [min_scale + 1, max_scale - 1];
        # the scale axis of feature_mask starts at min_scale, hence the
        # ``+ self.min_scale + 1`` offset below.
        rows, cols, scales = np.nonzero(feature_mask[..., 1:num_scales])
        keypoints = np.column_stack([rows, cols])
        scales = scales + self.min_scale + 1

        # DoB keypoints need no border trimming beyond what the Cython loop
        # produced; return them directly.
        if self.mode == 'dob':
            self.keypoints = keypoints
            self.scales = scales
            return

        cumulative_mask = np.zeros(keypoints.shape[0], dtype=bool)

        # For octagon/star, discard keypoints closer to the border than the
        # filter radius at their own scale.
        if self.mode == 'octagon':
            for i in range(self.min_scale + 1, self.max_scale):
                c = (OCTAGON_OUTER_SHAPE[i - 1][0] - 1) // 2 + OCTAGON_OUTER_SHAPE[
                    i - 1
                ][1]
                cumulative_mask |= _mask_border_keypoints(image.shape, keypoints, c) & (
                    scales == i
                )
        elif self.mode == 'star':
            for i in range(self.min_scale + 1, self.max_scale):
                c = (
                    STAR_SHAPE[STAR_FILTER_SHAPE[i - 1][0]]
                    + STAR_SHAPE[STAR_FILTER_SHAPE[i - 1][0]] // 2
                )
                cumulative_mask |= _mask_border_keypoints(image.shape, keypoints, c) & (
                    scales == i
                )

        self.keypoints = keypoints[cumulative_mask]
        self.scales = scales[cumulative_mask]
344