#!/usr/bin/env python

'''
Affine invariant feature-based image matching sample.

This sample is similar to find_obj.py, but uses the affine transformation
space sampling technique, called ASIFT [1]. While the original implementation
is based on SIFT, you can try to use SURF or ORB detectors instead. Homography RANSAC
is used to reject outliers. Threading is used for faster affine sampling.

[1] http://www.ipol.im/pub/algo/my_affine_sift/

USAGE
  asift.py [--feature=<sift|surf|orb|brisk>[-flann]] [ <image1> <image2> ]

  --feature  - Feature to use. Can be sift, surf, orb or brisk. Append '-flann'
               to the feature name to use the FLANN-based matcher instead of
               the brute-force matcher.

  Press the left mouse button on a feature point to see its matching point.
'''

import numpy as np
import cv2

# built-in modules
import itertools as it
from multiprocessing.pool import ThreadPool

# local modules
from common import Timer
from find_obj import init_feature, filter_matches, explore_match


def affine_skew(tilt, phi, img, mask=None):
    '''
    affine_skew(tilt, phi, img, mask=None) -> skew_img, skew_mask, Ai

    Ai is an affine transform matrix from skew_img to img.
    '''
    h, w = img.shape[:2]
    if mask is None:
        mask = np.zeros((h, w), np.uint8)
        mask[:] = 255
    A = np.float32([[1, 0, 0], [0, 1, 0]])
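    # in-plane rotation by phi: rotate the corners, take their bounding rect,
    # and shift the transform so the rotated image starts at the origin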
    if phi != 0.0:
        phi = np.deg2rad(phi)
        s, c = np.sin(phi), np.cos(phi)
        A = np.float32([[c, -s], [s, c]])
        corners = [[0, 0], [w, 0], [w, h], [0, h]]
        tcorners = np.int32(np.dot(corners, A.T))
        x, y, w, h = cv2.boundingRect(tcorners.reshape(1, -1, 2))
        A = np.hstack([A, [[-x], [-y]]])
        img = cv2.warpAffine(img, A, (w, h), flags=cv2.INTER_LINEAR, borderMode=cv2.BORDER_REPLICATE)
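    # simulate the tilt: blur along x to avoid aliasing, then shrink the image
    # horizontally by a factor of 1/tilt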
    if tilt != 1.0:
        s = 0.8*np.sqrt(tilt*tilt - 1)
        img = cv2.GaussianBlur(img, (0, 0), sigmaX=s, sigmaY=0.01)
        img = cv2.resize(img, (0, 0), fx=1.0/tilt, fy=1.0, interpolation=cv2.INTER_NEAREST)
        A[0] /= tilt
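    # warp the mask with the same transform so that detection is restricted
    # to pixels that come from the original image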
    if phi != 0.0 or tilt != 1.0:
        h, w = img.shape[:2]
        mask = cv2.warpAffine(mask, A, (w, h), flags=cv2.INTER_NEAREST)
    Ai = cv2.invertAffineTransform(A)
    return img, mask, Ai


def affine_detect(detector, img, mask=None, pool=None):
    '''
    affine_detect(detector, img, mask=None, pool=None) -> keypoints, descrs

    Apply a set of affine transformations to the image, detect keypoints and
    reproject them into the initial image coordinates.
    See http://www.ipol.im/pub/algo/my_affine_sift/ for the details.

    A ThreadPool object may be passed to speed up the computation.
    '''
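    # tilt/rotation sampling grid of the ASIFT scheme: tilts t = 2^(k/2) for
    # k = 1..5, and for each tilt, rotations phi in steps of 72/t degrees;
    # the first entry (t=1, phi=0) is the unwarped image itself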
    params = [(1.0, 0.0)]
    for t in 2**(0.5*np.arange(1, 6)):
        for phi in np.arange(0, 180, 72.0 / t):
            params.append((t, phi))

    def f(p):
        t, phi = p
        timg, tmask, Ai = affine_skew(t, phi, img)
        keypoints, descrs = detector.detectAndCompute(timg, tmask)
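        # reproject the keypoints from the skewed image back into the
        # coordinates of the original image using the inverse transform Ai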
        for kp in keypoints:
            x, y = kp.pt
            kp.pt = tuple(np.dot(Ai, (x, y, 1)))
        if descrs is None:
            descrs = []
        return keypoints, descrs

    keypoints, descrs = [], []
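    # run the per-view detection serially or on the supplied thread pool; the
    # heavy work happens inside OpenCV's native code, so threads can overlap it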
    if pool is None:
        ires = it.imap(f, params)
    else:
        ires = pool.imap(f, params)

    for i, (k, d) in enumerate(ires):
        print 'affine sampling: %d / %d\r' % (i+1, len(params)),
        keypoints.extend(k)
        descrs.extend(d)

    print
    return keypoints, np.array(descrs)

if __name__ == '__main__':
    print __doc__

    import sys, getopt
    opts, args = getopt.getopt(sys.argv[1:], '', ['feature='])
    opts = dict(opts)
    feature_name = opts.get('--feature', 'sift-flann')
    try:
        fn1, fn2 = args
    except ValueError:
        fn1 = '../data/aero1.jpg'
        fn2 = '../data/aero3.jpg'

    img1 = cv2.imread(fn1, 0)
    img2 = cv2.imread(fn2, 0)
    detector, matcher = init_feature(feature_name)

    if img1 is None:
        print 'Failed to load fn1:', fn1
        sys.exit(1)

    if img2 is None:
        print 'Failed to load fn2:', fn2
        sys.exit(1)

    if detector is None:
        print 'unknown feature:', feature_name
        sys.exit(1)

    print 'using', feature_name

    pool = ThreadPool(processes=cv2.getNumberOfCPUs())
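    # detect keypoints and descriptors over all simulated affine views of both
    # images; the work is distributed over the thread pool created above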
    kp1, desc1 = affine_detect(detector, img1, pool=pool)
    kp2, desc2 = affine_detect(detector, img2, pool=pool)
    print 'img1 - %d features, img2 - %d features' % (len(kp1), len(kp2))

    def match_and_draw(win):
        with Timer('matching'):
            raw_matches = matcher.knnMatch(desc1, trainDescriptors=desc2, k=2)  # 2 nearest neighbours per query descriptor
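        # filter_matches (from find_obj) applies a ratio test to discard
        # ambiguous matches and returns the matched point coordinates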
        p1, p2, kp_pairs = filter_matches(kp1, kp2, raw_matches)
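        # at least 4 point correspondences are needed to estimate a homography;
        # RANSAC with a 5-pixel reprojection threshold separates inliers from outliers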
        if len(p1) >= 4:
            H, status = cv2.findHomography(p1, p2, cv2.RANSAC, 5.0)
            print '%d / %d  inliers/matched' % (np.sum(status), len(status))
            # do not draw outliers (there will be a lot of them)
            kp_pairs = [kpp for kpp, flag in zip(kp_pairs, status) if flag]
        else:
            H, status = None, None
            print '%d matches found, not enough for homography estimation' % len(p1)

        vis = explore_match(win, img1, img2, kp_pairs, None, H)


    match_and_draw('affine find_obj')
    cv2.waitKey()
    cv2.destroyAllWindows()
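
# Example invocation (the feature choice is illustrative; with no image
# arguments the default aerial pair above is used):
#   python asift.py --feature=orb-flann ../data/aero1.jpg ../data/aero3.jpg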