Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
XFeatBackend.py
35from typing import List, Optional, Tuple
36import numpy as np
37import time
38import sys
39import os
40from pathlib import Path
41import torch
42import torch.nn.functional as F
43
44from visp.core import ImageRGBa
45
def __init__(self, kps, descriptors):
    """Container pairing detected keypoints with their descriptors.

    :param kps: keypoint coordinates; when non-empty, must be an (N, 2) numpy array.
    :param descriptors: per-keypoint descriptor tensor, one row per keypoint.
    """
    if len(kps) > 0:
        # Non-empty detections must be an (N, 2) coordinate array.
        assert isinstance(kps, np.ndarray) and kps.ndim == 2 and kps.shape[1] == 2
    assert len(kps) == len(descriptors)
    self.keypoints: np.ndarray = kps
    self.descriptors: torch.Tensor = descriptors
52
def split(self, i1, i2) -> Tuple['XFeatRepresentation', 'XFeatRepresentation']:
    """Partition this representation into two by the index arrays i1 and i2.

    An empty index array yields None for the corresponding part.

    :param i1: indices selecting the first partition.
    :param i2: indices selecting the second partition.
    :return: tuple (part1, part2), each an XFeatRepresentation or None.
    """
    first = (
        XFeatRepresentation(self.keypoints[i1], self.descriptors[i1])
        if len(i1) > 0 else None
    )
    second = (
        XFeatRepresentation(self.keypoints[i2], self.descriptors[i2])
        if len(i2) > 0 else None
    )
    return first, second
65
def copy(self) -> 'XFeatRepresentation':
    """Return an independent copy: keypoint array duplicated, descriptor tensor cloned."""
    kps_dup = self.keypoints.copy()
    desc_dup = self.descriptors.clone()
    return XFeatRepresentation(kps_dup, desc_dup)
68
def merged_with(self, r: 'XFeatRepresentation') -> 'XFeatRepresentation':
    """Return a new representation with this one's features followed by r's.

    :param r: representation to append; neither operand is modified.
    """
    merged_kps = np.concatenate((self.keypoints, r.keypoints), axis=0)
    merged_desc = torch.cat((self.descriptors, r.descriptors), dim=0)
    return XFeatRepresentation(merged_kps, merged_desc)
73
def __init__(self, kps, descriptors, scales):
    """Dense (XFeat*) representation: keypoints, descriptors and per-point scales.

    :param kps: keypoint coordinates (see base class constraints).
    :param descriptors: per-keypoint descriptor tensor.
    :param scales: per-keypoint scale values.
    """
    super().__init__(kps, descriptors)
    self.scales = scales
    # NOTE: these dict entries alias the attributes above — no copies are made.
    self.repr = {
        'keypoints': kps,
        'descriptors': descriptors,
        'scale': scales,
    }
83
def split(self, i1, i2):
    """Split into two XFeatStarRepresentations selected by index arrays i1 and i2.

    BUGFIX: the original returned a bare generator expression, which could be
    unpacked only once and not indexed or re-iterated; return a real tuple so
    the result matches the base-class `split` contract.

    NOTE(review): unlike the base-class split(), empty index arrays produce
    empty representations here rather than None — confirm callers expect this.

    :param i1: indices selecting the first partition.
    :param i2: indices selecting the second partition.
    :return: tuple of two XFeatStarRepresentation instances.
    """
    repr1 = {k: v[i1] for k, v in self.repr.items()}
    repr2 = {k: v[i2] for k, v in self.repr.items()}
    return tuple(
        XFeatStarRepresentation(r['keypoints'], r['descriptors'], r['scale'])
        for r in (repr1, repr2)
    )
88
def add(self, r: 'XFeatStarRepresentation'):
    """In-place merge is unsupported for the dense representation.

    Raises NotImplementedError (a subclass of RuntimeError, so existing
    callers catching RuntimeError still work) — the idiomatic exception
    for an unimplemented operation.
    """
    raise NotImplementedError('Not implemented')
91
92
def __init__(self, num_points: int, min_cos: float, use_dense=False, scale_factor = 1.0):
    """XFeat-based detection/matching backend.

    Tries to fetch the pretrained XFeat model from torch hub; on failure,
    falls back to importing the sources pointed to by the XFEAT_PATH
    environment variable.

    :param num_points: maximum number of keypoints to detect (top_k).
    :param min_cos: minimum cosine similarity for descriptor matching.
    :param use_dense: whether to use the dense (XFeat*) detection/matching path.
    :param scale_factor: image rescaling factor applied before detection.
    :raises EnvironmentError: if the hub load fails and XFEAT_PATH is unset
        or points to a non-existent folder.
    """
    try:
        self.xfeat = torch.hub.load('verlab/accelerated_features', 'XFeat', pretrained = True, top_k = 4096).eval()
    except Exception:  # narrowed from bare except: don't swallow KeyboardInterrupt/SystemExit
        print('Could not load XFeat from torchhub', file=sys.stderr)
        xfeat_env_var_name = 'XFEAT_PATH'
        print(f'Looking at {xfeat_env_var_name} environment variable to load XFeat!', file=sys.stderr)

        if xfeat_env_var_name not in os.environ:
            raise EnvironmentError('you should set the value of the environment variable XFEAT_PATH to the folder containing the xfeat sources')

        xfeat_path = Path(os.environ[xfeat_env_var_name]).absolute()
        if not xfeat_path.exists():
            raise EnvironmentError(f'XFeat folder {str(xfeat_path)} does not exist')

        # Make the local XFeat sources importable, then build the model.
        sys.path.append(str(xfeat_path))
        from modules.xfeat import XFeat
        self.xfeat = XFeat().eval()

    self.k = num_points
    self.min_cos = min_cos
    self.use_dense = use_dense
    self.scale_factor = scale_factor
117
def load_settings(self, d: dict):
    """Update backend parameters from a settings dictionary.

    :param d: must contain 'numPoints' and 'minCos'; 'scaleFactor' and
        'useDense' are optional ('useDense' keeps the current value when absent).
    """
    self.k, self.min_cos = d['numPoints'], d['minCos']
    self.scale_factor = d.get('scaleFactor', 1.0)
    if 'useDense' in d:
        self.use_dense = d['useDense']
123
def refine_dense(self, feats1, feats2, matches, fine_conf=0.0):
    """Filter coarse dense matches using the fine matcher's confidence.

    :param feats1: dense features of the first image.
    :param feats2: dense features of the second image.
    :param matches: coarse match result; matches[0] holds (idx0, idx1) index pairs.
    :param fine_conf: minimum confidence for keeping a match.
    :return: (idx0, idx1) restricted to matches above the confidence threshold.
    """
    idx0, idx1 = matches[0]

    # Run the fine matcher on concatenated matched feature pairs to get offsets.
    paired = torch.cat([feats1[idx0], feats2[idx1]], dim=-1)
    offsets = self.xfeat.net.fine_matcher(paired)

    # Confidence = peak of the (temperature-scaled) softmax over offsets.
    conf = F.softmax(offsets * 3, dim=-1).max(dim=-1)[0]
    keep = conf > fine_conf
    return idx0[keep], idx1[keep]
134
def match_dense(self, r1: torch.Tensor, descriptors: torch.Tensor, min_cossim=None):
    """Dense matching: coarse batched match followed by fine-matcher refinement.

    :param r1: dense features of the first image.
    :param descriptors: dense features of the second image.
    :param min_cossim: cosine-similarity threshold; defaults to self.min_cos.
    :return: (indices_1, indices_2) index tensors of refined correspondences.
    """
    threshold = self.min_cos if min_cossim is None else min_cossim
    with torch.no_grad():
        coarse = self.xfeat.batch_match(r1.unsqueeze(0), descriptors.unsqueeze(0), threshold)
        refined = self.refine_dense(r1, descriptors, matches=coarse)
    return refined
142
def match(self, r1: torch.Tensor, descriptors: torch.Tensor, min_cos=None):
    """Match two feature sets and return index arrays of the correspondences.

    :param r1: first feature set (None yields an empty match).
    :param descriptors: second feature set.
    :param min_cos: cosine-similarity threshold; defaults to self.min_cos.
    :return: ([], []) when r1 is None, otherwise the matcher's index pair.
    """
    if r1 is None:
        return [], []
    threshold = self.min_cos if min_cos is None else min_cos

    def _drop_batch_dim(t):
        # Accept an optional leading batch dimension, which must be 1.
        if len(t.size()) == 3:
            assert t.size(0) == 1
            return t[0]
        return t

    r1 = _drop_batch_dim(r1)
    descriptors = _drop_batch_dim(descriptors)

    if self.use_dense:
        return self.match_dense(r1, descriptors, threshold)
    return self.xfeat.match(r1, descriptors, min_cossim=threshold)
160
def computeDense(self, image: ImageRGBa, top_k = None):
    """Run XFeat* dense detection on an RGBa image.

    BUGFIX: the original interpolate call passed antialias=True and
    align_corners=True together with mode='nearest'; F.interpolate rejects
    both options for nearest mode, so the call raised ValueError whenever
    scale_factor != 1. Use plain nearest resampling (consistent with the
    sparse path in detect()).

    :param image: input ViSP RGBa image.
    :param top_k: maximum number of keypoints; defaults to self.k.
    :return: (keypoints as (N, 2) numpy array in original image coordinates,
        descriptor tensor, per-point scales as numpy array).
    """
    with torch.no_grad():
        if top_k is None:
            top_k = self.k
        rgb = self.xfeat.parse_input(image.numpy())
        if self.scale_factor != 1:
            t1 = time.time()
            rgb = torch.nn.functional.interpolate(rgb, scale_factor=self.scale_factor, mode='nearest')
            print('Interp took: ', (time.time() - t1) * 1000)

        representation = self.xfeat.detectAndComputeDense(rgb, top_k = top_k, multiscale=True)

        kps = representation['keypoints'][0]
        if self.scale_factor != 1:
            # Map keypoints back to the original (unscaled) image resolution.
            kps /= self.scale_factor
        return kps.cpu().numpy(), representation['descriptors'][0], representation['scales'][0].cpu().numpy()
178
def detect(self, IRGB) -> XFeatRepresentation:
    """Detect keypoints and descriptors on an RGBa image.

    Uses the dense (XFeat*) path when self.use_dense is set, otherwise the
    sparse detectAndCompute path. Keypoints are returned in the original
    (unscaled) image coordinates.

    :param IRGB: input ViSP RGBa image.
    :return: XFeatStarRepresentation (dense) or XFeatRepresentation (sparse);
        the sparse path degrades to an empty representation when detection fails.
    """
    if self.use_dense:
        return XFeatStarRepresentation(*self.computeDense(IRGB, self.k))

    # HxWx4 uint8 -> 1x3xHxW float in [0, 1], dropping the alpha channel.
    rgb = torch.from_numpy(IRGB.numpy()[None, ..., :3]).to(self.xfeat.dev).permute((0, 3, 1, 2)) / 255.0

    if self.scale_factor != 1:
        rgb = torch.nn.functional.interpolate(rgb, scale_factor=self.scale_factor, mode='nearest')
    try:
        rep = self.xfeat.detectAndCompute(rgb, top_k = self.k)[0]
    except Exception:  # narrowed from bare except:; keep the best-effort empty fallback, but say so
        print('XFeat detectAndCompute failed, returning empty representation', file=sys.stderr)
        rep = {
            'keypoints': torch.empty((0,)),
            'descriptors': torch.empty((0,))
        }
    kps = rep['keypoints'].cpu().numpy()
    if self.scale_factor != 1:
        # Map keypoints back to the original (unscaled) image resolution.
        kps /= self.scale_factor
    return XFeatRepresentation(kps, rep['descriptors'])
refine_dense(self, feats1, feats2, matches, fine_conf=0.0)
match_dense(self, torch.Tensor r1, torch.Tensor descriptors, min_cossim=None)
computeDense(self, ImageRGBa image, top_k=None)
match(self, torch.Tensor r1, torch.Tensor descriptors, min_cos=None)
__init__(self, int num_points, float min_cos, use_dense=False, scale_factor=1.0)
'XFeatRepresentation' merged_with(self, 'XFeatRepresentation' r)
Tuple[ 'XFeatRepresentation', 'XFeatRepresentation'] split(self, i1, i2)