Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
vpRBKltTracker.h
1/*
2 * ViSP, open source Visual Servoing Platform software.
3 * Copyright (C) 2005 - 2024 by Inria. All rights reserved.
4 *
5 * This software is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 * See the file LICENSE.txt at the root directory of this source
10 * distribution for additional information about the GNU GPL.
11 *
12 * For using ViSP with software that can not be combined with the GNU
13 * GPL, please contact Inria about acquiring a ViSP Professional
14 * Edition License.
15 *
16 * See https://visp.inria.fr for more information.
17 *
18 * This software was developed at:
19 * Inria Rennes - Bretagne Atlantique
20 * Campus Universitaire de Beaulieu
21 * 35042 Rennes Cedex
22 * France
23 *
24 * If you have questions regarding the use of this file, please contact
25 * Inria at visp@inria.fr
26 *
27 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
28 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
29 */
30
35#ifndef VP_RB_KLT_TRACKER_H
36#define VP_RB_KLT_TRACKER_H
37
38#include <visp3/core/vpConfig.h>
39
40#if (defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_HIGHGUI) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO))
41#define VP_HAVE_RB_KLT_TRACKER
42#endif
43
44#if defined(VP_HAVE_RB_KLT_TRACKER)
45
46#include <map>
47
48#include <visp3/rbt/vpRBFeatureTracker.h>
49#include <visp3/core/vpPoint.h>
50#include <visp3/core/vpTrackingException.h>
51#include <visp3/core/vpRobust.h>
52#include <visp3/klt/vpKltOpencv.h>
53
54#include <opencv2/core/mat.hpp>
55
56
58
71class VISP_EXPORT vpRBKltTracker : public vpRBFeatureTracker
72{
73public:
75
  /*!
   * Default virtual destructor: this tracker owns no raw resources.
   */
76 virtual ~vpRBKltTracker() = default;
77
  //! This tracker works on the grayscale image: it does not require RGB input.
78 bool requiresRGB() const VP_OVERRIDE { return false; }
79
  //! This tracker does not require a depth image to extract features.
80 bool requiresDepth() const VP_OVERRIDE { return false; }
81
  //! This tracker does not require silhouette candidate points.
82 bool requiresSilhouetteCandidates() const VP_OVERRIDE { return false; }
83
  //! Called when a tracking iteration starts. No per-iteration setup is needed for KLT.
84 void onTrackingIterStart(const vpRBFeatureTrackerInput &/*frame*/, const vpHomogeneousMatrix & /*cMo*/) VP_OVERRIDE { }
85
  //! Called after a tracking iteration has finished. Nothing to clean up for KLT.
86 void onTrackingIterEnd(const vpHomogeneousMatrix & /*cMo*/) VP_OVERRIDE { }
87
  /*!
   * Extract KLT features from the frame data and the current pose estimate.
   * Implemented in the corresponding .cpp file.
   */
88 void extractFeatures(const vpRBFeatureTrackerInput &frame, const vpRBFeatureTrackerInput &previousFrame, const vpHomogeneousMatrix &cMo) VP_OVERRIDE;
89
  /*!
   * Track the KLT features between the previous and the current frame.
   */
90 void trackFeatures(const vpRBFeatureTrackerInput &frame, const vpRBFeatureTrackerInput &previousFrame, const vpHomogeneousMatrix &cMo) VP_OVERRIDE;
91
  /*!
   * Initialize the virtual visual servoing (VVS) optimization for this feature type.
   */
92 void initVVS(const vpRBFeatureTrackerInput &frame, const vpRBFeatureTrackerInput &previousFrame, const vpHomogeneousMatrix &cMo) VP_OVERRIDE;
93
  /*!
   * Compute one VVS iteration (error vector and interaction matrix) for the given pose estimate.
   */
94 void computeVVSIter(const vpRBFeatureTrackerInput &frame, const vpHomogeneousMatrix &cMo, unsigned int iteration) VP_OVERRIDE;
95
  /*!
   * Display the tracked KLT features on the given images.
   */
96 void display(const vpCameraParameters &cam, const vpImage<unsigned char> &I, const vpImage<vpRGBa> &IRGB, const vpImage<unsigned char> &depth) const VP_OVERRIDE;
97
102
  /*!
   * Get the minimum acceptable number of tracked points. Below this count the
   * KLT tracking is considered unreliable (used as a reinitialization threshold).
   */
108 unsigned int getMinimumNumberOfPoints() const { return m_numPointsReinit; }
  //! Set the minimum acceptable number of tracked points.
109 void setMinimumNumberOfPoints(unsigned int points) { m_numPointsReinit = points; }
110
  /*!
   * Get the minimum distance that a candidate point should have to every other
   * currently tracked point in order to be added.
   */
118 double getMinimumDistanceNewPoints() const { return m_newPointsDistanceThreshold; }
  //! Set the minimum distance for accepting a new candidate point.
119 void setMinimumDistanceNewPoints(double distance) { m_newPointsDistanceThreshold = distance; }
120
  /*!
   * Return the size (in pixels) of the image border in which points are not tracked.
   */
125 unsigned int getFilteringBorderSize() const { return m_border; }
  //! Set the size (in pixels) of the image border in which points are filtered out.
126 void setFilteringBorderSize(unsigned int border) { m_border = border; }
127
  /*!
   * Get the maximum reprojection error, in pixels, above which a tracked point
   * is considered an outlier.
   */
135 double getFilteringMaxReprojectionError() const { return m_maxErrorOutliersPixels; }
  //! Set the maximum reprojection error (pixels) used for outlier filtering.
136 void setFilteringMaxReprojectionError(double maxError) { m_maxErrorOutliersPixels = maxError; }
137
  /*!
   * Returns whether the tracker should use the object mask to filter out points
   * that are unlikely to be on the object.
   */
143 bool shouldUseMask() const { return m_useMask; }
  //! Enable or disable mask-based filtering of tracked points.
144 void setShouldUseMask(bool useMask) { m_useMask = useMask; }
145
  /*!
   * Returns the minimum mask confidence that a pixel should have to be kept
   * during tracking (only meaningful when shouldUseMask() is true).
   */
151 float getMinimumMaskConfidence() const { return m_minMaskConfidence; }
152 void setMinimumMaskConfidence(float confidence)
153 {
154 if (confidence > 1.f || confidence < 0.f) {
155 throw vpException(vpException::badValue, "Mask confidence should be between 0 and 1");
156 }
157 m_minMaskConfidence = confidence;
158 }
159
  //! Get the underlying KLT tracker (read-only). Use this to inspect its settings.
163 const vpKltOpencv &getKltTracker() const { return m_klt; }
  //! Get the underlying KLT tracker. Use this to modify its settings.
169 vpKltOpencv &getKltTracker() { return m_klt; }
170
171#if defined(VISP_HAVE_NLOHMANN_JSON)
  /*!
   * Load the tracker configuration from a JSON object.
   *
   * \param j the JSON configuration. Recognized keys: "minimumNumPoints",
   * "newPointsMinPixelDistance", "maxReprojectionErrorPixels", "useMask",
   * "minMaskConfidence". Missing keys keep the current member values.
   * \throws vpException::badValue if "minMaskConfidence" is outside [0, 1]
   * (propagated from setMinimumMaskConfidence).
   */
172 virtual void loadJsonConfiguration(const nlohmann::json &j) VP_OVERRIDE
173 {
175
  // Configure the wrapped KLT tracker first via its own json conversion.
176 m_klt = j;
177
  // Each setter falls back to the current member value when the key is absent.
178 setMinimumNumberOfPoints(j.value("minimumNumPoints", m_numPointsReinit));
179 setMinimumDistanceNewPoints(j.value("newPointsMinPixelDistance", m_newPointsDistanceThreshold));
180 setFilteringMaxReprojectionError(j.value("maxReprojectionErrorPixels", m_maxErrorOutliersPixels));
181 setShouldUseMask(j.value("useMask", m_useMask));
182 setMinimumMaskConfidence(j.value("minMaskConfidence", m_minMaskConfidence));
183 }
184#endif
185
191 {
192 public:
197
199 {
200 const vpHomogeneousMatrix cinitTc = cTo0 * oMc;
201 return cinitTc.getThetaUVector().getTheta();
202 }
203
204 inline double normalDotProd(const vpHomogeneousMatrix &cMo)
205 {
206 vpColVector cameraNormal = cMo.getRotationMatrix() * normal;
207 oX.changeFrame(cMo);
208 vpColVector dir({ -oX.get_X(), -oX.get_Y(), -oX.get_Z() });
209 dir.normalize();
210 return dir * cameraNormal;
211 }
212
  /*!
   * Update the camera-frame coordinates of the tracked 3D point and its
   * normalized image projection for the given pose.
   * \param cMo the pose of the object in the camera frame.
   */
213 inline void update(const vpHomogeneousMatrix &cMo)
214 {
  // Order matters: projection() uses the camera-frame coordinates set by changeFrame().
215 oX.changeFrame(cMo);
216 oX.projection();
217 }
218
  /*!
   * Write the 2D reprojection error of this point into the global error vector.
   * Entries 2*i and 2*i+1 receive the x and y error between the projection of
   * the model point oX and the KLT-tracked position (both in normalized
   * image coordinates).
   * \param e the error vector to fill (must have at least 2*(i+1) entries).
   * \param i the index of this point in the error vector.
   */
219 inline void error(vpColVector &e, unsigned i) const
220 {
221 e[i * 2] = oX.get_x() - currentPos.get_u();
222 e[i * 2 + 1] = oX.get_y() - currentPos.get_v();
223 }
224
225 inline double distance(const vpTrackedKltPoint &other) const
226 {
227 const double d = sqrt(std::pow(oX.get_oX() - other.oX.get_oX(), 2) +
228 std::pow(oX.get_oY() - other.oX.get_oY(), 2) +
229 std::pow(oX.get_oZ() - other.oX.get_oZ(), 2));
230 return d;
231 }
232
  /*!
   * Fill rows 2*i and 2*i+1 of L with the interaction matrix of a normalized
   * image point (x, y) with depth Z, relating the image-point velocity to the
   * 6-dof camera velocity (translation then rotation).
   * \param L the interaction matrix to fill (6 columns).
   * \param i the index of this point: its two rows are 2*i and 2*i+1.
   */
233 inline void interaction(vpMatrix &L, unsigned i) const
234 {
235 double x = oX.get_x(), y = oX.get_y();
236 double xy = x * y;
  // Zinv = 1/Z, with Z the depth of the point in the camera frame.
237 double Zinv = 1.0 / oX.get_Z();
  // Row for the x coordinate: [-1/Z, 0, x/Z, xy, -(1+x^2), y]
238 L[i * 2][0] = -Zinv;
239 L[i * 2][1] = 0.0;
240 L[i * 2][2] = x * Zinv;
241 L[i * 2][3] = xy;
242 L[i * 2][4] = -(1.0 + x * x);
243 L[i * 2][5] = y;
244
  // Row for the y coordinate: [0, -1/Z, y/Z, 1+y^2, -xy, -x]
245 L[i * 2 + 1][0] = 0.0;
246 L[i * 2 + 1][1] = -Zinv;
247 L[i * 2 + 1][2] = y * Zinv;
248 L[i * 2 + 1][3] = 1.0 + y * y;
249 L[i * 2 + 1][4] = -xy;
250 L[i * 2 + 1][5] = -x;
251 }
252 };
253
254private:
255
  /*!
   * Try to add a KLT-detected point (id, u, v) to the tracked point map,
   * applying the distance/mask/border filters. Implemented in the .cpp file.
   */
256 void tryAddNewPoint(const vpRBFeatureTrackerInput &frame, std::map<long, vpTrackedKltPoint> &points,
257 long id, const float u, const float v, const vpHomogeneousMatrix &cMo, const vpHomogeneousMatrix &oMc);
258
  //! M-estimator used to robustly weight point errors during VVS.
259 vpRobust m_robust;
260
  //! Current and previous grayscale images, in OpenCV format, fed to the KLT tracker.
261 cv::Mat m_I, m_Iprev;
  //! Wrapper around the OpenCV KLT (Kanade-Lucas-Tomasi) feature tracker.
262 vpKltOpencv m_klt;
263
  //! Minimum acceptable number of tracked points before reinitialization.
264 unsigned int m_numPointsReinit;
  //! Minimum distance a candidate point must have to every tracked point to be added.
265 double m_newPointsDistanceThreshold;
  //! Image border size (pixels) in which points are not tracked.
266 unsigned int m_border;
267
  //! Maximum reprojection error (pixels) before a point is flagged as outlier.
268 double m_maxErrorOutliersPixels;
269
  //! Tracked 3D points, indexed by their KLT feature id.
270 std::map<long, vpTrackedKltPoint> m_points;
271
  //! Whether to filter points using the object mask.
272 bool m_useMask;
  //! Minimum mask confidence for a pixel to be kept (used when m_useMask is true).
273 float m_minMaskConfidence;
274};
275
276END_VISP_NAMESPACE
277
278#endif
279#endif
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
vpColVector & normalize()
Error type that can be emitted by ViSP classes.
Definition vpException.h:60
@ badValue
Used to indicate that a value is not in the allowed range.
Definition vpException.h:73
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpThetaUVector getThetaUVector() const
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition of the vpImage class member functions.
Definition vpImage.h:131
Wrapper for the KLT (Kanade-Lucas-Tomasi) feature tracker implemented in OpenCV. Thus to enable this ...
Definition vpKltOpencv.h:83
Implementation of a matrix and operations on matrices.
Definition vpMatrix.h:175
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the ...
Definition vpPoint.h:79
double get_oX() const
Get the point oX coordinate in the object frame.
Definition vpPoint.cpp:418
double get_oZ() const
Get the point oZ coordinate in the object frame.
Definition vpPoint.cpp:422
double get_oY() const
Get the point oY coordinate in the object frame.
Definition vpPoint.cpp:420
All the data related to a single tracking frame. This contains both the input data (from a real camer...
virtual void loadJsonConfiguration(const nlohmann::json &j)
bool shouldUseMask() const
Returns whether the tracking algorithm should filter out points that are unlikely to be on the object...
void extractFeatures(const vpRBFeatureTrackerInput &frame, const vpRBFeatureTrackerInput &previousFrame, const vpHomogeneousMatrix &cMo) VP_OVERRIDE
Extract features from the frame data and the current pose estimate.
vpKltOpencv & getKltTracker()
Get the underlying KLT tracker. Use this to modify its settings.
void setMinimumMaskConfidence(float confidence)
float getMinimumMaskConfidence() const
Returns the minimum mask confidence that a pixel should have if it should be kept during tracking.
void onTrackingIterEnd(const vpHomogeneousMatrix &) VP_OVERRIDE
Method called after the tracking iteration has finished.
void setFilteringBorderSize(unsigned int border)
virtual ~vpRBKltTracker()=default
double getMinimumDistanceNewPoints() const
Get the minimum distance that a candidate point should have to every other tracked point if it should...
double getFilteringMaxReprojectionError() const
Get the maximum reprojection error, in pixels, for a point to be considered as outlier....
const vpKltOpencv & getKltTracker() const
Get the underlying KLT tracker. Use this to read its settings.
bool requiresRGB() const VP_OVERRIDE
Whether this tracker requires RGB image to extract features.
void onTrackingIterStart(const vpRBFeatureTrackerInput &, const vpHomogeneousMatrix &) VP_OVERRIDE
Method called when starting a tracking iteration.
void setMinimumDistanceNewPoints(double distance)
unsigned int getFilteringBorderSize() const
Return the number of pixels in the image border where points should not be tracked....
bool requiresDepth() const VP_OVERRIDE
Whether this tracker requires depth image to extract features.
void setFilteringMaxReprojectionError(double maxError)
void trackFeatures(const vpRBFeatureTrackerInput &frame, const vpRBFeatureTrackerInput &previousFrame, const vpHomogeneousMatrix &cMo) VP_OVERRIDE
Track the features.
void setShouldUseMask(bool useMask)
unsigned int getMinimumNumberOfPoints() const
Get the minimum acceptable number of points that should be tracked. If KLT tracking has less than thi...
void display(const vpCameraParameters &cam, const vpImage< unsigned char > &I, const vpImage< vpRGBa > &IRGB, const vpImage< unsigned char > &depth) const VP_OVERRIDE
virtual void loadJsonConfiguration(const nlohmann::json &j) VP_OVERRIDE
bool requiresSilhouetteCandidates() const VP_OVERRIDE
Whether this tracker requires Silhouette candidates.
void initVVS(const vpRBFeatureTrackerInput &frame, const vpRBFeatureTrackerInput &previousFrame, const vpHomogeneousMatrix &cMo) VP_OVERRIDE
void computeVVSIter(const vpRBFeatureTrackerInput &frame, const vpHomogeneousMatrix &cMo, unsigned int iteration) VP_OVERRIDE
void setMinimumNumberOfPoints(unsigned int points)
Contains an M-estimator and various influence function.
Definition vpRobust.h:84
double getTheta() const
void interaction(vpMatrix &L, unsigned i) const
vpPoint oX
Initial pose of the object in the camera frame, acquired when the tracked point was first constructed...
double distance(const vpTrackedKltPoint &other) const
double rotationDifferenceToInitial(const vpHomogeneousMatrix &oMc)
Current position of the tracked point, in normalized image coordinates.
void update(const vpHomogeneousMatrix &cMo)
vpImagePoint currentPos
Surface normal at this point, in the object frame.
void error(vpColVector &e, unsigned i) const
vpColVector normal
Tracked 3D point.
double normalDotProd(const vpHomogeneousMatrix &cMo)