#include <iostream>
#include <visp3/core/vpConfig.h>
#if defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_OPENCV) && defined(VISP_HAVE_NLOHMANN_JSON)
#include <visp3/core/vpDisplay.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/gui/vpDisplayFactory.h>
#include <visp3/mbt/vpMbGenericTracker.h>
#include <visp3/sensor/vpRealSense2.h>
#include VISP_NLOHMANN_JSON(json.hpp)
using json = nlohmann::json;
int main(int argc, char *argv[])
{
// NOTE(review): this listing reads like a Doxygen snippet extraction of a ViSP
// tutorial. Many statements appear to have been stripped: the declarations of
// `realsense`, `tracker`, `I_color`, `I_gray`, `I_depth`, `I_depth_raw`,
// `cam_color`, `cam_depth`, `depth_M_color`, `cMo`, `display1`, `display2`,
// the `catch (...)` headers matching the `try` blocks below, and several
// closing-brace counterparts. As shown the function is not self-contained and
// does not compile — confirm against the full tutorial source before editing.
#ifdef ENABLE_VISP_NAMESPACE
// Body of this conditional is empty here — presumably a
// `using namespace VISP_NAMESPACE_NAME;` line was stripped. TODO confirm.
#endif
// Command-line options and their defaults.
std::string config_file = "";
std::string model = "";
std::string init_file = "";
double proj_error_threshold = 25;
// Parse arguments: each option consumes the following argv entry as its value.
for (int i = 1; i < argc; i++) {
if (std::string(argv[i]) == "--config" && i + 1 < argc) {
config_file = std::string(argv[i + 1]);
}
else if (std::string(argv[i]) == "--model" && i + 1 < argc) {
model = std::string(argv[i + 1]);
}
else if (std::string(argv[i]) == "--init_file" && i + 1 < argc) {
init_file = std::string(argv[i + 1]);
}
else if (std::string(argv[i]) == "--proj_error_threshold" && i + 1 < argc) {
proj_error_threshold = std::atof(argv[i + 1]);
}
else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
// Print usage and exit successfully.
std::cout << "Usage: \n"
<< argv[0]
<< "--config <settings.json>"
<< " --model <object.cao>"
" --init_file <object.init>"
" [--proj_error_threshold <threshold between 0 and 90> (default: "
<< proj_error_threshold
<< ")]"
<< std::endl;
std::cout << "\n** How to track a 4.2 cm width cube with manual initialization:\n"
<< argv[0] << "--config model/cube/rgbd-tracker.json --model model/cube/cube.cao" << std::endl;
return EXIT_SUCCESS;
}
}
// Echo the resolved configuration so the user can verify what will be loaded.
std::cout << "Config files: " << std::endl;
std::cout << " JSON config: "
<< "\"" << config_file << "\"" << std::endl;
std::cout << " Model: "
<< "\"" << model << "\"" << std::endl;
std::cout << " Init file: "
<< "\"" << init_file << "\"" << std::endl;
// The JSON settings file is mandatory.
if (config_file.empty()) {
std::cout << "No JSON configuration was provided!" << std::endl;
return EXIT_FAILURE;
}
// Request aligned color (RGBA8) and depth (Z16) streams from the RealSense SDK.
int width = 640, height = 480;
int fps = 30;
rs2::config config;
config.enable_stream(RS2_STREAM_COLOR, width, height, RS2_FORMAT_RGBA8, fps);
config.enable_stream(RS2_STREAM_DEPTH, width, height, RS2_FORMAT_Z16, fps);
// NOTE(review): the body of this try block (presumably opening the RealSense
// device and querying intrinsics/extrinsics) and the catch header that
// declares `e` appear to have been stripped.
try {
}
std::cout << "Catch an exception: " << e.what() << std::endl;
std::cout << "Check if the Realsense camera is connected..." << std::endl;
return EXIT_SUCCESS;
}
std::cout << "Sensor internal camera parameters for color camera: " << cam_color << std::endl;
std::cout << "Sensor internal camera parameters for depth camera: " << cam_depth << std::endl;
// Per-camera containers for the generic tracker's multi-camera API,
// keyed by the tracker's camera name.
std::vector<vpColVector> pointcloud;
std::map<std::string, vpHomogeneousMatrix> mapOfCameraTransformations;
std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
std::map<std::string, std::string> mapOfInitFiles;
std::map<std::string, const std::vector<vpColVector> *> mapOfPointclouds;
std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
std::map<std::string, vpCameraParameters> mapOfCameraIntrinsics;
// Load tracker settings from the JSON configuration file.
tracker.loadConfigFile(config_file);
// If no model was given on the command line, try to read it from the JSON
// configuration: first a top-level "model" key, then a per-tracker one.
if (model.empty() && init_file.empty()) {
std::ifstream config(config_file);
const json j = json::parse(config);
config.close();
if (j.contains("model")) {
model = j["model"];
}
else if (j.at("trackers").begin()->contains("model")) {
model = (*j["trackers"].begin())["model"];
}
else {
std::cerr << "No model was provided in either JSON file or arguments" << std::endl;
return EXIT_FAILURE;
}
}
// NOTE(review): the body that derives a default init file (presumably from
// the model path) appears to have been stripped.
if (init_file.empty()) {
}
// Discover which of the tracker's cameras are color vs depth and fill the
// per-camera maps accordingly.
// NOTE(review): the conditional that distinguishes the color camera from the
// depth camera appears stripped — the brace structure below is unbalanced as
// shown.
std::string color_key = "", depth_key = "";
for (const auto &tracker_type : tracker.getCameraTrackerTypes()) {
std::cout << "tracker key == " << tracker_type.first << std::endl;
color_key = tracker_type.first;
mapOfImages[color_key] = &I_gray;
mapOfInitFiles[color_key] = init_file;
mapOfWidths[color_key] = width;
mapOfHeights[color_key] = height;
mapOfCameraIntrinsics[color_key] = cam_color;
}
depth_key = tracker_type.first;
mapOfImages[depth_key] = &I_depth;
mapOfWidths[depth_key] = width;
mapOfHeights[depth_key] = height;
mapOfCameraIntrinsics[depth_key] = cam_depth;
mapOfCameraTransformations[depth_key] = depth_M_color;
mapOfPointclouds[depth_key] = &pointcloud;
}
}
// A camera is "used" if the JSON configuration declared a tracker for it.
const bool use_depth = !depth_key.empty();
const bool use_color = !color_key.empty();
// Load the CAD model only if the JSON configuration did not already do so.
if (tracker.getNbPolygon() == 0) {
tracker.loadModel(model);
}
// Override JSON intrinsics/extrinsics with the ones reported by the sensor.
std::cout << "Updating configuration with parameters provided by RealSense SDK..." << std::endl;
tracker.setCameraParameters(mapOfCameraIntrinsics);
if (use_color && use_depth) {
tracker.setCameraTransformationMatrix(mapOfCameraTransformations);
}
unsigned int _posx = 100, _posy = 50;
#if defined(VISP_HAVE_DISPLAY)
// NOTE(review): the display allocation (smart pointer vs raw pointer
// depending on the C++ standard) appears stripped from both branches.
#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
#else
#endif
if (use_color)
display1->init(I_gray, _posx, _posy, "Color stream");
if (use_depth)
display2->init(I_depth, _posx + I_gray.
getWidth() + 10, _posy,
"Depth stream");
#endif
// Warm-up loop: grab frames until the user clicks to start initialization.
// NOTE(review): the display/conversion code and the click tests guarding the
// `break` statements appear stripped; the brace structure is unbalanced as
// shown.
while (true) {
realsense.
acquire((
unsigned char *)I_color.
bitmap, (
unsigned char *)I_depth_raw.
bitmap,
nullptr,
nullptr);
if (use_color) {
break;
}
}
if (use_depth) {
break;
}
}
}
// Enable projection error computation, then let the user initialize the pose
// by clicking the points listed in the .init file(s).
tracker.setProjectionErrorComputation(true);
tracker.initClick(mapOfImages, mapOfInitFiles, true);
std::vector<double> times_vec;
try {
bool quit = false;
double loop_t = 0;
// Main tracking loop: acquire, track, check projection error, display.
while (!quit) {
bool tracking_failed = false;
// Grab color, raw depth and the 3D point cloud for this iteration.
realsense.
acquire((
unsigned char *)I_color.
bitmap, (
unsigned char *)I_depth_raw.
bitmap, &pointcloud,
nullptr,
nullptr);
// NOTE(review): image conversion/display bodies appear stripped here.
if (use_color) {
}
if (use_depth) {
}
// Refresh the per-camera input maps for this frame.
if (use_color && use_depth) {
mapOfImages[color_key] = &I_gray;
mapOfPointclouds[depth_key] = &pointcloud;
mapOfWidths[depth_key] = width;
mapOfHeights[depth_key] = height;
}
else if (use_color) {
mapOfImages[color_key] = &I_gray;
}
else if (use_depth) {
mapOfPointclouds[depth_key] = &pointcloud;
}
// Run the tracker; a vpException marks this frame as a tracking failure.
// NOTE(review): the catch header declaring `e` appears stripped.
try {
if (use_color && use_depth) {
tracker.track(mapOfImages, mapOfPointclouds, mapOfWidths, mapOfHeights);
}
else if (use_color) {
tracker.track(I_gray);
}
else if (use_depth) {
tracker.track(mapOfImages, mapOfPointclouds, mapOfWidths, mapOfHeights);
}
}
std::cout << "Tracker exception: " << e.getStringMessage() << std::endl;
tracking_failed = true;
}
cMo = tracker.getPose();
// Projection error: either the tracker's own estimate or a recomputation
// from the current pose.
// NOTE(review): the condition selecting between the two branches appears
// stripped; the `else` below is dangling as shown.
double proj_error = 0;
proj_error = tracker.getProjectionError();
}
else {
proj_error = tracker.computeCurrentProjectionError(I_gray, cMo, cam_color);
}
if (proj_error > proj_error_threshold) {
std::cout << "Tracker needs to restart (projection error detected: " << proj_error << ")" << std::endl;
}
// On success, overlay the model, frame and feature statistics.
if (!tracking_failed) {
tracker.setDisplayFeatures(true);
if (use_color && use_depth) {
tracker.display(I_gray, I_depth, cMo, depth_M_color * cMo, cam_color, cam_depth,
vpColor::red, 3);
}
else if (use_color) {
}
else if (use_depth) {
}
// NOTE(review): the vpDisplay::displayText calls consuming these string
// streams appear stripped.
{
std::stringstream ss;
ss << "Nb features: " << tracker.getError().size();
}
{
std::stringstream ss;
ss << "Features: edges " << tracker.getNbFeaturesEdge() << ", klt " << tracker.getNbFeaturesKlt()
<< ", depth dense " << tracker.getNbFeaturesDepthDense() << ", depth normal" << tracker.getNbFeaturesDepthNormal();
}
}
// Per-iteration timing; loop_t presumably measured around the loop body
// (timing code appears stripped) — TODO confirm.
std::stringstream ss;
times_vec.push_back(loop_t);
ss << "Loop time: " << loop_t << " ms";
// NOTE(review): the click handling that sets `quit` appears stripped;
// the brace structure is unbalanced as shown.
if (use_color) {
quit = true;
}
}
}
if (use_depth) {
quit = true;
}
}
}
}
// NOTE(review): the catch header declaring `e` appears stripped.
std::cout << "Caught an exception: " << e.what() << std::endl;
}
// Report loop-time statistics (mean/median/stdev output appears stripped —
// only the dangling stream tail remains).
if (!times_vec.empty()) {
<< std::endl;
}
#if defined(VISP_HAVE_DISPLAY) && (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
// Pre-C++11 builds use a raw display pointer that must be freed manually.
if (display != nullptr) {
delete display;
}
#endif
return EXIT_SUCCESS;
}
#elif defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_OPENCV)
// Fallback entry point when ViSP was built without the Nlohmann JSON 3rd
// party library: report the missing dependency and exit successfully.
int main()
{
const char *missing_dep_msg = "Install the JSON 3rd party library (Nlohmann JSON), reconfigure VISP and build again";
std::cout << missing_dep_msg << std::endl;
return EXIT_SUCCESS;
}
#elif defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_NLOHMANN_JSON)
// Fallback entry point when ViSP was built without OpenCV: report the
// missing dependency and exit successfully.
int main()
{
const char *missing_dep_msg = "Install OpenCV, reconfigure VISP and build again";
std::cout << missing_dep_msg << std::endl;
return EXIT_SUCCESS;
}
#else
// Fallback entry point when ViSP was built without librealsense2: report the
// missing dependency and exit successfully.
int main()
{
const char *missing_dep_msg = "Install librealsense2 3rd party, configure and build ViSP again to use this example";
std::cout << missing_dep_msg << std::endl;
return EXIT_SUCCESS;
}
#endif
Generic class defining intrinsic camera parameters.
perspectiveProjWithoutDistortion
Perspective projection without distortion model.
static const vpColor none
Class that defines generic functionalities for display.
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayFrame(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, double size, const vpColor &color=vpColor::none, unsigned int thickness=1, const vpImagePoint &offset=vpImagePoint(0, 0), const std::string &frameName="", const vpColor &textColor=vpColor::black, const vpImagePoint &textOffset=vpImagePoint(15, 15))
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
Error that can be emitted by ViSP classes.
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void createDepthHistogram(const vpImage< uint16_t > &src_depth, vpImage< vpRGBa > &dest_rgba)
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
Definition of the vpImage class member functions.
unsigned int getWidth() const
Type * bitmap
points toward the bitmap
unsigned int getHeight() const
static double getMedian(const std::vector< double > &v)
static double getStdev(const std::vector< double > &v, bool useBesselCorrection=false)
static double getMean(const std::vector< double > &v)
Real-time 6D object pose tracking using its CAD model.
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
bool open(const rs2::config &cfg=rs2::config())
vpHomogeneousMatrix getTransformation(const rs2_stream &from, const rs2_stream &to, int from_index=-1) const
std::shared_ptr< vpDisplay > createDisplay()
Return a smart pointer vpDisplay specialization if a GUI library is available or nullptr otherwise.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.
VISP_EXPORT double measureTimeMs()