#include <visp3/core/vpConfig.h>

#if defined(VISP_HAVE_MODULE_MBT) && defined(VISP_HAVE_DISPLAY)

#include <visp3/core/vpDebug.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpImageConvert.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpMath.h>
#include <visp3/core/vpTime.h>
#include <visp3/gui/vpDisplayD3D.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/io/vpParseArgv.h>
#include <visp3/io/vpVideoReader.h>
#include <visp3/mbt/vpMbGenericTracker.h>
#include <visp3/mbt/vpMbtTukeyEstimator.h>

#define GETOPTARGS "x:X:m:M:i:n:dchfolwvpt:T:e:"

#define USE_SMALL_DATASET 1 // small depth dataset in ViSP-images

// Detect the host endianness: the binary depth files of the dataset are stored in
// little-endian byte order and must be swapped on big-endian machines.
#if defined(__GLIBC__)
#include <endian.h>
#if (__BYTE_ORDER == __LITTLE_ENDIAN)
#define VISP_LITTLE_ENDIAN
#elif (__BYTE_ORDER == __BIG_ENDIAN)
#define VISP_BIG_ENDIAN
#elif (__BYTE_ORDER == __PDP_ENDIAN)
#define VISP_PDP_ENDIAN
#else
#error Unknown machine endianness detected.
#endif
#elif defined(_BIG_ENDIAN) && !defined(_LITTLE_ENDIAN) || defined(__BIG_ENDIAN__) && !defined(__LITTLE_ENDIAN__)
#define VISP_BIG_ENDIAN
#elif defined(_LITTLE_ENDIAN) && !defined(_BIG_ENDIAN) || defined(__LITTLE_ENDIAN__) && !defined(__BIG_ENDIAN__)
#define VISP_LITTLE_ENDIAN
#elif defined(__sparc) || defined(__sparc__) || defined(_POWER) || defined(__powerpc__) || defined(__ppc__) ||        \
    defined(__hpux) || defined(_MIPSEB) || defined(_POWER) || defined(__s390__)
#define VISP_BIG_ENDIAN
#elif defined(__i386__) || defined(__alpha__) || defined(__ia64) || defined(__ia64__) || defined(_M_IX86) ||          \
    defined(_M_IA64) || defined(_M_ALPHA) || defined(__amd64) || defined(__amd64__) || defined(_M_AMD64) ||           \
    defined(__x86_64) || defined(__x86_64__) || defined(_M_X64)
#define VISP_LITTLE_ENDIAN
#else
#error Cannot detect host machine endianness.
#endif

#ifdef VISP_BIG_ENDIAN
// Swap the two bytes of a 16-bit value (little-endian <-> big-endian).
uint16_t swap16bits(const uint16_t val)
{
  return (((val >> 8) & 0x00FF) | ((val << 8) & 0xFF00));
}
// Swap the four bytes of a 32-bit value (little-endian <-> big-endian).
uint32_t swap32bits(const uint32_t val)
{
  return (((val >> 24) & 0x000000FF) | ((val >> 8) & 0x0000FF00) | ((val << 8) & 0x00FF0000) |
          ((val << 24) & 0xFF000000));
}
#endif
// Read an unsigned int stored in little-endian byte order and convert it to the host byte order.
void readBinaryUIntLE(std::ifstream &file, unsigned int &uint_value)
{
  // Read the data stored in little endian
  file.read((char *)(&uint_value), sizeof(uint_value));

#ifdef VISP_BIG_ENDIAN
  // Swap the byte order from little endian to big endian
  if (sizeof(uint_value) == 4) {
    uint_value = (unsigned int)swap32bits((uint32_t)uint_value);
  }
  else {
    uint_value = swap16bits((uint16_t)uint_value);
  }
#endif
}
// Read a uint16_t stored in little-endian byte order and convert it to the host byte order.
void readBinaryUIntLE(std::ifstream &file, uint16_t &uint_value)
{
  // Read the data stored in little endian
  file.read((char *)(&uint_value), sizeof(uint_value));

#ifdef VISP_BIG_ENDIAN
  // Swap the byte order from little endian to big endian
  uint_value = swap16bits((uint16_t)uint_value);
#endif
}
// Print a usage message. \param name: program name. \param badparam: bad parameter name, if any.
void usage(const char *name, const char *badparam)
{
  fprintf(stdout, "\n\
Example of tracking with vpGenericTracker.\n\
\n\
SYNOPSIS\n\
  %s [-i <test image path>] [-x <config file>] [-X <config file depth>]\n\
  [-m <model name>] [-M <model name depth>] [-n <initialisation file base name>]\n\
  [-f] [-c] [-d] [-h] [-o] [-w] [-l] [-v] [-p]\n\
  [-t <tracker type>] [-T <tracker type>] [-e <last frame index>]\n", name);

  fprintf(stdout, "\n\
OPTIONS:\n\
  -i <input image path>\n\
     Set image input path.\n\
     These images come from ViSP-images-x.y.z.tar.gz available\n\
     on the ViSP website.\n\
     Setting the VISP_INPUT_IMAGE_PATH environment\n\
     variable produces the same behavior as using\n\
     this option.\n\
\n\
  -x <config file>\n\
     Set the config file (the xml file) to use.\n\
     The config file is used to specify the parameters of the tracker.\n\
\n\
  -X <config file depth>\n\
     Set the config file (the xml file) to use for the depth sensor.\n\
     The config file is used to specify the parameters of the tracker.\n\
\n\
  -m <model name>\n\
     Specify the name of the file of the model.\n\
     The model can either be a vrml model (.wrl) or a .cao file.\n\
\n\
  -M <model name depth>\n\
     Specify the name of the file of the model for the depth sensor.\n\
     The model can either be a vrml model (.wrl) or a .cao file.\n\
\n\
  -n <initialisation file base name>\n\
     Base name of the initialisation file. The file will be 'base_name'.init.\n\
     This base name is also used for the optional picture specifying where to\n\
     click (a .ppm picture).\n\
\n\
  -f\n\
     Turn off the display of the moving edges and KLT points.\n\
\n\
  -d\n\
     Turn off the display.\n\
\n\
  -c\n\
     Disable the mouse click. Useful to automate the\n\
     execution of this program without human intervention.\n\
\n\
  -o\n\
     Use Ogre3D for visibility tests.\n\
\n\
  -w\n\
     When Ogre3D is enabled [-o], show the Ogre3D configuration dialog that allows to set the renderer.\n\
\n\
  -l\n\
     Use the scanline for visibility tests.\n\
\n\
  -v\n\
     Compute the covariance matrix.\n\
\n\
  -p\n\
     Compute the gradient projection error.\n\
\n\
  -t <tracker type>\n\
     Set tracker type (<1 (Edge)>, <2 (KLT)>, <3 (both)>) for the color sensor.\n\
\n\
  -T <tracker type>\n\
     Set tracker type (<4 (Depth normal)>, <8 (Depth dense)>, <12 (both)>) for the depth sensor.\n\
\n\
  -e <last frame index>\n\
     Specify the index of the last frame. Once reached, the tracking is stopped.\n\
\n\
  -h\n\
     Print the help.\n\n");

  if (badparam)
    fprintf(stdout, "\nERROR: Bad parameter [%s]\n", badparam);
}
// Parse the command line options. Returns false when the program should stop (bad option or -h).
bool getOptions(int argc, const char **argv, std::string &ipath, std::string &configFile, std::string &configFile_depth,
                std::string &modelFile, std::string &modelFile_depth, std::string &initFile, bool &displayFeatures,
                bool &click_allowed, bool &display, bool &useOgre, bool &showOgreConfigDialog, bool &useScanline,
                bool &computeCovariance, bool &projectionError, int &trackerType, int &tracker_type_depth,
                int &lastFrame)
{
  const char *optarg_;
  int c;
  while ((c = vpParseArgv::parse(argc, argv, GETOPTARGS, &optarg_)) > 1) {

    switch (c) {
    case 'i':
      ipath = optarg_;
      break;
    case 'x':
      configFile = optarg_;
      break;
    case 'X':
      configFile_depth = optarg_;
      break;
    case 'm':
      modelFile = optarg_;
      break;
    case 'M':
      modelFile_depth = optarg_;
      break;
    case 'n':
      initFile = optarg_;
      break;
    case 'f':
      displayFeatures = false;
      break;
    case 'c':
      click_allowed = false;
      break;
    case 'd':
      display = false;
      break;
    case 'o':
      useOgre = true;
      break;
    case 'l':
      useScanline = true;
      break;
    case 'w':
      showOgreConfigDialog = true;
      break;
    case 'v':
      computeCovariance = true;
      break;
    case 'p':
      projectionError = true;
      break;
    case 't':
      trackerType = atoi(optarg_);
      break;
    case 'T':
      tracker_type_depth = atoi(optarg_);
      break;
    case 'e':
      lastFrame = atoi(optarg_);
      break;
    case 'h':
      usage(argv[0], NULL);
      return false;

    default:
      usage(argv[0], optarg_);
      return false;
    }
  }

  if ((c == 1) || (c == -1)) {
    // Standalone parameter or parsing error
    usage(argv[0], NULL);
    std::cerr << "ERROR: " << std::endl;
    std::cerr << "  Bad argument " << optarg_ << std::endl << std::endl;
    return false;
  }

  return true;
}
// Minimal copy of the librealsense intrinsics structure, sufficient to deproject a pixel to a 3D point.
struct rs_intrinsics {
  float ppx;       // horizontal coordinate of the principal point, as a pixel offset from the left edge
  float ppy;       // vertical coordinate of the principal point, as a pixel offset from the top edge
  float fx;        // focal length along the x-axis, in pixels
  float fy;        // focal length along the y-axis, in pixels
  float coeffs[5]; // distortion coefficients
};
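// The deprojection below first normalises the pixel coordinates with the principal point and the
// focal lengths, then applies the distortion polynomial
//   f = 1 + c0*r^2 + c1*r^4 + c4*r^6   plus the tangential terms driven by c2 and c3,
// and finally scales the corrected ray by the measured depth to obtain the 3D point.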
// Deproject a 2D pixel and its depth measurement into a 3D point expressed in the depth camera frame.
void rs_deproject_pixel_to_point(float point[3], const rs_intrinsics &intrin, const float pixel[2], float depth)
{
  float x = (pixel[0] - intrin.ppx) / intrin.fx;
  float y = (pixel[1] - intrin.ppy) / intrin.fy;

  float r2 = x * x + y * y;
  float f = 1 + intrin.coeffs[0] * r2 + intrin.coeffs[1] * r2 * r2 + intrin.coeffs[4] * r2 * r2 * r2;
  float ux = x * f + 2 * intrin.coeffs[2] * x * y + intrin.coeffs[3] * (r2 + 2 * x * x);
  float uy = y * f + 2 * intrin.coeffs[3] * x * y + intrin.coeffs[2] * (r2 + 2 * y * y);

  x = ux;
  y = uy;

  point[0] = depth * x;
  point[1] = depth * y;
  point[2] = depth;
}
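// Layout of the depth_image_%04d.bin files read below: two unsigned integers (height, then width)
// stored in little-endian byte order, followed by height * width 16-bit depth values in row-major
// order, also little-endian. readBinaryUIntLE() handles the byte swapping on big-endian hosts.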
// Read one frame of the sequence: the grayscale image, the raw 16-bit depth map and the
// pointcloud reconstructed from the depth map. Returns false when the frame cannot be read.
bool read_data(unsigned int cpt, const std::string &input_directory, vpImage<unsigned char> &I,
               vpImage<uint16_t> &I_depth_raw, std::vector<vpColVector> &pointcloud, unsigned int &pointcloud_width,
               unsigned int &pointcloud_height)
{
  char buffer[FILENAME_MAX];

  // Read the grayscale image
  std::stringstream ss;
  ss << input_directory << "/image_%04d.pgm";
  sprintf(buffer, ss.str().c_str(), cpt);
  std::string filename_image = buffer;

  if (!vpIoTools::checkFilename(filename_image)) {
    std::cerr << "Cannot read: " << filename_image << std::endl;
    return false;
  }
  vpImageIo::read(I, filename_image);

  // Read the raw depth map stored in a binary file
  ss.str("");
  ss << input_directory << "/depth_image_%04d.bin";
  sprintf(buffer, ss.str().c_str(), cpt);
  std::string filename_depth = buffer;

  std::ifstream file_depth(filename_depth.c_str(), std::ios::in | std::ios::binary);
  if (!file_depth.is_open()) {
    return false;
  }

  unsigned int height = 0, width = 0;
  readBinaryUIntLE(file_depth, height);
  readBinaryUIntLE(file_depth, width);

  I_depth_raw.resize(height, width);

  uint16_t depth_value = 0;
  for (unsigned int i = 0; i < height; i++) {
    for (unsigned int j = 0; j < width; j++) {
      readBinaryUIntLE(file_depth, depth_value);
      I_depth_raw[i][j] = depth_value;
    }
  }

  // Transform the depth map into a pointcloud expressed in the depth camera frame
  pointcloud_width = width;
  pointcloud_height = height;
  pointcloud.resize((size_t)width * height);

  // Intrinsics of the depth camera used to record the sequence; depth_scale converts
  // the raw 16-bit depth units into meters.
  const float depth_scale = 0.000124986647f;
  rs_intrinsics depth_intrinsic;
  depth_intrinsic.ppx = 311.484558f;
  depth_intrinsic.ppy = 246.283234f;
  depth_intrinsic.fx = 476.053619f;
  depth_intrinsic.fy = 476.053497f;
  depth_intrinsic.coeffs[0] = 0.165056542f;
  depth_intrinsic.coeffs[1] = -0.0508309528f;
  depth_intrinsic.coeffs[2] = 0.00435937941f;
  depth_intrinsic.coeffs[3] = 0.00541406544f;
  depth_intrinsic.coeffs[4] = 0.250085592f;

  for (unsigned int i = 0; i < height; i++) {
    for (unsigned int j = 0; j < width; j++) {
      float scaled_depth = I_depth_raw[i][j] * depth_scale;
      float point[3];
      float pixel[2] = {(float)j, (float)i};
      rs_deproject_pixel_to_point(point, depth_intrinsic, pixel, scaled_depth);

      vpColVector data_3D(3);
      data_3D[0] = point[0];
      data_3D[1] = point[1];
      data_3D[2] = point[2];

      pointcloud[(size_t)(i * width + j)] = data_3D;
    }
  }

  return true;
}
// Configure the tracker: from the xml configuration files when xml support is available,
// otherwise from parameters set manually in code.
void loadConfiguration(vpMbTracker *const tracker, const std::string &configFile, const std::string &configFile_depth)
{
#if defined(VISP_HAVE_XML2) && USE_XML
  // From the xml configuration files (color camera and depth camera)
  dynamic_cast<vpMbGenericTracker *>(tracker)->loadConfigFile(configFile, configFile_depth);
#else
  (void)configFile;
  (void)configFile_depth;
  // Corresponding edge, camera and clipping parameters set manually (elided in this listing).

#if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
  // KLT feature settings (elided in this listing).
#endif

  // Depth normal feature settings: plane estimation with PCL (method 2) and RANSAC parameters.
  dynamic_cast<vpMbGenericTracker *>(tracker)->setDepthNormalPclPlaneEstimationMethod(2);
  dynamic_cast<vpMbGenericTracker *>(tracker)->setDepthNormalPclPlaneEstimationRansacMaxIter(200);
  dynamic_cast<vpMbGenericTracker *>(tracker)->setDepthNormalPclPlaneEstimationRansacThreshold(0.001);
#endif
}
int main(int argc, const char **argv)
{
  try {
    // Test the Tukey M-estimator on a small set of residuals (double version)
    {
      vpMbtTukeyEstimator<double> tukey_estimator;
      std::vector<double> residues;
      residues.push_back(0.5);
      residues.push_back(0.1);
      residues.push_back(0.15);
      residues.push_back(0.14);
      residues.push_back(0.12);
      std::vector<double> weights(5, 1);

      tukey_estimator.MEstimator(residues, weights, 1e-3);

      for (size_t i = 0; i < weights.size(); i++) {
        std::cout << "residues[" << i << "]=" << residues[i] << " ; weights[" << i << "]=" << weights[i] << std::endl;
      }
      std::cout << std::endl;
    }

    // Same test with the float version of the estimator
    {
      vpMbtTukeyEstimator<float> tukey_estimator;
      std::vector<float> residues;
      residues.push_back(0.5f);
      residues.push_back(0.1f);
      residues.push_back(0.15f);
      residues.push_back(0.14f);
      residues.push_back(0.12f);
      std::vector<float> weights(5, 1);

      tukey_estimator.MEstimator(residues, weights, (float)1e-3);

      for (size_t i = 0; i < weights.size(); i++) {
        std::cout << "residues[" << i << "]=" << residues[i] << " ; weights[" << i << "]=" << weights[i] << std::endl;
      }
      std::cout << std::endl;
    }
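    // In both runs above, the Tukey M-estimator assigns each residual a weight in [0, 1]:
    // residuals consistent with the bulk of the data (around 0.1-0.15) keep a weight close to 1,
    // while the outlying residual (0.5) is expected to be driven towards 0, which is how the
    // model-based tracker rejects outlier measurements during pose estimation.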
    std::string env_ipath;
    std::string opt_ipath;
    std::string ipath;
    std::string opt_configFile;
    std::string opt_configFile_depth;
    std::string opt_modelFile;
    std::string opt_modelFile_depth;
    std::string opt_initFile;
    std::string initFile;
    bool displayFeatures = true;
    bool opt_click_allowed = true;
    bool opt_display = true;
    bool useOgre = false;
    bool showOgreConfigDialog = false;
    bool useScanline = false;
    bool computeCovariance = false;
    bool projectionError = false;
    // Default tracker types: edge features for the color camera (1), dense depth features for the
    // depth camera (8). Values can be combined, e.g. 3 = edges + KLT, 12 = depth normal + dense.
    int trackerType_image = vpMbGenericTracker::EDGE_TRACKER;
    int trackerType_depth = vpMbGenericTracker::DEPTH_DENSE_TRACKER;

#if defined(__mips__) || defined(__mips) || defined(mips) || defined(__MIPS__)
    // Limit the number of frames to keep the test duration reasonable on slow MIPS platforms
    int opt_lastFrame = 5;
#else
    int opt_lastFrame = -1;
#endif
    // Get the visp-images-data package path or the VISP_INPUT_IMAGE_PATH environment variable value
    env_ipath = vpIoTools::getViSPImagesDataPath();

    // Set the default input path
    if (!env_ipath.empty())
      ipath = env_ipath;

    // Read the command line options
    if (!getOptions(argc, argv, opt_ipath, opt_configFile, opt_configFile_depth, opt_modelFile, opt_modelFile_depth,
                    opt_initFile, displayFeatures, opt_click_allowed, opt_display, useOgre, showOgreConfigDialog,
                    useScanline, computeCovariance, projectionError, trackerType_image, trackerType_depth,
                    opt_lastFrame)) {
      return EXIT_FAILURE;
    }

#if !defined(VISP_HAVE_MODULE_KLT) || (!defined(VISP_HAVE_OPENCV) || (VISP_HAVE_OPENCV_VERSION < 0x020100))
    if (trackerType_image == 2) {
      std::cout << "KLT only features cannot be used: ViSP is not built with "
                   "KLT module or OpenCV is not available."
                << std::endl;
      return EXIT_SUCCESS;
    }
#endif

    // Test if an input path is set
    if (opt_ipath.empty() && env_ipath.empty()) {
      usage(argv[0], NULL);
      std::cerr << std::endl << "ERROR:" << std::endl;
      std::cerr << "  Use -i <visp image path> option or set VISP_INPUT_IMAGE_PATH " << std::endl
                << "  environment variable to specify the location of the " << std::endl
                << "  image path where test images are located." << std::endl
                << std::endl;
      return EXIT_FAILURE;
    }

    // An input path given with -i overrides the environment variable
    if (!opt_ipath.empty())
      ipath = opt_ipath;

    // The depth test sequence lives in a sub-folder of the ViSP-images dataset; the exact
    // sub-folder name is elided in this listing, so the input path is checked directly here.
    std::string dir_path = ipath;
    if (!vpIoTools::checkDirectory(dir_path)) {
      std::cerr << "ViSP-images does not contain the folder: " << dir_path << "!" << std::endl;
      return EXIT_FAILURE;
    }
    // Configuration files: use the ones given on the command line when provided; the default xml
    // files shipped with the dataset are used otherwise (their paths are elided in this listing).
    std::string configFile, configFile_depth;
    if (!opt_configFile.empty())
      configFile = opt_configFile;

    if (!opt_configFile_depth.empty())
      configFile_depth = opt_configFile_depth;

    // Model files (.wrl when Coin3D is available, .cao otherwise); default paths elided in this listing.
    std::string modelFile, modelFile_depth;
    if (!opt_modelFile.empty())
      modelFile = opt_modelFile;
#if defined(VISP_HAVE_COIN3D) && (COIN_MAJOR_VERSION == 2 || COIN_MAJOR_VERSION == 3 || COIN_MAJOR_VERSION == 4)
    // Default color-camera model: vrml (.wrl) file from the dataset
#else
    // Default color-camera model: .cao file from the dataset
#endif

    if (!opt_modelFile_depth.empty())
      modelFile_depth = opt_modelFile_depth;

    // Check whether a vrml model is requested, which requires Coin3D support
    std::string vrml_ext = ".wrl";
    bool use_vrml =
        (modelFile.compare(modelFile.length() - vrml_ext.length(), vrml_ext.length(), vrml_ext) == 0) ||
        (modelFile_depth.compare(modelFile_depth.length() - vrml_ext.length(), vrml_ext.length(), vrml_ext) == 0);

    if (use_vrml) {
#if defined(VISP_HAVE_COIN3D) && (COIN_MAJOR_VERSION == 2 || COIN_MAJOR_VERSION == 3 || COIN_MAJOR_VERSION == 4)
      std::cout << "use_vrml: " << use_vrml << std::endl;
#else
      std::cerr << "Error: vrml model file is only supported if ViSP is "
                   "built with Coin3D 3rd party"
                << std::endl;
      return EXIT_FAILURE;
#endif
    }

    // Initialisation file: use the one given on the command line when provided; the default file
    // from the dataset is used otherwise (path elided in this listing).
    if (!opt_initFile.empty())
      initFile = opt_initFile;

    // Load the first frame of the sequence: grayscale image, raw depth map and the pointcloud
    // reconstructed from the depth map.
    vpImage<unsigned char> I, I_depth;
    vpImage<uint16_t> I_depth_raw;
    std::vector<vpColVector> pointcloud;
    unsigned int pointcloud_width, pointcloud_height;
    if (!read_data(0, ipath, I, I_depth_raw, pointcloud, pointcloud_width, pointcloud_height)) {
      std::cerr << "Cannot open sequence: " << ipath << std::endl;
      return EXIT_FAILURE;
    }
    vpImageConvert::createDepthHistogram(I_depth_raw, I_depth);
    // Select a display backend among the ones available
#if defined VISP_HAVE_X11
    vpDisplayX display1, display2;
#elif defined VISP_HAVE_GDI
    vpDisplayGDI display1, display2;
#elif defined VISP_HAVE_OPENCV
    vpDisplayOpenCV display1, display2;
#elif defined VISP_HAVE_D3D9
    vpDisplayD3D display1, display2;
#elif defined VISP_HAVE_GTK
    vpDisplayGTK display1, display2;
#endif

#if (defined VISP_HAVE_DISPLAY)
    if (opt_display) {
      display1.init(I, 100, 100, "Test tracking (Left)");
      // Place the depth view next to the color view (window offset chosen for illustration)
      display2.init(I_depth, 110 + (int)I.getWidth(), 100, "Test tracking (Right)");
    }
#endif

    // Create a generic tracker handling both sensors
    std::vector<int> trackerTypes(2);
    trackerTypes[0] = trackerType_image;
    trackerTypes[1] = trackerType_depth;
    vpMbTracker *tracker = new vpMbGenericTracker(trackerTypes);

    loadConfiguration(tracker, configFile, configFile_depth);

    // Load the extrinsic transformation between the depth and the color camera frames
    // (the exact filename inside the dataset is elided; the name below is illustrative).
    std::string depth_M_color_filename = vpIoTools::createFilePath(ipath, "depth_M_color.txt");
    vpHomogeneousMatrix depth_M_color;
    std::ifstream depth_M_color_file(depth_M_color_filename.c_str());
    depth_M_color.load(depth_M_color_file);
    std::map<std::string, vpHomogeneousMatrix> mapOfCameraTransformationMatrices;
    mapOfCameraTransformationMatrices["Camera2"] = depth_M_color;
    dynamic_cast<vpMbGenericTracker *>(tracker)->setCameraTransformationMatrix(mapOfCameraTransformationMatrices);
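    // Naming convention used with the map-based API below: "Camera1" refers to the color camera
    // (grayscale image, edge/KLT features, .init file) and "Camera2" to the depth camera
    // (pointcloud with its width/height). The transformation registered above lets the tracker
    // deduce the depth camera pose from the color camera pose.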
    // Tracker settings driven by the command-line options (feature display, Ogre and scanline
    // visibility tests, covariance and projection error computation) and the loading of the 3D
    // model for both cameras are elided in this listing.

    if (opt_display && opt_click_allowed) {
      // Display the first images and wait for a user click before initialising (elided in this listing).
    }

    vpHomogeneousMatrix c1Mo, c2Mo;
    if (opt_display && opt_click_allowed) {
      std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
      mapOfImages["Camera1"] = &I;
      mapOfImages["Camera2"] = &I_depth;
      std::map<std::string, std::string> mapOfInitFiles;
      mapOfInitFiles["Camera1"] = initFile;

      // Initialise the pose by clicking in the color image on the 3D points listed in the .init file;
      // the depth camera pose follows from the camera transformation matrix set above.
      dynamic_cast<vpMbGenericTracker *>(tracker)->initClick(mapOfImages, mapOfInitFiles, true);
    }
    else {
      // Without user interaction, initialise the tracker from hard-coded poses
      vpHomogeneousMatrix c1Moi(0.06846423368, 0.09062570884, 0.3401096693, -2.671882598, 0.1174275908, -0.6011935263);
      vpHomogeneousMatrix c2Moi(0.04431452054, 0.09294637757, 0.3357760654, -2.677922443, 0.121297639, -0.6028463357);
      // Setting these initial poses on the tracker is elided in this listing.
    }

    // Track once on the first frame: the grayscale image feeds "Camera1", the pointcloud built
    // from the depth map (with its dimensions) feeds "Camera2".
    std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
    mapOfImages["Camera1"] = &I;
    std::map<std::string, const std::vector<vpColVector> *> mapOfPointclouds;
    mapOfPointclouds["Camera2"] = &pointcloud;
    std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
    mapOfWidths["Camera2"] = pointcloud_width;
    mapOfHeights["Camera2"] = pointcloud_height;

    dynamic_cast<vpMbGenericTracker *>(tracker)->track(mapOfImages, mapOfPointclouds, mapOfWidths, mapOfHeights);
    dynamic_cast<vpMbGenericTracker *>(tracker)->getPose(c1Mo, c2Mo);
    bool quit = false, click = false;
    unsigned int frame_index = 0;
    std::vector<double> time_vec;
    while (read_data(frame_index, ipath, I, I_depth_raw, pointcloud, pointcloud_width, pointcloud_height) && !quit &&
           (opt_lastFrame > 0 ? (int)frame_index <= opt_lastFrame : true)) {
      // Convert the raw depth map into a grayscale image for visualisation
      vpImageConvert::createDepthHistogram(I_depth_raw, I_depth);

      if (opt_display) {
        vpDisplay::display(I);
        vpDisplay::display(I_depth);

        std::stringstream ss;
        ss << "Num frame: " << frame_index;
        vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);
      }

      // Test the reset of the tracker after 10 frames
      if (frame_index == 10) {
        std::cout << "----------Test reset tracker----------" << std::endl;
        tracker->resetTracker();
        loadConfiguration(tracker, configFile, configFile_depth);
        // Reloading the 3D model, restoring the camera parameters and the current pose
        // are elided in this listing.
      }

      // Test the setPose() interface with hard-coded poses
#if USE_SMALL_DATASET
      if (frame_index == 20) {
        c1Mo.buildFrom(0.07734634051, 0.08993639906, 0.342344402, -2.708409543, 0.0669276477, -0.3798958303);
        c2Mo.buildFrom(0.05319520317, 0.09223511976, 0.3380095812, -2.71438192, 0.07141055397, -0.3810081638);
#else
      if (frame_index == 50) {
        c1Mo.buildFrom(0.09280663035, 0.09277655672, 0.330415149, -2.724431817, 0.0293932671, 0.02027966377);
        c2Mo.buildFrom(0.06865933578, 0.09494713501, 0.3260555142, -2.730027451, 0.03498390135, 0.01989831338);
#endif
        std::cout << "Test set pose" << std::endl;
        dynamic_cast<vpMbGenericTracker *>(tracker)->setPose(I, I_depth, c1Mo, c2Mo);
      }

      // Track the object, except on the few frames that follow the forced pose so that the
      // effect of setPose() can be observed
#if USE_SMALL_DATASET
      if (frame_index < 15 || frame_index >= 20) {
#else
      if (frame_index < 30 || frame_index >= 50) {
#endif
        std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
        mapOfImages["Camera1"] = &I;
        std::map<std::string, const std::vector<vpColVector> *> mapOfPointclouds;
        mapOfPointclouds["Camera2"] = &pointcloud;
        std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
        mapOfWidths["Camera2"] = pointcloud_width;
        mapOfHeights["Camera2"] = pointcloud_height;

        double t = vpTime::measureTimeMs();
        dynamic_cast<vpMbGenericTracker *>(tracker)->track(mapOfImages, mapOfPointclouds, mapOfWidths, mapOfHeights);
        t = vpTime::measureTimeMs() - t;
        time_vec.push_back(t);

        dynamic_cast<vpMbGenericTracker *>(tracker)->getPose(c1Mo, c2Mo);

        if (opt_display) {
          // Display of the model contours and of the object frame in both views is elided in this listing.
          std::stringstream ss;
          ss << "Computation time: " << t << " ms";
          vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
        }
      }

      if (opt_click_allowed) {
        // Mouse handling that toggles 'click' to pause or sets 'quit' to stop the sequence
        // is elided in this listing.
      }

      if (computeCovariance) {
        std::cout << "Covariance matrix: \n" << tracker->getCovarianceMatrix() << std::endl << std::endl;
      }

      if (projectionError) {
        std::cout << "Projection error: " << tracker->getProjectionError() << std::endl << std::endl;
      }

      if (opt_display) {
        vpDisplay::flush(I);
        vpDisplay::flush(I_depth);
      }

      frame_index++;
    }
    std::cout << "\nFinal poses, c1Mo:\n" << c1Mo << "\nc2Mo:\n" << c2Mo << std::endl;
    std::cout << "\nComputation time, Mean: " << vpMath::getMean(time_vec)
              << " ms ; Median: " << vpMath::getMedian(time_vec)
              << " ms ; Std: " << vpMath::getStdev(time_vec) << " ms" << std::endl;

    if (opt_click_allowed && !quit) {
      // Wait for a final user click before closing the windows
      vpDisplay::getClick(I);
    }

    delete tracker;
    tracker = NULL;

#if defined(VISP_HAVE_XML2) && USE_XML
    // Cleanup of the memory allocated by the xml library (elided in this listing)
#endif

#if defined(VISP_HAVE_COIN3D) && (COIN_MAJOR_VERSION == 2 || COIN_MAJOR_VERSION == 3 || COIN_MAJOR_VERSION == 4)
    // Cleanup of the memory allocated by the Coin3D library used to load the vrml model (elided in this listing)
#endif

    return EXIT_SUCCESS;
  }
  catch (const vpException &e) {
    std::cout << "Catch an exception: " << e << std::endl;
    return EXIT_FAILURE;
  }
}
#else
#include <cstdlib>
#include <iostream>

int main()
{
  std::cerr << "visp_mbt, visp_gui modules and OpenCV are required to run this test." << std::endl;
  return EXIT_SUCCESS;
}
#endif