Visual Servoing Platform version 3.6.0
mbot-apriltag-ibvs.cpp
#include <visp3/core/vpMomentAreaNormalized.h>
#include <visp3/core/vpMomentBasic.h>
#include <visp3/core/vpMomentCentered.h>
#include <visp3/core/vpMomentDatabase.h>
#include <visp3/core/vpMomentGravityCenter.h>
#include <visp3/core/vpMomentGravityCenterNormalized.h>
#include <visp3/core/vpMomentObject.h>
#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/core/vpPoint.h>
#include <visp3/core/vpSerial.h>
#include <visp3/core/vpXmlParserCamera.h>
#include <visp3/detection/vpDetectorAprilTag.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/robot/vpUnicycle.h>
#include <visp3/sensor/vpV4l2Grabber.h>
#include <visp3/visual_features/vpFeatureMomentAreaNormalized.h>
#include <visp3/visual_features/vpFeatureMomentGravityCenterNormalized.h>
#include <visp3/vs/vpServo.h>
int main(int argc, const char **argv)
{
#if defined(VISP_HAVE_APRILTAG) && defined(VISP_HAVE_V4L2)
int device = 0;
vpDetectorAprilTag::vpAprilTagFamily tagFamily = vpDetectorAprilTag::TAG_36h11;
double tagSize = 0.065;
float quad_decimate = 4.0;
int nThreads = 2;
std::string intrinsic_file = "";
std::string camera_name = "";
bool display_tag = false;
bool display_on = false;
bool serial_off = false;
bool save_image = false; // Only possible if display_on = true
for (int i = 1; i < argc; i++) {
if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) {
tagSize = std::atof(argv[i + 1]);
} else if (std::string(argv[i]) == "--input" && i + 1 < argc) {
device = std::atoi(argv[i + 1]);
} else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) {
quad_decimate = (float)atof(argv[i + 1]);
} else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) {
nThreads = std::atoi(argv[i + 1]);
} else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
intrinsic_file = std::string(argv[i + 1]);
} else if (std::string(argv[i]) == "--camera_name" && i + 1 < argc) {
camera_name = std::string(argv[i + 1]);
} else if (std::string(argv[i]) == "--display_tag") {
display_tag = true;
#if defined(VISP_HAVE_X11)
} else if (std::string(argv[i]) == "--display_on") {
display_on = true;
} else if (std::string(argv[i]) == "--save_image") {
save_image = true;
#endif
} else if (std::string(argv[i]) == "--serial_off") {
serial_off = true;
} else if (std::string(argv[i]) == "--tag_family" && i + 1 < argc) {
tagFamily = (vpDetectorAprilTag::vpAprilTagFamily)std::atoi(argv[i + 1]);
} else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
std::cout << "Usage: " << argv[0]
<< " [--input <camera input>] [--tag_size <tag_size in m>]"
" [--quad_decimate <quad_decimate>] [--nthreads <nb>]"
" [--intrinsic <intrinsic file>] [--camera_name <camera name>]"
" [--tag_family <family> (0: TAG_36h11, 1: TAG_36h10, 2: "
"TAG_36ARTOOLKIT,"
" 3: TAG_25h9, 4: TAG_25h7, 5: TAG_16h5)]"
" [--display_tag]";
#if defined(VISP_HAVE_X11)
std::cout << " [--display_on] [--save_image]";
#endif
std::cout << " [--serial_off] [--help]" << std::endl;
return EXIT_SUCCESS;
}
}
// Me Auriga led ring
// if serial com ok: led 1 green
// if exception: led 1 red
// if tag detected: led 2 green, else led 2 red
// if motor left: led 3 blue
// if motor right: led 4 blue
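// The commands sent over the serial line are plain ASCII, one per line, e.g.
// "LED_RING=<led>,<r>,<g>,<b>\n" (led 0 addresses the whole ring, as in the switch-off command below)
// and "MOTOR_RPM=<left>,<right>\n". The exact behavior depends on the firmware flashed on the
// Me Auriga board, which is not part of this example.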
vpSerial *serial = NULL;
if (!serial_off) {
serial = new vpSerial("/dev/ttyAMA0", 115200);
serial->write("LED_RING=0,0,0,0\n"); // Switch off all led
serial->write("LED_RING=1,0,10,0\n"); // Switch on led 1 to green: serial ok
}
try {
vpImage<unsigned char> I;
vpV4l2Grabber g;
std::ostringstream device_name;
device_name << "/dev/video" << device;
g.setDevice(device_name.str());
g.setScale(1);
g.acquire(I);
vpDisplay *d = NULL;
vpImage<vpRGBa> O; // Color copy of the displayed image, used when --save_image is set
#ifdef VISP_HAVE_X11
if (display_on) {
d = new vpDisplayX(I);
}
#endif
vpCameraParameters cam;
cam.initPersProjWithoutDistortion(615.1674805, 615.1675415, I.getWidth() / 2., I.getHeight() / 2.);
vpXmlParserCamera parser;
if (!intrinsic_file.empty() && !camera_name.empty())
parser.parse(cam, intrinsic_file, camera_name, vpCameraParameters::perspectiveProjWithoutDistortion);
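// Note: the pixel focal lengths hard-coded above (~615) are only a rough default, presumably for the
// Raspberry Pi camera module used on the mbot; for accurate servoing pass --intrinsic and
// --camera_name so that calibrated parameters are loaded from the XML file instead.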
std::cout << "cam:\n" << cam << std::endl;
std::cout << "tagFamily: " << tagFamily << std::endl;
std::cout << "tagSize: " << tagSize << std::endl;
vpDetectorAprilTag detector(tagFamily);
detector.setAprilTagQuadDecimate(quad_decimate);
detector.setAprilTagNbThreads(nThreads);
detector.setDisplayTag(display_tag);
vpServo task;
vpAdaptiveGain lambda;
if (display_on)
lambda.initStandard(2.5, 0.4, 30); // lambda(0)=2.5, lambda(oo)=0.4 and lambda'(0)=30
else
lambda.initStandard(4, 0.4, 30); // lambda(0)=4, lambda(oo)=0.4 and lambda'(0)=30
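// vpAdaptiveGain makes the gain a decreasing function of the task error so that the robot moves
// gently when far from the goal and converges quickly near it. In ViSP it roughly follows
// lambda(x) = (l0 - linf) * exp(-x * slope0 / (l0 - linf)) + linf, with x a norm of the error,
// l0 the gain at zero error, linf the gain at infinity and slope0 the slope at x = 0.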
vpUnicycle robot;
task.setServo(vpServo::EYEINHAND_L_cVe_eJe);
task.setInteractionMatrixType(vpServo::CURRENT, vpServo::PSEUDO_INVERSE);
task.setLambda(lambda);
vpRotationMatrix cRe;
cRe[0][0] = 0;
cRe[0][1] = -1;
cRe[0][2] = 0;
cRe[1][0] = 0;
cRe[1][1] = 0;
cRe[1][2] = -1;
cRe[2][0] = 1;
cRe[2][1] = 0;
cRe[2][2] = 0;
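// cRe is the rotation from the end-effector (mobile robot) frame to the camera frame: the robot
// forward axis x_e becomes the camera optical axis z_c, while the camera x axis (image right) is
// -y_e and the camera y axis (image down) is -z_e, i.e. a camera looking straight ahead.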
vpHomogeneousMatrix cMe(vpTranslationVector(), cRe); // Camera assumed at the robot reference point (zero translation)
vpVelocityTwistMatrix cVe(cMe);
task.set_cVe(cVe);
vpMatrix eJe(6, 2, 0);
eJe[0][0] = eJe[5][1] = 1.0;
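// eJe is the 6x2 robot Jacobian of the unicycle: its two columns map the control inputs
// (linear velocity v, angular velocity w) onto the end-effector twist. eJe[0][0] = 1 assigns v to the
// translation along x_e and eJe[5][1] = 1 assigns w to the rotation around z_e; every other entry is zero.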
std::cout << "eJe: \n" << eJe << std::endl;
// Desired distance to the target
double Z_d = 0.4;
// Define the desired polygon corresponding to the AprilTag corners, CLOCKWISE
double X[4] = {tagSize / 2., tagSize / 2., -tagSize / 2., -tagSize / 2.};
double Y[4] = {tagSize / 2., -tagSize / 2., -tagSize / 2., tagSize / 2.};
std::vector<vpPoint> vec_P, vec_P_d;
for (int i = 0; i < 4; i++) {
vpPoint P_d(X[i], Y[i], 0);
vpHomogeneousMatrix cdMo(0, 0, Z_d, 0, 0, 0);
P_d.track(cdMo); // Compute the desired normalized coordinates of the corner at the desired camera pose
vec_P_d.push_back(P_d);
}
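// cdMo is the desired camera-to-tag pose: the tag centered, fronto-parallel, at distance Z_d.
// track() projects each 3D corner with this pose, so vec_P_d holds the desired normalized image
// coordinates from which the desired moments are computed below.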
vpMomentObject m_obj(3), m_obj_d(3);
vpMomentDatabase mdb, mdb_d;
vpMomentBasic mb_d; // Here only to get the desired area m00
vpMomentGravityCenter mg, mg_d; // Declare gravity center
vpMomentCentered mc, mc_d;
vpMomentAreaNormalized man(0, Z_d), man_d(0, Z_d); // Declare normalized area. Desired area parameter will be updated below with m00
vpMomentGravityCenterNormalized mgn, mgn_d; // Declare normalized gravity center
// Desired moments
m_obj_d.setType(vpMomentObject::DENSE_POLYGON); // Consider the AprilTag as a polygon
m_obj_d.fromVector(vec_P_d); // Initialize the object with the points coordinates
mb_d.linkTo(mdb_d); // Add basic moments to database
mg_d.linkTo(mdb_d); // Add gravity center to database
mc_d.linkTo(mdb_d); // Add centered moments to database
man_d.linkTo(mdb_d); // Add area normalized to database
mgn_d.linkTo(mdb_d); // Add gravity center normalized to database
mdb_d.updateAll(m_obj_d); // All of the moments must be updated, not just an_d
mg_d.compute(); // Compute gravity center moment
mc_d.compute(); // Compute centered moments AFTER gravity center
double area = 0;
if (m_obj_d.getType() == vpMomentObject::DISCRETE)
area = mb_d.get(2, 0) + mb_d.get(0, 2);
else
area = mb_d.get(0, 0);
// Update the normalized area moment with the desired area
man_d.setDesiredArea(area);
man_d.compute(); // Compute area normalized moment AFTER centered moments
mgn_d.compute(); // Compute gravity center normalized moment AFTER area normalized moment
// Desired plane
double A = 0.0;
double B = 0.0;
double C = 1.0 / Z_d;
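// The moment features need the equation of the object plane in the camera frame, expressed as
// 1/Z = A*x + B*y + C; since the desired tag is fronto-parallel at depth Z_d, A = B = 0 and C = 1/Z_d.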
// Construct area normalized features
vpFeatureMomentGravityCenterNormalized s_mgn(mdb, A, B, C), s_mgn_d(mdb_d, A, B, C);
vpFeatureMomentAreaNormalized s_man(mdb, A, B, C), s_man_d(mdb_d, A, B, C);
// Add the features
task.addFeature(s_mgn, s_mgn_d);
task.addFeature(s_man, s_man_d);
// Update desired gravity center normalized feature
s_mgn_d.update(A, B, C);
// Update desired area normalized feature
s_man_d.update(A, B, C);
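// The task therefore regulates the normalized gravity center (which keeps the tag centered, mainly
// its horizontal position given the 2 degrees of freedom of the unicycle) and the normalized area
// a_n (which is related to the tag depth and drives the robot forward/backward until the tag is
// seen as it would be at the desired distance Z_d).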
std::vector<double> time_vec;
for (;;) {
g.acquire(I);
vpDisplay::display(I);
double t = vpTime::measureTimeMs();
std::vector<vpHomogeneousMatrix> cMo_vec;
detector.detect(I, tagSize, cam, cMo_vec);
t = vpTime::measureTimeMs() - t;
time_vec.push_back(t);
{
std::stringstream ss;
ss << "Detection time: " << t << " ms";
vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
}
if (detector.getNbObjects() == 1) {
if (!serial_off) {
serial->write("LED_RING=2,0,10,0\n"); // Switch on led 2 to green: tag detected
}
// Update current points used to compute the moments
std::vector<vpImagePoint> vec_ip = detector.getPolygon(0);
vec_P.clear();
for (size_t i = 0; i < vec_ip.size(); i++) { // size = 4
double x = 0, y = 0;
vpPixelMeterConversion::convertPoint(cam, vec_ip[i], x, y);
vpPoint P;
P.set_x(x);
P.set_y(y);
vec_P.push_back(P);
}
// Display visual features
vpDisplay::displayPolygon(I, vec_ip, vpColor::green, 3); // Current polygon used to compute the an moment
vpDisplay::displayCross(I, detector.getCog(0), 15, vpColor::green,
3); // Current tag center of gravity
vpDisplay::displayLine(I, 0, static_cast<int>(cam.get_u0()), static_cast<int>(I.getHeight()) - 1, static_cast<int>(cam.get_u0()), vpColor::red,
3); // Vertical line as desired x position
// Current moments
m_obj.setType(vpMomentObject::DENSE_POLYGON); // Consider the AprilTag as a polygon
m_obj.fromVector(vec_P); // Initialize the object with the points coordinates
mg.linkTo(mdb); // Add gravity center to database
mc.linkTo(mdb); // Add centered moments to database
man.linkTo(mdb); // Add area normalized to database
mgn.linkTo(mdb); // Add gravity center normalized to database
mdb.updateAll(m_obj); // All of the moments must be updated, not just an_d
mg.compute(); // Compute gravity center moment
mc.compute(); // Compute centered moments AFTER gravity center
man.setDesiredArea(area); // The desired area was initialized to 0 (unknown at construction), so it must be updated here
man.compute(); // Compute area normalized moment AFTER centered moment
mgn.compute(); // Compute gravity center normalized moment AFTER area normalized moment
s_mgn.update(A, B, C);
s_mgn.compute_interaction();
s_man.update(A, B, C);
s_man.compute_interaction();
task.set_cVe(cVe);
task.set_eJe(eJe);
// Compute the control law. Velocities are computed in the mobile robot reference frame
vpColVector v = task.computeControlLaw();
std::cout << "Send velocity to the mbot: " << v[0] << " m/s " << vpMath::deg(v[1]) << " deg/s" << std::endl;
task.print();
double radius = 0.0325;
double L = 0.0725;
double motor_left = (-v[0] - L * v[1]) / radius;
double motor_right = (v[0] - L * v[1]) / radius;
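// For a differential drive with wheel radius r (radius) and half wheelbase L, wheel angular
// velocities follow from the body twist (v, w) as (v +/- L*w) / r. The signs used here additionally
// invert the left motor, presumably matching how the motors are mounted and wired on the Me Auriga;
// the values are then converted from rad/s to rpm (factor 30/pi) before being sent below.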
std::cout << "motor left vel: " << motor_left << " motor right vel: " << motor_right << std::endl;
if (!serial_off) {
// serial->write("LED_RING=3,0,0,10\n"); // Switch on led 3 to blue: motor left servoed
// serial->write("LED_RING=4,0,0,10\n"); // Switch on led 4 to blue: motor right servoed
}
std::stringstream ss;
double rpm_left = motor_left * 30. / M_PI;
double rpm_right = motor_right * 30. / M_PI;
ss << "MOTOR_RPM=" << vpMath::round(rpm_left) << "," << vpMath::round(rpm_right) << "\n";
std::cout << "Send: " << ss.str() << std::endl;
if (!serial_off) {
serial->write(ss.str());
}
} else {
// stop the robot
if (!serial_off) {
serial->write("LED_RING=2,10,0,0\n"); // Switch on led 2 to red: tag not detected
// serial->write("LED_RING=3,0,0,0\n"); // Switch on led 3 to blue: motor left not servoed
// serial->write("LED_RING=4,0,0,0\n"); // Switch on led 4 to blue: motor right not servoed
serial->write("MOTOR_RPM=0,-0\n"); // Stop the robot
}
}
vpDisplay::displayText(I, 20, 20, "Click to quit.", vpColor::red);
vpDisplay::flush(I);
if (display_on && save_image) {
vpDisplay::getImage(I, O);
vpImageIo::write(O, "image.png");
}
if (vpDisplay::getClick(I, false))
break;
}
if (!serial_off) {
serial->write("LED_RING=0,0,0,0\n"); // Switch off all led
}
std::cout << "Benchmark computation time" << std::endl;
std::cout << "Mean / Median / Std: " << vpMath::getMean(time_vec) << " ms"
<< " ; " << vpMath::getMedian(time_vec) << " ms"
<< " ; " << vpMath::getStdev(time_vec) << " ms" << std::endl;
if (display_on)
delete d;
if (!serial_off) {
delete serial;
}
} catch (const vpException &e) {
std::cerr << "Catch an exception: " << e.getMessage() << std::endl;
if (!serial_off) {
serial->write("LED_RING=1,10,0,0\n"); // Switch on led 1 to red
}
}
return EXIT_SUCCESS;
#else
(void)argc;
(void)argv;
#ifndef VISP_HAVE_APRILTAG
std::cout << "ViSP is not build with Apriltag support" << std::endl;
#endif
#ifndef VISP_HAVE_V4L2
std::cout << "ViSP is not build with v4l2 support" << std::endl;
#endif
std::cout << "Install missing 3rd parties, configure and build ViSP to run this tutorial" << std::endl;
return EXIT_SUCCESS;
#endif
}