diff --git a/apps/calibration/intrinsic/calibration-helper.hpp b/apps/calibration/intrinsic/calibration-helper.hpp index 63db49ee19..57b0c03235 100644 --- a/apps/calibration/intrinsic/calibration-helper.hpp +++ b/apps/calibration/intrinsic/calibration-helper.hpp @@ -42,16 +42,45 @@ #include #include #include +#include #ifndef DOXYGEN_SHOULD_SKIP_THIS +#if defined(ENABLE_VISP_NAMESPACE) +using namespace VISP_NAMESPACE_NAME; +#endif + namespace calib_helper { + +// Adapted from: +// https://stackoverflow.com/questions/58881746/c-how-to-cout-and-write-at-the-same-time/58881939#58881939 +class Tee +{ +private: + std::ostream &os; + std::ofstream &file; + +public: + Tee(std::ostream &os_, std::ofstream &file_) : os(os_), file(file_) { } + + template + Tee &operator<<(const T &thing) + { + os << thing; + if (file.is_open()) { + file << thing; + } + return *this; + } +}; + class Settings { public: - Settings() - : boardSize(), calibrationPattern(UNDEFINED), squareSize(0.), input(), tempo(0.), goodInput(false), patternToUse() + Settings(Tee &tee_) + : boardSize(), calibrationPattern(UNDEFINED), squareSize(0.), input(), tempo(0.), goodInput(false), patternToUse(), + tee(tee_) { boardSize = cv::Size(0, 0); calibrationPattern = UNDEFINED; @@ -60,26 +89,35 @@ class Settings tempo = 1.f; } - enum Pattern { UNDEFINED, CHESSBOARD, CIRCLES_GRID }; + enum Pattern { UNDEFINED, CHESSBOARD, CIRCLES_GRID, CHARUCOBOARD }; bool read(const std::string &filename) // Read the parameters { // reading configuration file - if (!VISP_NAMESPACE_ADDRESSING vpIoTools::loadConfigFile(filename)) + if (!vpIoTools::loadConfigFile(filename)) return false; - VISP_NAMESPACE_ADDRESSING vpIoTools::readConfigVar("BoardSize_Width:", boardSize.width); - VISP_NAMESPACE_ADDRESSING vpIoTools::readConfigVar("BoardSize_Height:", boardSize.height); - VISP_NAMESPACE_ADDRESSING vpIoTools::readConfigVar("Square_Size:", squareSize); - VISP_NAMESPACE_ADDRESSING vpIoTools::readConfigVar("Calibrate_Pattern:", 
patternToUse); - VISP_NAMESPACE_ADDRESSING vpIoTools::readConfigVar("Input:", input); - VISP_NAMESPACE_ADDRESSING vpIoTools::readConfigVar("Tempo:", tempo); - - std::cout << "grid width : " << boardSize.width << std::endl; - std::cout << "grid height: " << boardSize.height << std::endl; - std::cout << "square size: " << squareSize << std::endl; - std::cout << "pattern : " << patternToUse << std::endl; - std::cout << "input seq : " << input << std::endl; - std::cout << "tempo : " << tempo << std::endl; + vpIoTools::readConfigVar("BoardSize_Width:", boardSize.width); + vpIoTools::readConfigVar("BoardSize_Height:", boardSize.height); + vpIoTools::readConfigVar("Square_Size:", squareSize); + vpIoTools::readConfigVar("Calibrate_Pattern:", patternToUse); + vpIoTools::readConfigVar("Input:", input); + vpIoTools::readConfigVar("Tempo:", tempo); + + tee << "grid width : " << boardSize.width << "\n"; + tee << "grid height: " << boardSize.height << "\n"; + tee << "square size: " << squareSize << "\n"; + tee << "pattern : " << patternToUse << "\n"; + if (patternToUse.compare("CHARUCOBOARD") == 0) { + vpIoTools::readConfigVar("Charuco_Marker_Size:", markerSize); + vpIoTools::readConfigVar("Charuco_Dictionary:", dictionnary); + vpIoTools::readConfigVar("Charuco_Legacy_Pattern:", legacyPattern); + + tee << " - ChArUco marker size: " << markerSize << "\n"; + tee << " - ChArUco dictionnary: " << dictionnary << "\n"; + tee << " - ChArUco legacy pattern?: " << legacyPattern << "\n"; + } + tee << "input seq : " << input << "\n"; + tee << "tempo : " << tempo << "\n"; interprate(); return true; } @@ -88,11 +126,11 @@ class Settings { goodInput = true; if (boardSize.width <= 0 || boardSize.height <= 0) { - std::cerr << "Invalid Board size: " << boardSize.width << " " << boardSize.height << std::endl; + tee << "Invalid Board size: " << boardSize.width << " " << boardSize.height << "\n"; goodInput = false; } if (squareSize <= 10e-6) { - std::cerr << "Invalid square size " << squareSize << 
std::endl; + tee << "Invalid square size " << squareSize << "\n"; goodInput = false; } @@ -104,8 +142,20 @@ class Settings calibrationPattern = CHESSBOARD; else if (patternToUse.compare("CIRCLES_GRID") == 0) calibrationPattern = CIRCLES_GRID; + else if (patternToUse.compare("CHARUCOBOARD") == 0) { + calibrationPattern = CHARUCOBOARD; + + if (markerSize <= 10e-6) { + tee << "Invalid ArUco size " << markerSize << "\n"; + goodInput = false; + } + if (markerSize >= squareSize) { + tee << "ArUco size (" << markerSize << ") is greater than square size(" << squareSize << ")" << "\n"; + goodInput = false; + } + } if (calibrationPattern == UNDEFINED) { - std::cerr << " Inexistent camera calibration mode: " << patternToUse << std::endl; + tee << " Inexistent camera calibration mode: " << patternToUse << "\n"; goodInput = false; } } @@ -120,43 +170,49 @@ class Settings std::string input; // The input image sequence float tempo; // Tempo in seconds between two images. If > 10 wait a click to // continue + float markerSize; // ArUco marker length in the case of ChArUco board. + std::string dictionnary; // ArUco dictionnary in the case of ChArUco board. 
+ bool legacyPattern; // See OpenCV "setLegacyPattern()" doc and https://github.com/opencv/opencv/issues/23152 bool goodInput; private: std::string patternToUse; + Tee &tee; }; struct CalibInfo { - CalibInfo(const VISP_NAMESPACE_ADDRESSING vpImage &img, const std::vector &points, - const std::vector &imPts, const std::string &frame_name) + CalibInfo(const vpImage &img, const std::vector &points, + const std::vector &imPts, const std::string &frame_name) : m_img(img), m_points(points), m_imPts(imPts), m_frame_name(frame_name) { } - VISP_NAMESPACE_ADDRESSING vpImage m_img; - std::vector m_points; - std::vector m_imPts; + vpImage m_img; + std::vector m_points; + std::vector m_imPts; std::string m_frame_name; }; -void drawCalibrationOccupancy(VISP_NAMESPACE_ADDRESSING vpImage &I, const std::vector &calib_info, - unsigned int patternW) +void drawCalibrationOccupancy(vpImage &I, const std::vector &calib_info, + unsigned int patternW, bool charuco) { + int shift1 = charuco ? -2 : -1; + int shift2 = charuco ? 
-1 : 0; I = 0u; unsigned char pixel_value = static_cast(255.0 / calib_info.size()); for (size_t idx = 0; idx < calib_info.size(); idx++) { const CalibInfo &calib = calib_info[idx]; - std::vector corners; + std::vector corners; corners.push_back(calib.m_imPts.front()); - corners.push_back(*(calib.m_imPts.begin() + patternW - 1)); + corners.push_back(*(calib.m_imPts.begin() + patternW + shift1)); corners.push_back(calib.m_imPts.back()); - corners.push_back(*(calib.m_imPts.end() - patternW)); - VISP_NAMESPACE_ADDRESSING vpPolygon poly(corners); + corners.push_back(*(calib.m_imPts.end() - (patternW + shift2))); + vpPolygon poly(corners); for (unsigned int i = 0; i < I.getHeight(); i++) { for (unsigned int j = 0; j < I.getWidth(); j++) { - if (poly.isInside(VISP_NAMESPACE_ADDRESSING vpImagePoint(i, j))) { + if (poly.isInside(vpImagePoint(i, j))) { I[i][j] += pixel_value; } } @@ -164,26 +220,29 @@ void drawCalibrationOccupancy(VISP_NAMESPACE_ADDRESSING vpImage & } } -std::vector undistort(const VISP_NAMESPACE_ADDRESSING vpCameraParameters &cam_dist, const std::vector &imPts) +std::vector undistort(const vpCameraParameters &cam_dist, const std::vector &imPts) { - std::vector imPts_undist; + std::vector imPts_undist; - VISP_NAMESPACE_ADDRESSING vpCameraParameters cam(cam_dist.get_px(), cam_dist.get_py(), cam_dist.get_u0(), cam_dist.get_v0()); + vpCameraParameters cam(cam_dist.get_px(), cam_dist.get_py(), cam_dist.get_u0(), cam_dist.get_v0()); for (size_t i = 0; i < imPts.size(); i++) { double x = 0, y = 0; - VISP_NAMESPACE_ADDRESSING vpPixelMeterConversion::convertPoint(cam_dist, imPts[i], x, y); + vpPixelMeterConversion::convertPoint(cam_dist, imPts[i], x, y); - VISP_NAMESPACE_ADDRESSING vpImagePoint imPt; - VISP_NAMESPACE_ADDRESSING vpMeterPixelConversion::convertPoint(cam, x, y, imPt); + vpImagePoint imPt; + vpMeterPixelConversion::convertPoint(cam, x, y, imPt); imPts_undist.push_back(imPt); } return imPts_undist; } -bool extractCalibrationPoints(const Settings &s, 
const cv::Mat &cvI, std::vector &pointBuf) +bool extractCalibrationPoints(const Settings &s, const cv::Mat &cvI, + const cv::Ptr &ch_detector, std::vector &pointBuf) { + std::vector markerIds; bool found = false; + switch (s.calibrationPattern) // Find feature points on the input format { case Settings::CHESSBOARD: @@ -198,6 +257,10 @@ bool extractCalibrationPoints(const Settings &s, const cv::Mat &cvI, std::vector case Settings::CIRCLES_GRID: found = findCirclesGrid(cvI, s.boardSize, pointBuf, cv::CALIB_CB_SYMMETRIC_GRID); break; + case Settings::CHARUCOBOARD: + ch_detector->detectBoard(cvI, pointBuf, markerIds); + found = pointBuf.size() == (size_t)(s.boardSize.width-1)*(s.boardSize.height-1); + break; case Settings::UNDEFINED: default: break; @@ -205,7 +268,7 @@ bool extractCalibrationPoints(const Settings &s, const cv::Mat &cvI, std::vector if (found) // If done with success, { - std::vector data; + std::vector data; if (s.calibrationPattern == Settings::CHESSBOARD) { // improve the found corners' coordinate accuracy for chessboard @@ -221,6 +284,715 @@ bool extractCalibrationPoints(const Settings &s, const cv::Mat &cvI, std::vector return found; } +void computeDistortionDisplacementMap(const vpCameraParameters &cam_dist, vpImage &I_color) +{ + vpImage displacement_map(I_color.getHeight(), I_color.getWidth()); + + double min_displ = 1e12, max_displ = 0; + for (unsigned int i = 0; i < I_color.getHeight(); i++) { + for (unsigned int j = 0; j < I_color.getWidth(); j++) { + double x = 0, y = 0, id = 0, jd = 0; + vpPixelMeterConversion::convertPointWithoutDistortion(cam_dist, j, i, x, y); + vpMeterPixelConversion::convertPoint(cam_dist, x, y, jd, id); + + double erri = id - i; + double errj = jd - j; + double displ = std::sqrt(erri*erri + errj*errj); + displacement_map[i][j] = displ; + min_displ = displ < min_displ ? displ : min_displ; + max_displ = displ > max_displ ? 
displ : max_displ; + } + } + + double a = 255 / (max_displ - min_displ); + double b = (-255 * min_displ) / (max_displ - min_displ); + + vpImage I_gray(I_color.getHeight(), I_color.getWidth()); + for (unsigned int i = 0; i < displacement_map.getHeight(); i++) { + for (unsigned int j = 0; j < displacement_map.getWidth(); j++) { + I_gray[i][j] = static_cast(vpMath::clamp(a * displacement_map[i][j] + b, 0.0, 255.0)); + } + } + + vpColormap colormap(vpColormap::COLORMAP_TURBO); + colormap.convert(I_gray, I_color); +} + +void createMosaic(const std::vector> &list_imgs, std::vector> &list_mosaics, + unsigned int nb_rows = 4, unsigned int nb_cols = 6) +{ + const unsigned int nb_totals = nb_rows*nb_cols; + if (list_imgs.empty()) { + return; + } + + const unsigned int img_h = list_imgs[0].getHeight(); + const unsigned int img_w = list_imgs[0].getWidth(); + vpImage mosaic(img_h*nb_rows, img_w*nb_cols); + for (size_t i = 0; i < list_imgs.size(); i += nb_totals) { + mosaic = vpRGBa(0, 0, 0); + + for (size_t j = 0; j < nb_totals; j++) { + const size_t idx = i + j; + const unsigned int pos_mod = idx % nb_totals; + if (idx >= list_imgs.size()) { + break; + } + + const unsigned int pos_row = pos_mod / nb_cols; + vpImagePoint top_left(pos_row*img_h, (pos_mod - pos_row*nb_cols)*img_w); + mosaic.insert(list_imgs[idx], top_left); + } + + list_mosaics.push_back(mosaic); + } +} + +double getProjectionErrorUV(const std::vector &calibrator, const std::vector &calib_info, + std::vector> &err_imPt_imgs, std::vector &err_imPt, bool with_dist) +{ + assert(!calib_info.empty()); + double max_scale_uv = -1e6; + err_imPt_imgs.reserve(calib_info.size()); + err_imPt.reserve(calib_info.size()*calib_info[0].m_points.size()); + + for (size_t i = 0; i < calibrator.size(); i++) { + const vpCalibration &calib = calibrator[i]; + const CalibInfo &calib_info_cur = calib_info[i]; + std::vector err_imPt_per_img; + err_imPt_per_img.reserve(calib_info_cur.m_points.size()); + + for (size_t j = 0; j < 
calib_info_cur.m_points.size(); j++) { + vpPoint pt_3d = calib_info_cur.m_points[j]; + vpImagePoint pt_proj; + if (with_dist) { + pt_3d.project(calib.cMo_dist); + vpMeterPixelConversion::convertPoint(calib.cam_dist, pt_3d.get_x(), pt_3d.get_y(), pt_proj); + } + else { + pt_3d.project(calib.cMo); + vpMeterPixelConversion::convertPoint(calib.cam, pt_3d.get_x(), pt_3d.get_y(), pt_proj); + } + err_imPt_per_img.push_back(calib_info_cur.m_imPts[j] - pt_proj); + err_imPt.push_back(calib_info_cur.m_imPts[j] - pt_proj); + + double err_u = std::fabs(err_imPt_per_img.back().get_u()); + double err_v = std::fabs(err_imPt_per_img.back().get_v()); + max_scale_uv = err_u > max_scale_uv ? err_u : max_scale_uv; + max_scale_uv = err_v > max_scale_uv ? err_v : max_scale_uv; + } + err_imPt_imgs.push_back(err_imPt_per_img); + } + + return max_scale_uv; +} + +void displayProjectionErrorUV(const vpImage &I_err_imPt, const std::vector &err_imPt, + unsigned int disp_size, double max_scale_uv, bool with_dist, const vpColor &color, unsigned int offset_text, + const vpColor &color_text, bool cv_calib, unsigned int offset_text_incr = 15) +{ + const unsigned int margin = 25; + const unsigned int margin_2 = margin/2; + const unsigned int graph_size = disp_size - 2*margin; + const unsigned int tick_size = 10; + const unsigned int graph_offset = cv_calib ? 0 : (with_dist ? 
disp_size : 0); + + // axis arrows + // left + vpDisplay::displayArrow(I_err_imPt, vpImagePoint(disp_size/2, graph_offset+disp_size/2), + vpImagePoint(disp_size/2, graph_offset+margin_2), vpColor::white, 2); + // right + vpDisplay::displayArrow(I_err_imPt, vpImagePoint(disp_size/2, graph_offset+disp_size/2), + vpImagePoint(disp_size/2, graph_offset+disp_size-margin_2), vpColor::white, 2); + // up + vpDisplay::displayArrow(I_err_imPt, vpImagePoint(disp_size/2, graph_offset+disp_size/2), + vpImagePoint(margin_2, graph_offset+disp_size/2), vpColor::white, 2); + // down + vpDisplay::displayArrow(I_err_imPt, vpImagePoint(disp_size/2, graph_offset+disp_size/2), + vpImagePoint(disp_size-margin_2, graph_offset+disp_size/2), vpColor::white, 2); + + // outermost tick + // left + vpDisplay::displayLine(I_err_imPt, vpImagePoint(disp_size/2-tick_size, graph_offset+margin), + vpImagePoint(disp_size/2+tick_size, graph_offset+margin), vpColor::white); + // right + vpDisplay::displayLine(I_err_imPt, vpImagePoint(disp_size/2-tick_size, graph_offset+disp_size-margin), + vpImagePoint(disp_size/2+tick_size, graph_offset+disp_size-margin), vpColor::white); + // up + vpDisplay::displayLine(I_err_imPt, vpImagePoint(margin, graph_offset+disp_size/2-tick_size), + vpImagePoint(margin, graph_offset+disp_size/2+tick_size), vpColor::white); + // down + vpDisplay::displayLine(I_err_imPt, vpImagePoint(disp_size-margin, graph_offset+disp_size/2-tick_size), + vpImagePoint(disp_size-margin, graph_offset+disp_size/2+tick_size), vpColor::white); + // label + std::ostringstream oss_max; + oss_max << std::fixed << std::setprecision(2) << max_scale_uv << " px"; + std::string max_val = oss_max.str(); + vpDisplay::displayText(I_err_imPt, vpImagePoint(disp_size/2 + 30, graph_offset+disp_size-50), max_val, vpColor::white); + + // half tick + const unsigned int half_tick_pos = graph_size/4; + // left + vpDisplay::displayLine(I_err_imPt, vpImagePoint(disp_size/2-tick_size, graph_offset+margin+half_tick_pos), + 
vpImagePoint(disp_size/2+tick_size, graph_offset+margin+half_tick_pos), vpColor::white); + // right + vpDisplay::displayLine(I_err_imPt, vpImagePoint(disp_size/2-tick_size, graph_offset+disp_size/2+half_tick_pos), + vpImagePoint(disp_size/2+tick_size, graph_offset+disp_size/2+half_tick_pos), vpColor::white); + // up + vpDisplay::displayLine(I_err_imPt, vpImagePoint(margin+half_tick_pos, graph_offset+disp_size/2-tick_size), + vpImagePoint(margin+half_tick_pos, graph_offset+disp_size/2+tick_size), vpColor::white); + // down + vpDisplay::displayLine(I_err_imPt, vpImagePoint(disp_size/2+half_tick_pos, graph_offset+disp_size/2-tick_size), + vpImagePoint(disp_size/2+half_tick_pos, graph_offset+disp_size/2+tick_size), vpColor::white); + + std::vector u_vec, v_vec; + u_vec.reserve(err_imPt.size()); + v_vec.reserve(err_imPt.size()); + + double disp_scale_imPt = graph_size / (2*max_scale_uv); + for (size_t i = 0; i < err_imPt.size(); i++) { + vpImagePoint display_imPt(I_err_imPt.getHeight()/2.0 + disp_scale_imPt*err_imPt[i].get_i(), + graph_offset + I_err_imPt.getWidth()/(cv_calib ? 2.0 : 4.0) + disp_scale_imPt*err_imPt[i].get_j()); + vpDisplay::displayCross(I_err_imPt, display_imPt, 8, color); + + u_vec.push_back(err_imPt[i].get_j()); + v_vec.push_back(err_imPt[i].get_v()); + } + + double reproj_error = 0; + for (size_t i = 0; i < err_imPt.size(); i++) { + reproj_error += err_imPt[i].get_i()*err_imPt[i].get_i() + err_imPt[i].get_j()*err_imPt[i].get_j(); + } + reproj_error = std::sqrt(reproj_error / err_imPt.size()); + + double u_err_mean = vpMath::getMean(u_vec), u_err_med = vpMath::getMedian(u_vec), u_err_std = vpMath::getStdev(u_vec); + double v_err_mean = vpMath::getMean(v_vec), v_err_med = vpMath::getMedian(v_vec), v_err_std = vpMath::getStdev(v_vec); + + std::ostringstream oss; + if (!cv_calib) { + oss << std::fixed << std::setprecision(3) << "Reprojection error" + << (with_dist ? 
" (with dist): " : " (without dist): ") << reproj_error << " (" << err_imPt.size() << " pts)"; + vpDisplay::displayText(I_err_imPt, offset_text, graph_offset+20, oss.str(), color_text); + } + else { + oss << std::fixed << std::setprecision(3) << "Reprojection error: " << reproj_error + << " (" << err_imPt.size() << " pts)"; + vpDisplay::displayText(I_err_imPt, offset_text, graph_offset+20, oss.str(), color_text); + } + + oss.str(""); + oss << std::fixed << std::setprecision(3) << "u error: " << u_err_mean << " (mean) " << u_err_med + << " (median) " << u_err_std << " (std)"; + vpDisplay::displayText(I_err_imPt, offset_text+offset_text_incr, graph_offset+20, oss.str(), color_text); + + oss.str(""); + oss << std::fixed << std::setprecision(3) << "v error: " << v_err_mean << " (mean) " << v_err_med + << " (median) " << v_err_std << " (std)"; + vpDisplay::displayText(I_err_imPt, offset_text+2*offset_text_incr, graph_offset+20, oss.str(), color_text); +} + +// +// OpenCV calibration from opencv/samples/cpp/calibration.cpp +// +enum CvPattern { CV_CHESSBOARD, CV_CIRCLES_GRID, CV_ASYMMETRIC_CIRCLES_GRID, CV_CHARUCOBOARD }; + +double computeReprojectionErrors( + const std::vector > &objectPoints, + const std::vector > &imagePoints, + const std::vector &rvecs, const std::vector &tvecs, + const cv::Mat &cameraMatrix, const cv::Mat &distCoeffs, std::vector> &imagePoints_proj, + std::vector &perViewErrors) +{ + imagePoints_proj.resize(objectPoints.size()); + std::vector imagePoints2; + int totalPoints = 0; + double totalErr = 0, err = 0; + perViewErrors.resize(objectPoints.size()); + + for (int i = 0; i < (int)objectPoints.size(); i++) { + cv::projectPoints(cv::Mat(objectPoints[i]), rvecs[i], tvecs[i], + cameraMatrix, distCoeffs, imagePoints2); + err = cv::norm(cv::Mat(imagePoints[i]), cv::Mat(imagePoints2), cv::NORM_L2); + int n = (int)objectPoints[i].size(); + perViewErrors[i] = (float)std::sqrt(err*err/n); + totalErr += err*err; + totalPoints += n; + imagePoints_proj[i] = 
imagePoints2; + } + + return std::sqrt(totalErr/totalPoints); +} + +void calcChessboardCorners(cv::Size boardSize, float squareSize, std::vector &corners, + CvPattern patternType = CV_CHESSBOARD) +{ + corners.resize(0); + + switch (patternType) { + case CV_CHESSBOARD: + case CV_CIRCLES_GRID: + for (int i = 0; i < boardSize.height; i++) + for (int j = 0; j < boardSize.width; j++) + corners.push_back(cv::Point3f(float(j*squareSize), + float(i*squareSize), 0)); + break; + + case CV_ASYMMETRIC_CIRCLES_GRID: + for (int i = 0; i < boardSize.height; i++) + for (int j = 0; j < boardSize.width; j++) + corners.push_back(cv::Point3f(float((2*j + i % 2)*squareSize), + float(i*squareSize), 0)); + break; + + case CV_CHARUCOBOARD: + for (int i = 0; i < boardSize.height-1; i++) + for (int j = 0; j < boardSize.width-1; j++) + corners.push_back(cv::Point3f(float(j*squareSize), + float(i*squareSize), 0)); + break; + default: + throw vpException(vpException::badValue, "Unknown pattern type: " + std::to_string(patternType)); + } +} + +void saveCameraParams(const std::string &filename, + cv::Size imageSize, cv::Size boardSize, + float squareSize, float aspectRatio, int flags, + const cv::Mat &cameraMatrix, const cv::Mat &distCoeffs, + const std::vector &rvecs, const std::vector &tvecs, + const std::vector &reprojErrs, + const std::vector > &imagePoints, + const std::vector &newObjPoints, + double totalAvgErr) +{ + cv::FileStorage fs(filename, cv::FileStorage::WRITE); + + time_t tt; + time(&tt); + struct tm *t2 = localtime(&tt); + char buf[1024]; + strftime(buf, sizeof(buf)-1, "%c", t2); + + fs << "calibration_time" << buf; + + if (!rvecs.empty() || !reprojErrs.empty()) + fs << "nframes" << (int)std::max(rvecs.size(), reprojErrs.size()); + fs << "image_width" << imageSize.width; + fs << "image_height" << imageSize.height; + fs << "board_width" << boardSize.width; + fs << "board_height" << boardSize.height; + fs << "square_size" << squareSize; + + if (flags & cv::CALIB_FIX_ASPECT_RATIO) + 
fs << "aspectRatio" << aspectRatio; + + if (flags != 0) { + snprintf(buf, sizeof(buf), "flags: %s%s%s%s", + flags & cv::CALIB_USE_INTRINSIC_GUESS ? "+use_intrinsic_guess" : "", + flags & cv::CALIB_FIX_ASPECT_RATIO ? "+fix_aspectRatio" : "", + flags & cv::CALIB_FIX_PRINCIPAL_POINT ? "+fix_principal_point" : "", + flags & cv::CALIB_ZERO_TANGENT_DIST ? "+zero_tangent_dist" : ""); + //cvWriteComment( *fs, buf, 0 ); + } + + fs << "flags" << flags; + + fs << "camera_matrix" << cameraMatrix; + fs << "distortion_coefficients" << distCoeffs; + + fs << "avg_reprojection_error" << totalAvgErr; + if (!reprojErrs.empty()) + fs << "per_view_reprojection_errors" << cv::Mat(reprojErrs); + + if (!rvecs.empty() && !tvecs.empty()) { + CV_Assert(rvecs[0].type() == tvecs[0].type()); + cv::Mat bigmat((int)rvecs.size(), 6, rvecs[0].type()); + for (int i = 0; i < (int)rvecs.size(); i++) { + cv::Mat r = bigmat(cv::Range(i, i+1), cv::Range(0, 3)); + cv::Mat t = bigmat(cv::Range(i, i+1), cv::Range(3, 6)); + + CV_Assert(rvecs[i].rows == 3 && rvecs[i].cols == 1); + CV_Assert(tvecs[i].rows == 3 && tvecs[i].cols == 1); + //*.t() is MatExpr (not Mat) so we can use assignment operator + r = rvecs[i].t(); + t = tvecs[i].t(); + } + //cvWriteComment( *fs, "a set of 6-tuples (rotation vector + translation vector) for each view", 0 ); + fs << "extrinsic_parameters" << bigmat; + } + + if (!imagePoints.empty()) { + cv::Mat imagePtMat((int)imagePoints.size(), (int)imagePoints[0].size(), CV_32FC2); + for (int i = 0; i < (int)imagePoints.size(); i++) { + cv::Mat r = imagePtMat.row(i).reshape(2, imagePtMat.cols); + cv::Mat imgpti(imagePoints[i]); + imgpti.copyTo(r); + } + fs << "image_points" << imagePtMat; + } + + if (!newObjPoints.empty()) { + fs << "grid_points" << newObjPoints; + } +} + +bool runCalibration(const std::vector> &imagePoints, + cv::Size imageSize, cv::Size boardSize, CvPattern patternType, + float squareSize, float aspectRatio, + float grid_width, bool release_object, + int flags, cv::Mat 
&cameraMatrix, cv::Mat &distCoeffs, + std::vector &rvecs, std::vector &tvecs, + std::vector &reprojErrs, + std::vector &newObjPoints, + std::vector> &imagePoints_proj, + double &totalAvgErr, Tee &tee) +{ + if (flags & cv::CALIB_FIX_ASPECT_RATIO) + cameraMatrix.at(0, 0) = aspectRatio; + + distCoeffs = cv::Mat::zeros(8, 1, CV_64F); + + std::vector > objectPoints(1); + calcChessboardCorners(boardSize, squareSize, objectPoints[0], patternType); + // Board imperfectness correction introduced in PR #12772 + // The correction does not make sense for asymmetric and assymetric circles grids + if (patternType == CV_CHESSBOARD) { + int offset = boardSize.width - 1; + objectPoints[0][offset].x = objectPoints[0][0].x + grid_width; + } + else if (patternType == CV_CHARUCOBOARD) { + int offset = boardSize.width - 2; + objectPoints[0][offset].x = objectPoints[0][0].x + grid_width; + } + + newObjPoints = objectPoints[0]; + + objectPoints.resize(imagePoints.size(), objectPoints[0]); + + double rms; + int iFixedPoint = -1; + if (release_object) + iFixedPoint = boardSize.width - 1; + rms = cv::calibrateCameraRO(objectPoints, imagePoints, imageSize, iFixedPoint, + cameraMatrix, distCoeffs, rvecs, tvecs, newObjPoints, flags); + // printf("RMS error reported by calibrateCamera: %g\n", rms); + tee << "RMS error reported by OpenCV calibrateCameraRO(): " << rms << "\n"; + + bool ok = checkRange(cameraMatrix) && checkRange(distCoeffs); + + if (release_object) { + tee << "New board corners: " << "\n"; + tee << newObjPoints[0] << "\n"; + tee << newObjPoints[boardSize.width - 1] << "\n"; + tee << newObjPoints[boardSize.width * (boardSize.height - 1)] << "\n"; + tee << newObjPoints.back() << "\n"; + } + + objectPoints.clear(); + objectPoints.resize(imagePoints.size(), newObjPoints); + totalAvgErr = computeReprojectionErrors(objectPoints, imagePoints, + rvecs, tvecs, cameraMatrix, distCoeffs, imagePoints_proj, reprojErrs); + + return ok; +} + +bool runAndSave(const std::string &outputFilename, + 
const std::vector > &imagePoints, + std::vector> &imagePoints_proj, + cv::Size imageSize, cv::Size boardSize, CvPattern patternType, float squareSize, + float grid_width, bool release_object, + float aspectRatio, int flags, cv::Mat &cameraMatrix, + cv::Mat &distCoeffs, std::vector &reprojErrs, + bool writeExtrinsics, bool writePoints, bool writeGrid, + std::vector &rvecs, std::vector &tvecs, + Tee &tee) +{ + double totalAvgErr = 0; + std::vector newObjPoints; + + bool ok = runCalibration(imagePoints, imageSize, boardSize, patternType, squareSize, + aspectRatio, grid_width, release_object, flags, cameraMatrix, distCoeffs, + rvecs, tvecs, reprojErrs, newObjPoints, imagePoints_proj, totalAvgErr, tee); + // printf("%s. avg reprojection error = %.7f\n", + // ok ? "Calibration succeeded" : "Calibration failed", + // totalAvgErr); + tee << (ok ? "OpenCV Calibration succeeded. " : "OpenCV Calibration failed. ") + << "Avg reprojection error = " << totalAvgErr << "\n"; + + if (ok) + saveCameraParams(outputFilename, imageSize, + boardSize, squareSize, aspectRatio, + flags, cameraMatrix, distCoeffs, + writeExtrinsics ? rvecs : std::vector(), + writeExtrinsics ? tvecs : std::vector(), + writeExtrinsics ? reprojErrs : std::vector(), + writePoints ? imagePoints : std::vector >(), + writeGrid ? 
newObjPoints : std::vector(), + totalAvgErr); + return ok; +} + +std::vector cv_undistort(const cv::Mat &cv_cam, const cv::Mat &cv_dist, + const std::vector &imPts) +{ + std::vector imPts_undist; + std::vector cv_imPts, cv_imPts_undist; + imPts_undist.reserve(imPts.size()); + cv_imPts.reserve(imPts.size()); + cv_imPts_undist.reserve(imPts.size()); + + for (size_t i = 0; i < imPts.size(); i++) { + cv_imPts.push_back(cv::Point2d(imPts[i].get_u(), imPts[i].get_v())); + } + cv::undistortPoints(cv_imPts, cv_imPts_undist, cv_cam, cv_dist); + + for (size_t i = 0; i < cv_imPts_undist.size(); i++) { + imPts_undist.push_back(vpImagePoint(cv_imPts_undist[i].y, cv_imPts_undist[i].x)); + } + + return imPts_undist; +} + +void undistortDistort(const cv::Mat &cam, const cv::Mat &dist, double j, double i, double &jd, double &id) +{ + double inv_px = 1 / cam.at(0, 0); + double inv_py = 1 / cam.at(1, 1); + + double x = 0, y = 0; + x = (j - cam.at(0, 2)) * inv_px; + y = (i - cam.at(1, 2)) * inv_py; + + vpMeterPixelConversion::convertPoint(cam, dist, x, y, jd, id); +} + +void computeDistortionDisplacementMap(const cv::Mat &cam, const cv::Mat &dist, vpImage &I_color) +{ + assert(cam.type() == CV_64F); + assert(dist.type() == CV_64F); + vpImage displacement_map(I_color.getHeight(), I_color.getWidth()); + + double min_displ = 1e12, max_displ = 0; + for (unsigned int i = 0; i < I_color.getHeight(); i++) { + for (unsigned int j = 0; j < I_color.getWidth(); j++) { + double jd = 0, id = 0; + undistortDistort(cam, dist, j, i, jd, id); + + double erri = id - i; + double errj = jd - j; + double displ = std::sqrt(erri*erri + errj*errj); + displacement_map[i][j] = displ; + min_displ = displ < min_displ ? displ : min_displ; + max_displ = displ > max_displ ? 
displ : max_displ; + } + } + + double a = 255 / (max_displ - min_displ); + double b = (-255 * min_displ) / (max_displ - min_displ); + + vpImage I_gray(I_color.getHeight(), I_color.getWidth()); + for (unsigned int i = 0; i < displacement_map.getHeight(); i++) { + for (unsigned int j = 0; j < displacement_map.getWidth(); j++) { + I_gray[i][j] = static_cast(vpMath::clamp(a * displacement_map[i][j] + b, 0.0, 255.0)); + } + } + + vpColormap colormap(vpColormap::COLORMAP_TURBO); + colormap.convert(I_gray, I_color); +} + +double getProjectionErrorUV(const cv::Mat &cam, const cv::Mat &dist, const std::vector &calib_info, + std::vector &rvecs, std::vector &tvecs, std::vector> &err_imPt_imgs, + std::vector &err_imPt, bool with_dist) +{ + assert(!calib_info.empty()); + double max_scale_uv = -1e6; + err_imPt_imgs.reserve(calib_info.size()); + err_imPt.reserve(calib_info.size()*calib_info[0].m_points.size()); + + std::vector obj_pts; + obj_pts.reserve(calib_info[0].m_points.size()); + for (size_t j = 0; j < calib_info[0].m_points.size(); j++) { + cv::Point3d obj_pt( + calib_info[0].m_points[j].get_oX(), + calib_info[0].m_points[j].get_oY(), + calib_info[0].m_points[j].get_oZ() + ); + obj_pts.push_back(obj_pt); + } + + for (size_t i = 0; i < calib_info.size(); i++) { + const CalibInfo &calib_info_cur = calib_info[i]; + std::vector err_imPt_per_img; + err_imPt_per_img.reserve(calib_info_cur.m_points.size()); + + std::vector img_pts; + img_pts.reserve(calib_info_cur.m_points.size()); + + cv::projectPoints(obj_pts, rvecs[i], tvecs[i], cam, with_dist ? 
dist : cv::noArray(), img_pts); + + for (size_t j = 0; j < calib_info_cur.m_points.size(); j++) { + vpImagePoint pt_proj(img_pts[j].y, img_pts[j].x); + + err_imPt_per_img.push_back(calib_info_cur.m_imPts[j] - pt_proj); + err_imPt.push_back(calib_info_cur.m_imPts[j] - pt_proj); + + double err_u = std::fabs(err_imPt_per_img.back().get_u()); + double err_v = std::fabs(err_imPt_per_img.back().get_v()); + max_scale_uv = err_u > max_scale_uv ? err_u : max_scale_uv; + max_scale_uv = err_v > max_scale_uv ? err_v : max_scale_uv; + } + + err_imPt_imgs.push_back(err_imPt_per_img); + } + + return max_scale_uv; +} + +void parseOpenCVCalibFlags(const std::string &str, int &cv_flags, Tee &tee) +{ + if (str.find("CALIB_USE_INTRINSIC_GUESS") != std::string::npos) { + cv_flags |= cv::CALIB_USE_INTRINSIC_GUESS; + tee << "CALIB_USE_INTRINSIC_GUESS" << "\n"; + } + if (str.find("CALIB_FIX_ASPECT_RATIO") != std::string::npos) { + cv_flags |= cv::CALIB_FIX_ASPECT_RATIO; + tee << "CALIB_FIX_ASPECT_RATIO" << "\n"; + } + if (str.find("CALIB_FIX_PRINCIPAL_POINT") != std::string::npos) { + cv_flags |= cv::CALIB_FIX_PRINCIPAL_POINT; + tee << "CALIB_FIX_PRINCIPAL_POINT" << "\n"; + } + if (str.find("CALIB_ZERO_TANGENT_DIST") != std::string::npos) { + cv_flags |= cv::CALIB_ZERO_TANGENT_DIST; + tee << "CALIB_ZERO_TANGENT_DIST" << "\n"; + } + if (str.find("CALIB_FIX_FOCAL_LENGTH") != std::string::npos) { + cv_flags |= cv::CALIB_FIX_FOCAL_LENGTH; + tee << "CALIB_FIX_FOCAL_LENGTH" << "\n"; + } + if (str.find("CALIB_FIX_K1") != std::string::npos) { + cv_flags |= cv::CALIB_FIX_K1; + tee << "CALIB_FIX_K1" << "\n"; + } + if (str.find("CALIB_FIX_K2") != std::string::npos) { + cv_flags |= cv::CALIB_FIX_K2; + tee << "CALIB_FIX_K2" << "\n"; + } + if (str.find("CALIB_FIX_K3") != std::string::npos) { + cv_flags |= cv::CALIB_FIX_K3; + tee << "CALIB_FIX_K3" << "\n"; + } + if (str.find("CALIB_USE_QR") != std::string::npos) { + cv_flags |= cv::CALIB_USE_QR; + tee << "CALIB_USE_QR" << "\n"; + } + if 
(str.find("CALIB_USE_LU") != std::string::npos) { + cv_flags |= cv::CALIB_USE_LU; + tee << "CALIB_USE_LU" << "\n"; + } +} +namespace calib_helper_compat +{ +enum PredefinedDictionaryType +{ + DICT_4X4_50 = 0, ///< 4x4 bits, minimum hamming distance between any two codes = 4, 50 codes + DICT_4X4_100, ///< 4x4 bits, minimum hamming distance between any two codes = 3, 100 codes + DICT_4X4_250, ///< 4x4 bits, minimum hamming distance between any two codes = 3, 250 codes + DICT_4X4_1000, ///< 4x4 bits, minimum hamming distance between any two codes = 2, 1000 codes + DICT_5X5_50, ///< 5x5 bits, minimum hamming distance between any two codes = 8, 50 codes + DICT_5X5_100, ///< 5x5 bits, minimum hamming distance between any two codes = 7, 100 codes + DICT_5X5_250, ///< 5x5 bits, minimum hamming distance between any two codes = 6, 250 codes + DICT_5X5_1000, ///< 5x5 bits, minimum hamming distance between any two codes = 5, 1000 codes + DICT_6X6_50, ///< 6x6 bits, minimum hamming distance between any two codes = 13, 50 codes + DICT_6X6_100, ///< 6x6 bits, minimum hamming distance between any two codes = 12, 100 codes + DICT_6X6_250, ///< 6x6 bits, minimum hamming distance between any two codes = 11, 250 codes + DICT_6X6_1000, ///< 6x6 bits, minimum hamming distance between any two codes = 9, 1000 codes + DICT_7X7_50, ///< 7x7 bits, minimum hamming distance between any two codes = 19, 50 codes + DICT_7X7_100, ///< 7x7 bits, minimum hamming distance between any two codes = 18, 100 codes + DICT_7X7_250, ///< 7x7 bits, minimum hamming distance between any two codes = 17, 250 codes + DICT_7X7_1000, ///< 7x7 bits, minimum hamming distance between any two codes = 14, 1000 codes + DICT_ARUCO_ORIGINAL, ///< 6x6 bits, minimum hamming distance between any two codes = 3, 1024 codes + DICT_APRILTAG_16h5, ///< 4x4 bits, minimum hamming distance between any two codes = 5, 30 codes + DICT_APRILTAG_25h9, ///< 5x5 bits, minimum hamming distance between any two codes = 9, 35 codes + 
DICT_APRILTAG_36h10, ///< 6x6 bits, minimum hamming distance between any two codes = 10, 2320 codes + DICT_APRILTAG_36h11, ///< 6x6 bits, minimum hamming distance between any two codes = 11, 587 codes + DICT_ARUCO_MIP_36h12 ///< 6x6 bits, minimum hamming distance between any two codes = 12, 250 codes +}; +} + +int getArUcoDict(const std::string &name) +{ + if (name == "DICT_4X4_50") { + return calib_helper_compat::DICT_4X4_50; + } + else if (name == "DICT_4X4_100") { + return calib_helper_compat::DICT_4X4_100; + } + else if (name == "DICT_4X4_250") { + return calib_helper_compat::DICT_4X4_250; + } + else if (name == "DICT_4X4_1000") { + return calib_helper_compat::DICT_4X4_1000; + } + else if (name == "DICT_5X5_50") { + return calib_helper_compat::DICT_5X5_50; + } + else if (name == "DICT_5X5_100") { + return calib_helper_compat::DICT_5X5_100; + } + else if (name == "DICT_5X5_250") { + return calib_helper_compat::DICT_5X5_250; + } + else if (name == "DICT_5X5_1000") { + return calib_helper_compat::DICT_5X5_1000; + } + else if (name == "DICT_6X6_50") { + return calib_helper_compat::DICT_6X6_50; + } + else if (name == "DICT_6X6_100") { + return calib_helper_compat::DICT_6X6_100; + } + else if (name == "DICT_6X6_250") { + return calib_helper_compat::DICT_6X6_250; + } + else if (name == "DICT_6X6_1000") { + return calib_helper_compat::DICT_6X6_1000; + } + else if (name == "DICT_7X7_50") { + return calib_helper_compat::DICT_7X7_50; + } + else if (name == "DICT_7X7_100") { + return calib_helper_compat::DICT_7X7_100; + } + else if (name == "DICT_7X7_250") { + return calib_helper_compat::DICT_7X7_250; + } + else if (name == "DICT_7X7_1000") { + return calib_helper_compat::DICT_7X7_1000; + } + else if (name == "DICT_ARUCO_ORIGINAL") { + return calib_helper_compat::DICT_ARUCO_ORIGINAL; + } + else if (name == "DICT_APRILTAG_16h5") { + return calib_helper_compat::DICT_APRILTAG_16h5; + } + else if (name == "DICT_APRILTAG_25h9") { + return 
calib_helper_compat::DICT_APRILTAG_25h9; + } + else if (name == "DICT_APRILTAG_36h10") { + return calib_helper_compat::DICT_APRILTAG_36h10; + } + else if (name == "DICT_APRILTAG_36h11") { + return calib_helper_compat::DICT_APRILTAG_36h11; + } + else if (name == "DICT_ARUCO_MIP_36h12") { + return calib_helper_compat::DICT_ARUCO_MIP_36h12; + } + else { + throw vpException(vpException::badValue, "Invalid ArUco dictionnary name."); + } +} + } // namespace calib_helper #endif // DOXYGEN_SHOULD_SKIP_THIS diff --git a/apps/calibration/intrinsic/visp-calibrate-camera.cpp b/apps/calibration/intrinsic/visp-calibrate-camera.cpp index 9cd4c0fd75..27027fde13 100644 --- a/apps/calibration/intrinsic/visp-calibrate-camera.cpp +++ b/apps/calibration/intrinsic/visp-calibrate-camera.cpp @@ -40,16 +40,24 @@ #include #if defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_HIGHGUI) && defined(HAVE_OPENCV_IMGPROC) && defined(VISP_HAVE_PUGIXML) \ - && (((VISP_HAVE_OPENCV_VERSION < 0x050000) && defined(HAVE_OPENCV_CALIB3D)) || ((VISP_HAVE_OPENCV_VERSION >= 0x050000) && defined(HAVE_OPENCV_3D))) + && (((VISP_HAVE_OPENCV_VERSION < 0x050000) && defined(HAVE_OPENCV_CALIB3D)) || ((VISP_HAVE_OPENCV_VERSION >= 0x050000) && defined(HAVE_OPENCV_3D))) \ + && ((VISP_HAVE_OPENCV_VERSION >= 0x040700) && defined(HAVE_OPENCV_OBJDETECT)) \ + && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11) #include +#if defined(HAVE_OPENCV_3D) +#include +#endif + #if defined(HAVE_OPENCV_CALIB3D) #include #elif defined(HAVE_OPENCV_CALIB) #include #endif +#include + #if defined(HAVE_OPENCV_CONTRIB) #include // Needed on Ubuntu 16.04 with OpenCV 2.4.9.1 #endif @@ -73,7 +81,7 @@ using namespace calib_helper; -void usage(const char *argv[], int error) +static void usage(const char *argv[], int error) { std::cout << "Synopsis" << std::endl << " " << argv[0] @@ -81,6 +89,7 @@ void usage(const char *argv[], int error) << " [--init-from-xml ]" << " [--camera-name ]" << " [--aspect-ratio ]" + << " [--save]" << " [--output ]" << " 
[--help, -h]" << std::endl << std::endl; @@ -98,9 +107,45 @@ void usage(const char *argv[], int error) << std::endl << " --aspect-ratio " << std::endl << " Pixel aspect ratio. To estimate px = py, use \"--aspect-ratio 1\" option. Set to -1" << std::endl - << " to unset any constraint for px and py parameters. " << std::endl + << " to unset any constraint for px and py parameters." << std::endl << " Default: -1." << std::endl << std::endl + << " --init-focal" << std::endl + << " By default, the initial camera focal length is computed such as:" << std::endl + << " (image_width / 640) x 600." << std::endl + << " The user can instead supply a desired camera focal length using this parameter." << std::endl + << std::endl + << " --opencv-calib" << std::endl + << " Flag to also perform the calibration using the OpenCV calibration pipeline." << std::endl + << std::endl + << " --opencv-calib-flags" << std::endl + << " Flags to be passed to the calibrateCameraRO() function." << std::endl + << " By default it corresponds to: CALIB_USE_INTRINSIC_GUESS+CALIB_USE_LU." << std::endl + << " Concatenation is possible, e.g.: CALIB_USE_INTRINSIC_GUESS+CALIB_FIX_PRINCIPAL_POINT." << std::endl + << " Supported flags are:" << std::endl + << " - CALIB_USE_INTRINSIC_GUESS" << std::endl + << " - CALIB_FIX_ASPECT_RATIO" << std::endl + << " - CALIB_FIX_PRINCIPAL_POINT" << std::endl + << " - CALIB_ZERO_TANGENT_DIST" << std::endl + << " - CALIB_FIX_FOCAL_LENGTH" << std::endl + << " - CALIB_FIX_K1" << std::endl + << " - CALIB_FIX_K2" << std::endl + << " - CALIB_FIX_K3" << std::endl + << " - CALIB_USE_QR" << std::endl + << " - CALIB_USE_LU" << std::endl + << std::endl + << " --fast-display" << std::endl + << " Flag to not show:" << std::endl + << " - side-by-side comparison between original and undistorted images" << std::endl + << " - reprojection errors graph for each calibration images" << std::endl + << " This can be used to speed-up the program running time when doing multiple trials." 
<< std::endl + << std::endl + << " --save" << std::endl + << " Flag to automatically save the image processing results in a new directory." << std::endl + << std::endl + << " --save-jpg" << std::endl + << " Flag to save image results in jpeg instead of png format." << std::endl + << std::endl << " --output " << std::endl << " XML file containing estimated camera parameters." << std::endl << " Default: \"camera.xml\"." << std::endl @@ -121,21 +166,24 @@ int main(int argc, const char *argv[]) using namespace VISP_NAMESPACE_NAME; #endif -#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11) - vpDisplay *d = nullptr; -#endif - try { if (argc == 1) { usage(argv, 0); return EXIT_SUCCESS; } std::string opt_output_file_name = "camera.xml"; - Settings s; const std::string opt_inputSettingsFile = argc > 1 ? argv[1] : "default.cfg"; std::string opt_init_camera_xml_file; std::string opt_camera_name = "Camera"; double opt_aspect_ratio = -1; // Not used + bool opt_use_focal_cmd_line = false; + double opt_init_focal = 600.0; + std::string opt_img_ext = ".png"; + bool opt_save_results = false; + bool perform_opencv_calib = false; + int cv_flags = 0; + std::string cv_flags_str = "CALIB_USE_INTRINSIC_GUESS+CALIB_USE_LU"; + bool fast_display = false; for (int i = 2; i < argc; i++) { if (std::string(argv[i]) == "--init-from-xml" && i + 1 < argc) { @@ -150,6 +198,25 @@ int main(int argc, const char *argv[]) else if (std::string(argv[i]) == "--aspect-ratio" && i + 1 < argc) { opt_aspect_ratio = std::atof(argv[++i]); } + else if (std::string(argv[i]) == "--init-focal" && i + 1 < argc) { + opt_use_focal_cmd_line = true; + opt_init_focal = std::atof(argv[++i]); + } + else if (std::string(argv[i]) == "--opencv-calib") { + perform_opencv_calib = true; + } + else if (std::string(argv[i]) == "--opencv-calib-flags" && i + 1 < argc) { + cv_flags_str = std::string(argv[++i]); + } + else if (std::string(argv[i]) == "--fast-display") { + fast_display = true; + } + else if (std::string(argv[i]) == 
"--save") { + opt_save_results = true; + } + else if (std::string(argv[i]) == "--save-jpg") { + opt_img_ext = ".jpg"; + } else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") { usage(argv, 0); return EXIT_SUCCESS; @@ -160,30 +227,46 @@ int main(int argc, const char *argv[]) } } - std::cout << "Settings from config file: " << argv[1] << std::endl; + // Results folder + log file + std::string save_results_folder = ""; + vpImage I_save_results; + std::ofstream log_file; + Tee tee(std::cout, log_file); + if (opt_save_results) { + save_results_folder = vpTime::getDateTime("calib_results_%Y-%m-%d_%H.%M.%S"); + vpIoTools::makeDirectory(save_results_folder); + + std::string output_log_file_name = vpIoTools::createFilePath(save_results_folder, "calibration_log.txt"); + log_file.open(output_log_file_name); + tee << "Create output results folder: " << save_results_folder << "\n"; + tee << "Save calibration log into: " << output_log_file_name << "\n"; + } + + tee << "Settings from config file: " << argv[1] << "\n"; + Settings s(tee); if (!s.read(opt_inputSettingsFile)) { - std::cout << "Could not open the configuration file: \"" << opt_inputSettingsFile << "\"" << std::endl; + tee << "Could not open the configuration file: \"" << opt_inputSettingsFile << "\"\n"; usage(argv, 0); return EXIT_FAILURE; } if (!s.goodInput) { - std::cout << "Invalid input detected. Application stopping. " << std::endl; + tee << "Invalid input detected. Application stopping. 
" << "\n"; return EXIT_FAILURE; } - std::cout << "\nSettings from command line options: " << std::endl; + tee << "\nSettings from command line options: " << "\n"; if (!opt_init_camera_xml_file.empty()) { - std::cout << "Init parameters: " << opt_init_camera_xml_file << std::endl; + tee << "Init parameters: " << opt_init_camera_xml_file << "\n"; } - std::cout << "Ouput xml file : " << opt_output_file_name << std::endl; - std::cout << "Camera name : " << opt_camera_name << std::endl; + tee << "Ouput xml file : " << opt_output_file_name << "\n"; + tee << "Camera name : " << opt_camera_name << "\n"; // Check if output file name exists if (vpIoTools::checkFilename(opt_output_file_name)) { - std::cout << "\nOutput file name " << opt_output_file_name << " already exists." << std::endl; - std::cout << "Remove this file or change output file name using [--output ] command line option." - << std::endl; + tee << "\nOutput file name " << opt_output_file_name << " already exists." << "\n"; + tee << "Remove this file or change output file name using [--output ] command line option." + << "\n"; return EXIT_SUCCESS; } @@ -195,76 +278,86 @@ int main(int argc, const char *argv[]) reader.open(I); } catch (const vpException &e) { - std::cout << "Catch an exception: " << e.getStringMessage() << std::endl; - std::cout << "Check if input images name \"" << s.input << "\" set in " << opt_inputSettingsFile - << " config file is correct..." << std::endl; + tee << "Catch an exception: " << e.getStringMessage() << "\n"; + tee << "Check if input images name \"" << s.input << "\" set in " << opt_inputSettingsFile + << " config file is correct..." 
<< "\n"; return EXIT_FAILURE; } -#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11) - std::shared_ptr d = vpDisplayFactory::createDisplay(I, vpDisplay::SCALE_AUTO); -#else - d = vpDisplayFactory::allocateDisplay(I, vpDisplay::SCALE_AUTO); -#endif + std::shared_ptr d = vpDisplayFactory::createDisplay(I, -1, -1, "", vpDisplay::SCALE_AUTO); vpCameraParameters cam_init; bool init_from_xml = false; if (!opt_init_camera_xml_file.empty()) { if (!vpIoTools::checkFilename(opt_init_camera_xml_file)) { - std::cout << "Input camera file \"" << opt_init_camera_xml_file << "\" doesn't exist!" << std::endl; - std::cout << "Modify [--init-from-xml ] option value" << std::endl; -#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11) - if (display != nullptr) { - delete display; - } -#endif + tee << "Input camera file \"" << opt_init_camera_xml_file << "\" doesn't exist!" << "\n"; + tee << "Modify [--init-from-xml ] option value" << "\n"; return EXIT_FAILURE; } init_from_xml = true; } if (init_from_xml) { - std::cout << "Initialize camera parameters from xml file: " << opt_init_camera_xml_file << std::endl; + tee << "Initialize camera parameters from xml file: " << opt_init_camera_xml_file << "\n"; vpXmlParserCamera parser; if (parser.parse(cam_init, opt_init_camera_xml_file, opt_camera_name, vpCameraParameters::perspectiveProjWithoutDistortion) != vpXmlParserCamera::SEQUENCE_OK) { - std::cout << "Unable to find camera with name \"" << opt_camera_name - << "\" in file: " << opt_init_camera_xml_file << std::endl; - std::cout << "Modify [--camera-name ] option value" << std::endl; -#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11) - if (display != nullptr) { - delete display; - } -#endif + tee << "Unable to find camera with name \"" << opt_camera_name + << "\" in file: " << opt_init_camera_xml_file << "\n"; + tee << "Modify [--camera-name ] option value" << "\n"; return EXIT_FAILURE; } } else { - std::cout << "Initialize camera parameters with default values " << std::endl; + tee << "Initialize 
camera parameters with default values " << "\n"; // Initialize camera parameters - double px = cam_init.get_px(); - double py = cam_init.get_py(); + double scale = I.getWidth() / 640.0; + double px = opt_use_focal_cmd_line ? opt_init_focal : scale * cam_init.get_px(); + double py = opt_use_focal_cmd_line ? opt_init_focal : scale * cam_init.get_py(); // Set (u0,v0) in the middle of the image double u0 = I.getWidth() / 2; double v0 = I.getHeight() / 2; cam_init.initPersProjWithoutDistortion(px, py, u0, v0); } - std::cout << "Camera parameters used for initialization:\n" << cam_init << std::endl; + tee << "Camera parameters used for initialization:\n" << cam_init << "\n"; std::vector model; std::vector calibrator; - for (int i = 0; i < s.boardSize.height; i++) { - for (int j = 0; j < s.boardSize.width; j++) { - model.push_back(vpPoint(j * s.squareSize, i * s.squareSize, 0)); + // Create board 3D object points + if (s.calibrationPattern == Settings::CHARUCOBOARD) { + for (int i = 0; i < s.boardSize.height-1; i++) { + for (int j = 0; j < s.boardSize.width-1; j++) { + model.push_back(vpPoint(j * s.squareSize, i * s.squareSize, 0)); + } + } + } + else { + for (int i = 0; i < s.boardSize.height; i++) { + for (int j = 0; j < s.boardSize.width; j++) { + model.push_back(vpPoint(j * s.squareSize, i * s.squareSize, 0)); + } } } + // ChArUco + cv::Ptr ch_board; + std::vector markerIds; + cv::Ptr ch_detector; + if (s.calibrationPattern == Settings::CHARUCOBOARD) { + cv::aruco::Dictionary dictionary = cv::aruco::getPredefinedDictionary(getArUcoDict(s.dictionnary)); + ch_board = cv::makePtr(s.boardSize, s.squareSize, s.markerSize, dictionary); + ch_board->setLegacyPattern(s.legacyPattern); + ch_detector = cv::makePtr(cv::aruco::CharucoDetector(*ch_board)); + } + std::vector calib_info; std::multimap > map_cam_sorted; // Sorted by residual - map_cam_sorted.insert(std::make_pair(1000, cam_init)); + std::vector calib_points_all; + calib_points_all.reserve(model.size()*20); + double 
start_time = vpTime::measureTimeMs(); do { reader.acquire(I); long frame_index = reader.getFrameIndex(); @@ -277,14 +370,14 @@ int main(int argc, const char *argv[]) std::vector pointBuf; vpImageConvert::convert(I, cvI); - std::cout << "Process frame: " << frame_name << std::flush; - bool found = extractCalibrationPoints(s, cvI, pointBuf); + tee << "Process frame: " << frame_name; + bool found = extractCalibrationPoints(s, cvI, ch_detector, pointBuf); - std::cout << ", grid detection status: " << found; + tee << ", grid detection status: " << found; if (!found) - std::cout << ", image rejected" << std::endl; + tee << ", image rejected" << "\n"; else - std::cout << ", image used as input data" << std::endl; + tee << ", image used as input data" << "\n"; if (found) { // If image processing done with success vpDisplay::setTitle(I, frame_name); @@ -306,6 +399,7 @@ int main(int argc, const char *argv[]) calib_points.push_back(vpPoint(model[i].get_oX(), model[i].get_oY(), model[i].get_oZ())); calib_points.back().set_x(data[i].get_u()); calib_points.back().set_y(data[i].get_v()); + calib_points_all.push_back(calib_points.back()); } vpHomogeneousMatrix cMo; @@ -313,29 +407,33 @@ int main(int argc, const char *argv[]) std::multimap::const_iterator it_cam; for (it_cam = map_cam_sorted.begin(); it_cam != map_cam_sorted.end(); ++it_cam) { vpCameraParameters cam = it_cam->second; - if (calib.computeCalibration(vpCalibration::CALIB_VIRTUAL_VS, cMo, cam, false) == EXIT_SUCCESS) { - calibrator.push_back(calib); - // Add calibration info - calib_info.push_back(CalibInfo(I, calib_points, data, frame_name)); - calib_status = true; - double residual = calib.getResidual(); - map_cam_sorted.insert(std::make_pair(residual, cam)); - break; + try { + if (calib.computeCalibration(vpCalibration::CALIB_VIRTUAL_VS, cMo, cam, false) == EXIT_SUCCESS) { + calibrator.push_back(calib); + // Add calibration info + calib_info.push_back(CalibInfo(I, calib_points, data, frame_name)); + calib_status = 
true; + double residual = calib.getResidual(); + map_cam_sorted.insert(std::make_pair(residual, cam)); + break; + } + } + catch (const vpException &e) { + tee << "Catch a ViSP exception when doing computeCalibration(): " << e.what() << "\n"; } } if (!calib_status) { - std::cout << "frame: " << frame_name << ", unable to calibrate from single image, image rejected" - << std::endl; + tee << "frame: " << frame_name << ", unable to calibrate from single image, image rejected" << "\n"; found = false; } } if (found) vpDisplay::displayText(I, 15 * vpDisplay::getDownScalingFactor(I), 15 * vpDisplay::getDownScalingFactor(I), - "Image processing succeed", vpColor::green); + "Image processing succeeded", vpColor::green); else vpDisplay::displayText(I, 15 * vpDisplay::getDownScalingFactor(I), 15 * vpDisplay::getDownScalingFactor(I), - "Image processing failed", vpColor::green); + "Image processing failed", vpColor::red); if (s.tempo > 10.f) { vpDisplay::displayText(I, 35 * vpDisplay::getDownScalingFactor(I), 15 * vpDisplay::getDownScalingFactor(I), @@ -347,22 +445,24 @@ int main(int argc, const char *argv[]) vpDisplay::flush(I); vpTime::wait(s.tempo * 1000); } + + if (opt_save_results) { + vpDisplay::getImage(I, I_save_results); + std::ostringstream oss; + oss << save_results_folder << "/" << vpIoTools::getNameWE(reader.getFrameName()) << opt_img_ext; + vpImageIo::write(I_save_results, oss.str()); + } } while (!reader.end()); // Now we consider the multi image calibration // Calibrate by a non linear method based on virtual visual servoing if (calibrator.empty()) { - std::cerr << "Unable to calibrate. Image processing failed !" << std::endl; -#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11) - if (display != nullptr) { - delete display; - } -#endif + tee << "Unable to calibrate. Image processing failed!" 
<< "\n"; return EXIT_FAILURE; } // Display calibration pattern occupancy - drawCalibrationOccupancy(I, calib_info, s.boardSize.width); + drawCalibrationOccupancy(I, calib_info, s.boardSize.width, s.calibrationPattern == Settings::CHARUCOBOARD); cv::Mat1b img(I.getHeight(), I.getWidth()); vpImageConvert::convert(I, img); @@ -403,6 +503,14 @@ int main(int argc, const char *argv[]) vpTime::wait(s.tempo * 1000); } + std::vector> list_img_reproj, list_img_reproj_dist, list_img_undist; + if (opt_save_results) { + vpDisplay::getImage(I_color, I_save_results); + std::ostringstream oss; + oss << save_results_folder << "/calibration_pattern_occupancy" << opt_img_ext; + vpImageIo::write(I_save_results, oss.str()); + } + d->close(I_color); std::stringstream ss_additional_info; @@ -419,6 +527,10 @@ int main(int argc, const char *argv[]) ss_additional_info << "Circles grid"; break; + case Settings::CHARUCOBOARD: + ss_additional_info << "ChArUco"; + break; + case Settings::UNDEFINED: default: ss_additional_info << "Undefined"; @@ -431,10 +543,10 @@ int main(int argc, const char *argv[]) double error; // Initialize with camera parameter that has the lowest residual vpCameraParameters cam = map_cam_sorted.begin()->second; - std::cout << "\nCalibration without distortion in progress on " << calibrator.size() << " images..." << std::endl; + tee << "\nCalibration without distortion in progress on " << calibrator.size() << " images..." 
<< "\n"; if (vpCalibration::computeCalibrationMulti(vpCalibration::CALIB_VIRTUAL_VS, calibrator, cam, error, false) == EXIT_SUCCESS) { - std::cout << cam << std::endl; + tee << cam << "\n"; d->init(I); vpDisplay::setTitle(I, "Without distortion results"); @@ -443,7 +555,7 @@ int main(int argc, const char *argv[]) double reproj_error = sqrt(calibrator[i].getResidual() / calibrator[i].get_npt()); const CalibInfo &calib = calib_info[i]; - std::cout << "Image " << calib.m_frame_name << " reprojection error: " << reproj_error << std::endl; + tee << "Image " << calib.m_frame_name << " reprojection error: " << reproj_error << "\n"; ss_additional_info << "" << reproj_error << ""; I = calib.m_img; @@ -480,42 +592,44 @@ int main(int argc, const char *argv[]) vpDisplay::flush(I); vpTime::wait(s.tempo * 1000); } + + if (opt_save_results) { + vpDisplay::getImage(I, I_save_results); + list_img_reproj.push_back(I_save_results); + std::ostringstream oss; + oss << save_results_folder << "/reproj_err_without_dist_" << vpIoTools::getNameWE(calib.m_frame_name) + << opt_img_ext; + vpImageIo::write(I_save_results, oss.str()); + } } - std::cout << "\nGlobal reprojection error: " << error << std::endl; + tee << "\nGlobal reprojection error: " << error << "\n"; ss_additional_info << "" << error << ""; - ss_additional_info << ""; + vpXmlParserCamera xml; if (xml.save(cam, opt_output_file_name.c_str(), opt_camera_name, I.getWidth(), I.getHeight()) == vpXmlParserCamera::SEQUENCE_OK) - std::cout << "Camera parameters without distortion successfully saved in \"" << opt_output_file_name << "\"" - << std::endl; + tee << "Camera parameters without distortion successfully saved in \"" << opt_output_file_name << "\"" + << "\n"; else { - std::cout << "Failed to save the camera parameters without distortion in \"" << opt_output_file_name << "\"" - << std::endl; - std::cout << "A file with the same name exists. 
Remove it to be able " + tee << "Failed to save the camera parameters without distortion in \"" << opt_output_file_name << "\"" + << "\n"; + tee << "A file with the same name exists. Remove it to be able " "to save the parameters..." - << std::endl; + << "\n"; } } else { - std::cout << "Calibration without distortion failed." << std::endl; -#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11) - if (display != nullptr) { - delete display; - } -#endif + tee << "Calibration without distortion failed." << "\n"; return EXIT_FAILURE; } - vpCameraParameters cam_without_dist = cam; - std::vector calibrator_without_dist = calibrator; - std::cout << "\n\nCalibration with distortion in progress on " << calibrator.size() << " images..." << std::endl; + tee << "\n\nCalibration with distortion in progress on " << calibrator.size() << " images..." << "\n"; if (vpCalibration::computeCalibrationMulti(vpCalibration::CALIB_VIRTUAL_VS_DIST, calibrator, cam, error, false) == EXIT_SUCCESS) { - std::cout << cam << std::endl; + tee << cam << "\n"; vpDisplay::setTitle(I, "With distortion results"); ss_additional_info << ""; @@ -523,7 +637,7 @@ int main(int argc, const char *argv[]) double reproj_error = sqrt(calibrator[i].getResidual_dist() / calibrator[i].get_npt()); const CalibInfo &calib = calib_info[i]; - std::cout << "Image " << calib.m_frame_name << " reprojection error: " << reproj_error << std::endl; + tee << "Image " << calib.m_frame_name << " reprojection error: " << reproj_error << "\n"; ss_additional_info << "" << reproj_error << ""; I = calib.m_img; vpDisplay::display(I); @@ -559,112 +673,375 @@ int main(int argc, const char *argv[]) vpDisplay::flush(I); vpTime::wait(s.tempo * 1000); } + + if (opt_save_results) { + vpDisplay::getImage(I, I_save_results); + list_img_reproj_dist.push_back(I_save_results); + std::ostringstream oss; + oss << save_results_folder << "/reproj_err_with_dist_" << vpIoTools::getNameWE(calib.m_frame_name) + << opt_img_ext; + vpImageIo::write(I_save_results, 
oss.str()); + } } - std::cout << "\nGlobal reprojection error: " << error << std::endl; + tee << "\nGlobal reprojection error: " << error << "\n"; ss_additional_info << "" << error << ""; - ss_additional_info << ""; + // + // Display side-by-side original and undistorted images, draw lines from start and end calib pattern points + // vpImage I_undist; vpImage I_dist_undist(I.getHeight(), 2 * I.getWidth()); d->close(I); d->init(I_dist_undist, 0, 0, "Straight lines have to be straight - distorted image / undistorted image"); - for (size_t idx = 0; idx < calib_info.size(); idx++) { - std::cout << "\nThis tool computes the line fitting error (mean distance error) on image points extracted from " - "the raw distorted image." - << std::endl; - - I = calib_info[idx].m_img; - vpImageTools::undistort(I, cam, I_undist); - - I_dist_undist.insert(I, vpImagePoint(0, 0)); - I_dist_undist.insert(I_undist, vpImagePoint(0, I.getWidth())); - vpDisplay::display(I_dist_undist); - - vpDisplay::displayText(I_dist_undist, 15 * vpDisplay::getDownScalingFactor(I_dist_undist), - 15 * vpDisplay::getDownScalingFactor(I_dist_undist), - calib_info[idx].m_frame_name + std::string(" distorted"), vpColor::red); - vpDisplay::displayText(I_dist_undist, 30 * vpDisplay::getDownScalingFactor(I_dist_undist), - 15 * vpDisplay::getDownScalingFactor(I_dist_undist), - "Draw lines from first / last points.", vpColor::red); - std::vector grid_points = calib_info[idx].m_imPts; - for (int i = 0; i < s.boardSize.height; i++) { - std::vector current_line(grid_points.begin() + i * s.boardSize.width, - grid_points.begin() + (i + 1) * s.boardSize.width); - - std::vector current_line_undist = undistort(cam, current_line); - double a = 0, b = 0, c = 0; - double line_fitting_error = vpMath::lineFitting(current_line, a, b, c); - double line_fitting_error_undist = vpMath::lineFitting(current_line_undist, a, b, c); - std::cout << calib_info[idx].m_frame_name << " line " << i + 1 - << " fitting error on distorted points: " 
<< line_fitting_error - << " ; on undistorted points: " << line_fitting_error_undist << std::endl; - - vpImagePoint ip1 = current_line.front(); - vpImagePoint ip2 = current_line.back(); - vpDisplay::displayLine(I_dist_undist, ip1, ip2, vpColor::red); - } - - std::cout << "\nThis tool computes the line fitting error (mean distance error) on image points extracted from " - "the undistorted image" - << " (vpImageTools::undistort())." << std::endl; - cv::Mat cvI; - std::vector pointBuf; - vpImageConvert::convert(I_undist, cvI); - - bool found = extractCalibrationPoints(s, cvI, pointBuf); - if (found) { - std::vector grid_points; - for (unsigned int i = 0; i < pointBuf.size(); i++) { - vpImagePoint ip(pointBuf[i].y, pointBuf[i].x); - grid_points.push_back(ip); - } + if (!fast_display) { + for (size_t idx = 0; idx < calib_info.size(); idx++) { + tee << "\nThis tool computes the line fitting error (mean distance error) on image points extracted from " + "the raw distorted image." + << "\n"; - vpDisplay::displayText(I_dist_undist, 15 * vpDisplay::getDownScalingFactor(I_dist_undist), - I.getWidth() + 15 * vpDisplay::getDownScalingFactor(I_dist_undist), - calib_info[idx].m_frame_name + std::string(" undistorted"), vpColor::red); - for (int i = 0; i < s.boardSize.height; i++) { - std::vector current_line(grid_points.begin() + i * s.boardSize.width, - grid_points.begin() + (i + 1) * s.boardSize.width); + I = calib_info[idx].m_img; + vpImageTools::undistort(I, cam, I_undist); + + I_dist_undist.insert(I, vpImagePoint(0, 0)); + I_dist_undist.insert(I_undist, vpImagePoint(0, I.getWidth())); + vpDisplay::display(I_dist_undist); + vpDisplay::displayText(I_dist_undist, 15 * vpDisplay::getDownScalingFactor(I_dist_undist), + 15 * vpDisplay::getDownScalingFactor(I_dist_undist), + calib_info[idx].m_frame_name + std::string(" distorted"), vpColor::red); + vpDisplay::displayText(I_dist_undist, 30 * vpDisplay::getDownScalingFactor(I_dist_undist), + 15 * 
vpDisplay::getDownScalingFactor(I_dist_undist), + "Draw lines from first / last points.", vpColor::red); + std::vector grid_points = calib_info[idx].m_imPts; + const int offset = (s.calibrationPattern == Settings::CHARUCOBOARD) ? -1 : 0; + for (int i = 0; i < s.boardSize.height+offset; i++) { + std::vector current_line(grid_points.begin() + i * (s.boardSize.width+offset), + grid_points.begin() + (i + 1) * (s.boardSize.width+offset)); + + std::vector current_line_undist = undistort(cam, current_line); double a = 0, b = 0, c = 0; double line_fitting_error = vpMath::lineFitting(current_line, a, b, c); - std::cout << calib_info[idx].m_frame_name << " undistorted image, line " << i + 1 - << " fitting error: " << line_fitting_error << std::endl; + double line_fitting_error_undist = vpMath::lineFitting(current_line_undist, a, b, c); + tee << calib_info[idx].m_frame_name << " line " << i + 1 + << " fitting error on distorted points: " << line_fitting_error + << " ; on undistorted points: " << line_fitting_error_undist << "\n"; - vpImagePoint ip1 = current_line.front() + vpImagePoint(0, I.getWidth()); - vpImagePoint ip2 = current_line.back() + vpImagePoint(0, I.getWidth()); + vpImagePoint ip1 = current_line.front(); + vpImagePoint ip2 = current_line.back(); vpDisplay::displayLine(I_dist_undist, ip1, ip2, vpColor::red); } + + tee << "\nThis tool computes the line fitting error (mean distance error) on image points extracted from " + "the undistorted image" << " (vpImageTools::undistort())." 
<< "\n"; + cv::Mat cvI; + std::vector pointBuf; + vpImageConvert::convert(I_undist, cvI); + + bool found = extractCalibrationPoints(s, cvI, ch_detector, pointBuf); + if (found) { + std::vector found_grid_points; + for (unsigned int i = 0; i < pointBuf.size(); i++) { + vpImagePoint ip(pointBuf[i].y, pointBuf[i].x); + found_grid_points.push_back(ip); + } + + vpDisplay::displayText(I_dist_undist, 15 * vpDisplay::getDownScalingFactor(I_dist_undist), + I.getWidth() + 15 * vpDisplay::getDownScalingFactor(I_dist_undist), + calib_info[idx].m_frame_name + std::string(" undistorted"), vpColor::red); + for (int i = 0; i < s.boardSize.height+offset; i++) { + std::vector current_line(found_grid_points.begin() + i * (s.boardSize.width+offset), + found_grid_points.begin() + (i + 1) * (s.boardSize.width+offset)); + + double a = 0, b = 0, c = 0; + double line_fitting_error = vpMath::lineFitting(current_line, a, b, c); + tee << calib_info[idx].m_frame_name << " undistorted image, line " << i + 1 + << " fitting error: " << line_fitting_error << "\n"; + + vpImagePoint ip1 = current_line.front() + vpImagePoint(0, I.getWidth()); + vpImagePoint ip2 = current_line.back() + vpImagePoint(0, I.getWidth()); + vpDisplay::displayLine(I_dist_undist, ip1, ip2, vpColor::red); + } + } + else { + std::string msg("Unable to detect grid on undistorted image"); + tee << msg << "\n"; + tee << "Check that the grid is not too close to the image edges" << "\n"; + vpDisplay::displayText(I_dist_undist, 15 * vpDisplay::getDownScalingFactor(I_dist_undist), + I.getWidth() + 15 * vpDisplay::getDownScalingFactor(I_dist_undist), + calib_info[idx].m_frame_name + std::string(" undistorted"), vpColor::red); + vpDisplay::displayText(I_dist_undist, 30 * vpDisplay::getDownScalingFactor(I_dist_undist), + I.getWidth() + 15 * vpDisplay::getDownScalingFactor(I_dist_undist), msg, vpColor::red); + } + + if (s.tempo > 10.f) { + vpDisplay::displayText( + I_dist_undist, I_dist_undist.getHeight() - 20 * 
vpDisplay::getDownScalingFactor(I_dist_undist), + 15 * vpDisplay::getDownScalingFactor(I_dist_undist), "Click to continue...", vpColor::red); + vpDisplay::flush(I_dist_undist); + vpDisplay::getClick(I_dist_undist); + } + else { + vpDisplay::flush(I_dist_undist); + vpTime::wait(s.tempo * 1000); + } + + if (opt_save_results) { + vpDisplay::getImage(I_dist_undist, I_save_results); + std::ostringstream oss; + oss << save_results_folder << "/dist_vs_undist_" << vpIoTools::getNameWE(calib_info[idx].m_frame_name) + << opt_img_ext; + vpImage I_right; + vpImageTools::crop(I_save_results, vpRect(I_save_results.getWidth()/2, 0, + I_save_results.getWidth()/2, I_save_results.getHeight()), I_right); + list_img_undist.push_back(I_right); + vpImageIo::write(I_save_results, oss.str()); + } } - else { - std::string msg("Unable to detect grid on undistorted image"); - std::cout << msg << std::endl; - std::cout << "Check that the grid is not too close to the image edges" << std::endl; - vpDisplay::displayText(I_dist_undist, 15 * vpDisplay::getDownScalingFactor(I_dist_undist), - 15 * vpDisplay::getDownScalingFactor(I_dist_undist), - calib_info[idx].m_frame_name + std::string(" undistorted"), vpColor::red); - vpDisplay::displayText(I_dist_undist, 30 * vpDisplay::getDownScalingFactor(I_dist_undist), - 15 * vpDisplay::getDownScalingFactor(I_dist_undist), msg, vpColor::red); + tee << "\n"; + } + + // + // Display pseudo distortion displacement map + // + d->close(I_dist_undist); + vpImage I_dist_map(I.getHeight(), I.getWidth()); + d->init(I_dist_map, 0, 0, "Distortion displacement map"); + computeDistortionDisplacementMap(cam, I_dist_map); + d->display(I_dist_map); + + const float ref_img_width = 640; + const unsigned int step = std::max(20u, static_cast((I_dist_undist.getWidth()/ref_img_width)*20)); + for (unsigned int i = 0; i < I_dist_map.getHeight(); i += step) { + for (unsigned int j = 0; j < I_dist_map.getWidth(); j += step) { + vpImagePoint imPt(i, j); + double x = 0, y = 0; + 
vpPixelMeterConversion::convertPointWithoutDistortion(cam, imPt, x, y); + vpImagePoint imPt_dist; + vpMeterPixelConversion::convertPoint(cam, x, y, imPt_dist); + + vpDisplay::displayArrow(I_dist_map, imPt, imPt_dist, vpColor::red, 2); } - if (s.tempo > 10.f) { - vpDisplay::displayText( - I_dist_undist, I_dist_undist.getHeight() - 20 * vpDisplay::getDownScalingFactor(I_dist_undist), - 15 * vpDisplay::getDownScalingFactor(I_dist_undist), "Click to continue...", vpColor::red); - vpDisplay::flush(I_dist_undist); - vpDisplay::getClick(I_dist_undist); + unsigned int remainder_col = (I_dist_map.getWidth()-1) - (I_dist_map.getWidth() / step)*step; + if (remainder_col >= step/2) { + unsigned int j = I_dist_map.getWidth() - 1; + vpImagePoint imPt(i, j); + double x = 0, y = 0; + vpPixelMeterConversion::convertPointWithoutDistortion(cam, imPt, x, y); + vpImagePoint imPt_dist; + vpMeterPixelConversion::convertPoint(cam, x, y, imPt_dist); + + vpDisplay::displayArrow(I_dist_map, imPt, imPt_dist, vpColor::red, 2); } - else { - vpDisplay::flush(I_dist_undist); - vpTime::wait(s.tempo * 1000); + } + + // Last row / column + unsigned int remainder_row = (I_dist_map.getHeight()-1) - (I_dist_map.getHeight() / step)*step; + if (remainder_row >= step/2) { + unsigned int i = I_dist_map.getHeight() - 1; + + for (unsigned int j = 0; j < I_dist_map.getWidth(); j += step) { + vpImagePoint imPt(i, j); + double x = 0, y = 0; + vpPixelMeterConversion::convertPointWithoutDistortion(cam, imPt, x, y); + vpImagePoint imPt_dist; + vpMeterPixelConversion::convertPoint(cam, x, y, imPt_dist); + + vpDisplay::displayArrow(I_dist_map, imPt, imPt_dist, vpColor::red, 2); + } + + unsigned int remainder_col = (I_dist_map.getWidth()-1) - (I_dist_map.getWidth() / step)*step; + if (remainder_col >= step/2) { + unsigned int j = I_dist_map.getWidth() - 1; + vpImagePoint imPt(i, j); + double x = 0, y = 0; + vpPixelMeterConversion::convertPointWithoutDistortion(cam, imPt, x, y); + vpImagePoint imPt_dist; + 
vpMeterPixelConversion::convertPoint(cam, x, y, imPt_dist); + + vpDisplay::displayArrow(I_dist_map, imPt, imPt_dist, vpColor::red, 2); } } - std::cout << std::endl; - vpXmlParserCamera xml; + // Image center and camera principal point + vpDisplay::displayCross(I_dist_map, vpImagePoint((I_dist_map.getHeight()-1)/2, (I_dist_map.getWidth()-1)/2), + 10*vpDisplay::getDownScalingFactor(I_dist_map), vpColor::green, 2); + vpDisplay::displayCircle(I_dist_map, vpImagePoint(cam.get_v0(), cam.get_u0()), + 8*vpDisplay::getDownScalingFactor(I_dist_map), vpColor::red); + + if (s.tempo > 10.f) { + vpDisplay::displayText( + I_dist_map, I_dist_map.getHeight() - 20 * vpDisplay::getDownScalingFactor(I_dist_map), + 15 * vpDisplay::getDownScalingFactor(I_dist_map), "Click to continue...", vpColor::red); + vpDisplay::flush(I_dist_map); + vpDisplay::getClick(I_dist_map); + } + else { + vpDisplay::flush(I_dist_map); + vpTime::wait(s.tempo * 1000); + } + + if (opt_save_results) { + vpDisplay::getImage(I_dist_map, I_save_results); + std::ostringstream oss; + oss << save_results_folder << "/distortion_displacement_map" << opt_img_ext; + vpImageIo::write(I_save_results, oss.str()); + + // Save mosaic images + { + std::vector> mosaics; + createMosaic(list_img_reproj, mosaics); + for (size_t i = 0; i < mosaics.size(); i++) { + oss.str(""); + oss << save_results_folder << "/mosaics_reproj_err_without_dist_%04d" << opt_img_ext; + char name[FILENAME_MAX]; + snprintf(name, FILENAME_MAX, oss.str().c_str(), i); + vpImageIo::write(mosaics[i], name); + } + } + { + std::vector> mosaics; + createMosaic(list_img_reproj_dist, mosaics); + for (size_t i = 0; i < mosaics.size(); i++) { + oss.str(""); + oss << save_results_folder << "/mosaics_reproj_err_with_dist_%04d" << opt_img_ext; + char name[FILENAME_MAX]; + snprintf(name, FILENAME_MAX, oss.str().c_str(), i); + vpImageIo::write(mosaics[i], name); + } + } + { + std::vector> mosaics; + createMosaic(list_img_undist, mosaics); + for (size_t i = 0; i < 
mosaics.size(); i++) { + oss.str(""); + oss << save_results_folder << "/mosaics_undist_%04d" << opt_img_ext; + char name[FILENAME_MAX]; + snprintf(name, FILENAME_MAX, oss.str().c_str(), i); + vpImageIo::write(mosaics[i], name); + } + } + } + d->close(I_dist_map); + + // + // Display reprojection error graph for all the images + // + const unsigned int disp_size = 650; + vpImage I_err_imPt(disp_size, 2*disp_size); + std::shared_ptr d2 = vpDisplayFactory::createDisplay(I_err_imPt, 0, 0, + "Reprojection error in the image plane for all the images"); + vpDisplay::display(I_err_imPt); + + // no distortion + std::vector> err_imPt_imgs_no_dist; + err_imPt_imgs_no_dist.reserve(calibrator.size()); + std::vector err_imPt_no_dist; + err_imPt_no_dist.reserve(calib_points_all.size()); + double max_scale_uv_no_dist = getProjectionErrorUV(calibrator, calib_info, err_imPt_imgs_no_dist, + err_imPt_no_dist, false); + { + bool with_dist = false; + displayProjectionErrorUV(I_err_imPt, err_imPt_no_dist, disp_size, max_scale_uv_no_dist, with_dist, + vpColor::red, 20, vpColor::red, false); + } + // distortion + std::vector> err_imPt_imgs_dist; + err_imPt_imgs_dist.reserve(calibrator.size()); + std::vector err_imPt_dist; + err_imPt_dist.reserve(calib_points_all.size()); + double max_scale_uv_dist = getProjectionErrorUV(calibrator, calib_info, err_imPt_imgs_dist, err_imPt_dist, true); + { + bool with_dist = true; + displayProjectionErrorUV(I_err_imPt, err_imPt_dist, disp_size, max_scale_uv_dist, with_dist, + vpColor::blue, 20, vpColor::blue, false); + } + + if (s.tempo > 10.f) { + vpDisplay::displayText(I_err_imPt, I_err_imPt.getHeight() - 20 * vpDisplay::getDownScalingFactor(I_err_imPt), + 15 * vpDisplay::getDownScalingFactor(I_err_imPt), "Click to continue...", vpColor::red); + vpDisplay::flush(I_err_imPt); + vpDisplay::getClick(I_err_imPt); + } + else { + vpDisplay::flush(I_err_imPt); + vpTime::wait(s.tempo * 1000); + } + + if (opt_save_results) { + vpDisplay::getImage(I_err_imPt, 
I_save_results); + std::ostringstream oss; + oss << save_results_folder << "/reprojection_error_graph" << opt_img_ext; + vpImageIo::write(I_save_results, oss.str()); + } + + // Display reprojection error graph for each images + if (!fast_display) { + std::vector> list_img_reproj_graph; + for (size_t i = 0; i < err_imPt_imgs_no_dist.size(); i++) { + std::ostringstream oss_title; + oss_title << "Reprojection error in the image plane for: " << vpIoTools::getNameWE(calib_info[i].m_frame_name); + d2->setTitle(oss_title.str()); + vpDisplay::display(I_err_imPt); + + // no distortion + displayProjectionErrorUV(I_err_imPt, err_imPt_no_dist, disp_size, max_scale_uv_no_dist, false, + vpColor::red, 40, vpColor::red, false); + displayProjectionErrorUV(I_err_imPt, err_imPt_imgs_no_dist[i], disp_size, max_scale_uv_no_dist, false, + vpColor::green, disp_size-60, vpColor::green, false); + // distortion + displayProjectionErrorUV(I_err_imPt, err_imPt_dist, disp_size, max_scale_uv_dist, true, + vpColor::blue, 40, vpColor::blue, false); + displayProjectionErrorUV(I_err_imPt, err_imPt_imgs_dist[i], disp_size, max_scale_uv_dist, true, + vpColor::green, disp_size-60, vpColor::green, false); + + oss_title.str(""); + oss_title << vpIoTools::getNameWE(calib_info[i].m_frame_name) << " without / with dist"; + vpDisplay::displayText(I_err_imPt, 20 * vpDisplay::getDownScalingFactor(I_err_imPt), + 15 * vpDisplay::getDownScalingFactor(I_err_imPt), + oss_title.str(), vpColor::white); + + if (s.tempo > 10.f) { + vpDisplay::displayText(I_err_imPt, I_err_imPt.getHeight() - 20 * vpDisplay::getDownScalingFactor(I_err_imPt), + 15 * vpDisplay::getDownScalingFactor(I_err_imPt), "Click to continue...", vpColor::red); + vpDisplay::flush(I_err_imPt); + vpDisplay::getClick(I_err_imPt); + } + else { + vpDisplay::flush(I_err_imPt); + vpTime::wait(s.tempo * 1000); + } + + if (opt_save_results) { + vpDisplay::getImage(I_err_imPt, I_save_results); + list_img_reproj_graph.push_back(I_save_results); + + 
std::ostringstream oss; + oss << save_results_folder << "/reproj_err_graph_" << vpIoTools::getNameWE(calib_info[i].m_frame_name) + << opt_img_ext; + vpImageIo::write(I_save_results, oss.str()); + } + } + + if (opt_save_results) { + // Save mosaic images + std::ostringstream oss; + std::vector> mosaics; + createMosaic(list_img_reproj_graph, mosaics, 4, 3); + for (size_t i = 0; i < mosaics.size(); i++) { + oss.str(""); + oss << save_results_folder << "/mosaics_reproj_err_graph_%04d" << opt_img_ext; + char name[FILENAME_MAX]; + snprintf(name, FILENAME_MAX, oss.str().c_str(), i); + vpImageIo::write(mosaics[i], name); + } + } + } + + vpXmlParserCamera xml; // Camera poses ss_additional_info << ""; for (size_t i = 0; i < calibrator.size(); i++) { @@ -678,49 +1055,526 @@ int main(int argc, const char *argv[]) ss_additional_info << ""; if (xml.save(cam, opt_output_file_name.c_str(), opt_camera_name, I.getWidth(), I.getHeight(), - ss_additional_info.str()) == vpXmlParserCamera::SEQUENCE_OK) - std::cout << "Camera parameters without distortion successfully saved in \"" << opt_output_file_name << "\"" - << std::endl; + ss_additional_info.str()) == vpXmlParserCamera::SEQUENCE_OK) { + tee << "Camera parameters with distortion successfully saved in \"" << opt_output_file_name << "\"" << "\n"; + } else { - std::cout << "Failed to save the camera parameters without distortion in \"" << opt_output_file_name << "\"" - << std::endl; - std::cout << "A file with the same name exists. Remove it to be able " + tee << "Failed to save the camera parameters with distortion in \"" << opt_output_file_name << "\"" + << "\n"; + tee << "A file with the same name exists. Remove it to be able " "to save the parameters..." 
- << std::endl; + << "\n"; } - std::cout << std::endl; - std::cout << "Estimated pose using vpPoseVector format: [tx ty tz tux tuy tuz] with translation in meter and " + tee << "\n"; + tee << "Estimated pose using vpPoseVector format: [tx ty tz tux tuy tuz] with translation in meter and " "rotation in rad" - << std::endl; - for (unsigned int i = 0; i < calibrator.size(); i++) - std::cout << "Estimated pose on input data extracted from " << calib_info[i].m_frame_name << ": " - << vpPoseVector(calibrator[i].cMo_dist).t() << std::endl; + << "\n"; + for (size_t i = 0; i < calibrator.size(); i++) { + tee << "Estimated pose on input data extracted from " << calib_info[i].m_frame_name << ": " + << vpPoseVector(calibrator[i].cMo_dist).t() << "\n"; + } } else { - std::cout << "Calibration with distortion failed." << std::endl; -#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11) - if (display != nullptr) { - delete display; - } -#endif + tee << "Calibration with distortion failed." << "\n"; return EXIT_FAILURE; } - std::cout << "\nCamera calibration succeeded. Results are saved in " << "\"" << opt_output_file_name << "\"" << std::endl; -#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11) - if (display != nullptr) { - delete display; - } -#endif + // + // OpenCV calibration + // + if (perform_opencv_calib) { + tee << "\n\n========================================\n"; + tee << "===Now perform OpenCV calibration===\n"; + tee << "========================================\n"; + + std::string opt_output_file_name_parent = vpIoTools::getParent(opt_output_file_name); + std::string cv_calib_output_file = (opt_output_file_name_parent.empty() ? 
"" : + opt_output_file_name_parent + vpIoTools::separator) + + vpIoTools::getNameWE(opt_output_file_name) + "_opencv.yml"; + + std::vector> cv_imagePoints; + cv_imagePoints.reserve(calib_info.size()); + for (const auto &calib_info_cur : calib_info) { + std::vector cv_imagePoints_cur_img; + for (const auto &img_pts_cur : calib_info_cur.m_imPts) { + cv_imagePoints_cur_img.push_back(cv::Point2f( + static_cast(img_pts_cur.get_u()), + static_cast(img_pts_cur.get_v()) + )); + } + + cv_imagePoints.push_back(cv_imagePoints_cur_img); + } + + cv::Size cv_imageSize(I.getWidth(), I.getHeight()); + cv::Size cv_boardSize = s.boardSize; + CvPattern cv_patternType = CV_CHESSBOARD; + switch (s.calibrationPattern) { + case Settings::CHESSBOARD: + cv_patternType = CV_CHESSBOARD; + break; + case Settings::CIRCLES_GRID: + cv_patternType = CV_CIRCLES_GRID; + break; + case Settings::CHARUCOBOARD: + cv_patternType = CV_CHARUCOBOARD; + break; + default: + tee << "Unsupported calibration pattern type between ViSP (" << s.calibrationPattern << ") and OpenCV." << "\n"; + throw vpException(vpException::badValue, "Unsupported calibration pattern type between ViSP and OpenCV."); + } + float cv_squareSize = s.squareSize; + float cv_grid_width = cv_squareSize * + (cv_patternType != CV_CHARUCOBOARD ? 
(cv_boardSize.width - 1) : (cv_boardSize.width - 2)); + bool cv_release_object = false; + float cv_aspectRatio = 1; + cv::Matx33d cv_cameraMatrix_ = cv::Matx33d::eye(); + cv_cameraMatrix_(0, 0) = cam.get_px(); cv_cameraMatrix_(0, 2) = cam.get_u0(); + cv_cameraMatrix_(1, 1) = cam.get_py(); cv_cameraMatrix_(1, 2) = cam.get_v0(); + cv::Mat cv_cameraMatrix(cv_cameraMatrix_); + cv::Mat cv_distCoeffs; + bool cv_writeExtrinsics = true, cv_writePoints = true, cv_writeGrid = true; + + tee << "OpenCV calibration flag:" << "\n"; + parseOpenCVCalibFlags(cv_flags_str, cv_flags, tee); + tee << "OpenCV calibration flag: " << cv_flags << " (int)" << "\n"; + + std::vector> cv_imagePoints_proj; + std::vector cv_reprojErrs; + std::vector rvecs, tvecs; + bool cv_calib_status = runAndSave( + cv_calib_output_file, cv_imagePoints, cv_imagePoints_proj, cv_imageSize, cv_boardSize, cv_patternType, + cv_squareSize, cv_grid_width, cv_release_object, cv_aspectRatio, cv_flags, cv_cameraMatrix, cv_distCoeffs, + cv_reprojErrs, cv_writeExtrinsics, cv_writePoints, cv_writeGrid, rvecs, tvecs, tee + ); + tee << "OpenCV calibration status: " << cv_calib_status << "\n"; + if (cv_calib_status) { + tee << "Calibration matrix:\n" << cv_cameraMatrix << "\n"; + tee << "Distortion coefficients: " << cv_distCoeffs.t() << "\n"; + tee << "Save OpenCV calibration file to: " << cv_calib_output_file << "\n"; + + // + // OpenCV Display reprojected image points + // + tee << "\n"; + assert(calibrator.size() == cv_reprojErrs.size()); + assert(calibrator.size() == cv_imagePoints_proj.size()); + + std::string cv_save_results_folder; + if (opt_save_results) { + cv_save_results_folder = save_results_folder + vpIoTools::separator + "opencv"; + vpIoTools::makeDirectory(cv_save_results_folder); + } + + d->init(I, 0, 0, "OpenCV calibration results"); + vpImage I_save_results; + std::vector> cv_list_img_reproj_dist, cv_list_img_undist; + cv_list_img_reproj_dist.reserve(calibrator.size()); + + for (size_t i = 0; i < 
calibrator.size(); i++) { + double reproj_error = cv_reprojErrs[i]; + + const CalibInfo &calib = calib_info[i]; + tee << "Image " << calib.m_frame_name << " OpenCV reprojection error: " << reproj_error << "\n"; + I = calib.m_img; + vpDisplay::display(I); + + std::ostringstream ss; + ss << "Reprojection error: " << reproj_error; + vpDisplay::displayText(I, 15 * vpDisplay::getDownScalingFactor(I), 15 * vpDisplay::getDownScalingFactor(I), + calib.m_frame_name, vpColor::red); + vpDisplay::displayText(I, 30 * vpDisplay::getDownScalingFactor(I), 15 * vpDisplay::getDownScalingFactor(I), + ss.str(), vpColor::red); + vpDisplay::displayText(I, 45 * vpDisplay::getDownScalingFactor(I), 15 * vpDisplay::getDownScalingFactor(I), + "Extracted points", vpColor::red); + vpDisplay::displayText(I, 60 * vpDisplay::getDownScalingFactor(I), 15 * vpDisplay::getDownScalingFactor(I), + "Projected points", vpColor::green); + + for (size_t idx = 0; idx < calib.m_points.size(); idx++) { + vpDisplay::displayCross(I, calib.m_imPts[idx], 12 * vpDisplay::getDownScalingFactor(I), vpColor::red); + vpImagePoint imPt(cv_imagePoints_proj[i][idx].y, cv_imagePoints_proj[i][idx].x); + vpDisplay::displayCross(I, imPt, 12 * vpDisplay::getDownScalingFactor(I), vpColor::green); + } + + if (s.tempo > 10.f) { + vpDisplay::displayText(I, I.getHeight() - 20 * vpDisplay::getDownScalingFactor(I), + 15 * vpDisplay::getDownScalingFactor(I), "Click to continue...", vpColor::red); + vpDisplay::flush(I); + vpDisplay::getClick(I); + } + else { + vpDisplay::flush(I); + vpTime::wait(s.tempo * 1000); + } + + if (opt_save_results) { + vpDisplay::getImage(I, I_save_results); + cv_list_img_reproj_dist.push_back(I_save_results); + std::ostringstream oss; + oss << cv_save_results_folder << "/reproj_err_with_dist_" << vpIoTools::getNameWE(calib.m_frame_name) + << opt_img_ext; + vpImageIo::write(I_save_results, oss.str()); + } + } + + // + // OpenCV Display side-by-side original and undistorted images, draw lines from start and 
end calib pattern pts + // + cv::Mat1b cv_img, cv_img_undist; + d->close(I); + vpImage I_undist; + vpImage I_dist_undist(I.getHeight(), 2 * I.getWidth()); + d->init(I_dist_undist, 0, 0, "(OpenCV) Straight lines have to be straight - distorted image / undistorted image"); + + if (!fast_display) { + for (size_t idx = 0; idx < calib_info.size(); idx++) { + tee << "\n(OpenCV) This tool computes the line fitting error (mean distance error) on image points" + " extracted from the raw distorted image." << "\n"; + + I = calib_info[idx].m_img; + vpImageConvert::convert(I, cv_img); + cv::undistort(cv_img, cv_img_undist, cv_cameraMatrix, cv_distCoeffs); + vpImageConvert::convert(cv_img_undist, I_undist); + + I_dist_undist.insert(I, vpImagePoint(0, 0)); + I_dist_undist.insert(I_undist, vpImagePoint(0, I.getWidth())); + vpDisplay::display(I_dist_undist); + + vpDisplay::displayText(I_dist_undist, 15 * vpDisplay::getDownScalingFactor(I_dist_undist), + 15 * vpDisplay::getDownScalingFactor(I_dist_undist), + calib_info[idx].m_frame_name + std::string(" distorted"), vpColor::red); + vpDisplay::displayText(I_dist_undist, 30 * vpDisplay::getDownScalingFactor(I_dist_undist), + 15 * vpDisplay::getDownScalingFactor(I_dist_undist), + "Draw lines from first / last points.", vpColor::red); + std::vector grid_points = calib_info[idx].m_imPts; + const int offset = (s.calibrationPattern == Settings::CHARUCOBOARD) ? 
-1 : 0; + for (int i = 0; i < s.boardSize.height; i++) { + std::vector current_line(grid_points.begin() + i * (s.boardSize.width+offset), + grid_points.begin() + (i + 1) * (s.boardSize.width+offset)); + + std::vector current_line_undist = cv_undistort(cv_cameraMatrix, cv_distCoeffs, current_line); + double a = 0, b = 0, c = 0; + double line_fitting_error = vpMath::lineFitting(current_line, a, b, c); + double line_fitting_error_undist = vpMath::lineFitting(current_line_undist, a, b, c); + tee << calib_info[idx].m_frame_name << " line " << i + 1 + << " fitting error on distorted points: " << line_fitting_error + << " ; on undistorted points: " << line_fitting_error_undist << "\n"; + + vpImagePoint ip1 = current_line.front(); + vpImagePoint ip2 = current_line.back(); + vpDisplay::displayLine(I_dist_undist, ip1, ip2, vpColor::red); + } + + tee << "\n(OpenCV) This tool computes the line fitting error (mean distance error) on image points" + " extracted from the undistorted image" << " (cv::undistort)." 
<< "\n"; + cv::Mat cvI; + std::vector pointBuf; + vpImageConvert::convert(I_undist, cvI); + + bool found = extractCalibrationPoints(s, cvI, ch_detector, pointBuf); + if (found) { + std::vector found_grid_points; + for (unsigned int i = 0; i < pointBuf.size(); i++) { + vpImagePoint ip(pointBuf[i].y, pointBuf[i].x); + found_grid_points.push_back(ip); + } + + vpDisplay::displayText(I_dist_undist, 15 * vpDisplay::getDownScalingFactor(I_dist_undist), + I.getWidth() + 15 * vpDisplay::getDownScalingFactor(I_dist_undist), + calib_info[idx].m_frame_name + std::string(" undistorted"), vpColor::red); + for (int i = 0; i < s.boardSize.height+offset; i++) { + std::vector current_line(found_grid_points.begin() + i * (s.boardSize.width+offset), + found_grid_points.begin() + (i + 1) * (s.boardSize.width+offset)); + + double a = 0, b = 0, c = 0; + double line_fitting_error = vpMath::lineFitting(current_line, a, b, c); + tee << calib_info[idx].m_frame_name << " undistorted image, line " << i + 1 + << " fitting error: " << line_fitting_error << "\n"; + + vpImagePoint ip1 = current_line.front() + vpImagePoint(0, I.getWidth()); + vpImagePoint ip2 = current_line.back() + vpImagePoint(0, I.getWidth()); + vpDisplay::displayLine(I_dist_undist, ip1, ip2, vpColor::red); + } + } + else { + std::string msg("Unable to detect grid on undistorted image"); + tee << msg << "\n"; + tee << "Check that the grid is not too close to the image edges" << "\n"; + vpDisplay::displayText(I_dist_undist, 15 * vpDisplay::getDownScalingFactor(I_dist_undist), + I.getWidth() + 15 * vpDisplay::getDownScalingFactor(I_dist_undist), + calib_info[idx].m_frame_name + std::string(" undistorted"), vpColor::red); + vpDisplay::displayText(I_dist_undist, 30 * vpDisplay::getDownScalingFactor(I_dist_undist), + I.getWidth() + 15 * vpDisplay::getDownScalingFactor(I_dist_undist), msg, vpColor::red); + } + + if (s.tempo > 10.f) { + vpDisplay::displayText( + I_dist_undist, I_dist_undist.getHeight() - 20 * 
vpDisplay::getDownScalingFactor(I_dist_undist), + 15 * vpDisplay::getDownScalingFactor(I_dist_undist), "Click to continue...", vpColor::red); + vpDisplay::flush(I_dist_undist); + vpDisplay::getClick(I_dist_undist); + } + else { + vpDisplay::flush(I_dist_undist); + vpTime::wait(s.tempo * 1000); + } + + if (opt_save_results) { + vpDisplay::getImage(I_dist_undist, I_save_results); + std::ostringstream oss; + oss << cv_save_results_folder << "/dist_vs_undist_" << vpIoTools::getNameWE(calib_info[idx].m_frame_name) + << opt_img_ext; + vpImage I_right; + vpImageTools::crop(I_save_results, vpRect(I_save_results.getWidth()/2, 0, + I_save_results.getWidth()/2, I_save_results.getHeight()), + I_right); + cv_list_img_undist.push_back(I_right); + vpImageIo::write(I_save_results, oss.str()); + } + } + } + + // + // (OpenCV) Display pseudo distortion displacement map + // + d->close(I_dist_undist); + vpImage I_dist_map(I.getHeight(), I.getWidth()); + d->init(I_dist_map, 0, 0, "(OpenCV) Distortion displacement map"); + computeDistortionDisplacementMap(cv_cameraMatrix, cv_distCoeffs, I_dist_map); + d->display(I_dist_map); + + const float ref_img_width = 640; + const unsigned int step = std::max(20u, static_cast((I_dist_undist.getWidth()/ref_img_width)*20)); + for (unsigned int i = 0; i < I_dist_map.getHeight(); i += step) { + for (unsigned int j = 0; j < I_dist_map.getWidth(); j += step) { + vpImagePoint imPt(i, j); + double jd = 0, id = 0; + undistortDistort(cv_cameraMatrix, cv_distCoeffs, j, i, jd, id); + vpImagePoint imPt_dist(id, jd); + + vpDisplay::displayArrow(I_dist_map, imPt, imPt_dist, vpColor::red, 2); + } + + unsigned int remainder_col = (I_dist_map.getWidth()-1) - (I_dist_map.getWidth() / step)*step; + if (remainder_col >= step/2) { + unsigned int j = I_dist_map.getWidth() - 1; + + vpImagePoint imPt(i, j); + double jd = 0, id = 0; + undistortDistort(cv_cameraMatrix, cv_distCoeffs, j, i, jd, id); + vpImagePoint imPt_dist(id, jd); + + vpDisplay::displayArrow(I_dist_map, 
imPt, imPt_dist, vpColor::red, 2); + } + } + + // Last row / column + unsigned int remainder_row = (I_dist_map.getHeight()-1) - (I_dist_map.getHeight() / step)*step; + if (remainder_row >= step/2) { + unsigned int i = I_dist_map.getHeight() - 1; + + for (unsigned int j = 0; j < I_dist_map.getWidth(); j += step) { + vpImagePoint imPt(i, j); + double jd = 0, id = 0; + undistortDistort(cv_cameraMatrix, cv_distCoeffs, j, i, jd, id); + vpImagePoint imPt_dist(id, jd); + + vpDisplay::displayArrow(I_dist_map, imPt, imPt_dist, vpColor::red, 2); + } + + unsigned int remainder_col = (I_dist_map.getWidth()-1) - (I_dist_map.getWidth() / step)*step; + if (remainder_col >= step/2) { + unsigned int j = I_dist_map.getWidth() - 1; + vpImagePoint imPt(i, j); + double jd = 0, id = 0; + undistortDistort(cv_cameraMatrix, cv_distCoeffs, j, i, jd, id); + vpImagePoint imPt_dist(id, jd); + + vpDisplay::displayArrow(I_dist_map, imPt, imPt_dist, vpColor::red, 2); + } + } + + // Image center and camera principal point + vpDisplay::displayCross(I_dist_map, vpImagePoint((I_dist_map.getHeight()-1)/2, (I_dist_map.getWidth()-1)/2), + 10*vpDisplay::getDownScalingFactor(I_dist_map), vpColor::green, 2); + vpDisplay::displayCircle(I_dist_map, vpImagePoint(cam.get_v0(), cam.get_u0()), + 8*vpDisplay::getDownScalingFactor(I_dist_map), vpColor::red); + + if (s.tempo > 10.f) { + vpDisplay::displayText( + I_dist_map, I_dist_map.getHeight() - 20 * vpDisplay::getDownScalingFactor(I_dist_map), + 15 * vpDisplay::getDownScalingFactor(I_dist_map), "Click to continue...", vpColor::red); + vpDisplay::flush(I_dist_map); + vpDisplay::getClick(I_dist_map); + } + else { + vpDisplay::flush(I_dist_map); + vpTime::wait(s.tempo * 1000); + } + + if (opt_save_results) { + vpDisplay::getImage(I_dist_map, I_save_results); + std::ostringstream oss; + oss << cv_save_results_folder << "/distortion_displacement_map" << opt_img_ext; + vpImageIo::write(I_save_results, oss.str()); + + // Save mosaic images + { + std::vector> mosaics; 
+ createMosaic(cv_list_img_reproj_dist, mosaics); + for (size_t i = 0; i < mosaics.size(); i++) { + oss.str(""); + oss << cv_save_results_folder << "/mosaics_reproj_err_with_dist_%04d" << opt_img_ext; + char name[FILENAME_MAX]; + snprintf(name, FILENAME_MAX, oss.str().c_str(), i); + vpImageIo::write(mosaics[i], name); + } + } + { + std::vector> mosaics; + createMosaic(cv_list_img_undist, mosaics); + for (size_t i = 0; i < mosaics.size(); i++) { + oss.str(""); + oss << cv_save_results_folder << "/mosaics_undist_%04d" << opt_img_ext; + char name[FILENAME_MAX]; + snprintf(name, FILENAME_MAX, oss.str().c_str(), i); + vpImageIo::write(mosaics[i], name); + } + } + } + + d->close(I_dist_map); + + // + // OpenCV Display reprojection error graph for all the images + // + const unsigned int disp_size = 650; + vpImage I_err_imPt(disp_size, disp_size); + std::shared_ptr d2 = vpDisplayFactory::createDisplay(I_err_imPt, 0, 0, + "(OpenCV) Reprojection error in the image plane for all the images"); + vpDisplay::display(I_err_imPt); + + // distortion + std::vector> err_imPt_imgs_dist; + err_imPt_imgs_dist.reserve(calibrator.size()); + std::vector err_imPt_dist; + err_imPt_dist.reserve(calib_points_all.size()); + double max_scale_uv_dist = getProjectionErrorUV(cv_cameraMatrix, cv_distCoeffs, calib_info, rvecs, tvecs, + err_imPt_imgs_dist, err_imPt_dist, true); + { + displayProjectionErrorUV(I_err_imPt, err_imPt_dist, disp_size, max_scale_uv_dist, true, + vpColor::blue, 20, vpColor::blue, true); + } + + if (s.tempo > 10.f) { + vpDisplay::displayText(I_err_imPt, I_err_imPt.getHeight() - 20 * vpDisplay::getDownScalingFactor(I_err_imPt), + 15 * vpDisplay::getDownScalingFactor(I_err_imPt), "Click to continue...", vpColor::red); + vpDisplay::flush(I_err_imPt); + vpDisplay::getClick(I_err_imPt); + } + else { + vpDisplay::flush(I_err_imPt); + vpTime::wait(s.tempo * 1000); + } + + if (opt_save_results) { + vpDisplay::getImage(I_err_imPt, I_save_results); + std::ostringstream oss; + oss << 
cv_save_results_folder << "/reprojection_error_graph" << opt_img_ext; + vpImageIo::write(I_save_results, oss.str()); + } + + // + // OpenCV Display reprojection error graph for each images + // + if (!fast_display) { + std::vector> list_img_reproj_graph; + for (size_t i = 0; i < err_imPt_imgs_dist.size(); i++) { + std::ostringstream oss_title; + oss_title << "(OpenCV) Reprojection error in the image plane for: " << + vpIoTools::getNameWE(calib_info[i].m_frame_name); + d2->setTitle(oss_title.str()); + vpDisplay::display(I_err_imPt); + + // distortion + displayProjectionErrorUV(I_err_imPt, err_imPt_dist, disp_size, max_scale_uv_dist, true, + vpColor::blue, 40, vpColor::blue, true); + displayProjectionErrorUV(I_err_imPt, err_imPt_imgs_dist[i], disp_size, max_scale_uv_dist, true, + vpColor::green, disp_size-60, vpColor::green, true); + + oss_title.str(""); + oss_title << vpIoTools::getNameWE(calib_info[i].m_frame_name); + vpDisplay::displayText(I_err_imPt, 20 * vpDisplay::getDownScalingFactor(I_err_imPt), + 15 * vpDisplay::getDownScalingFactor(I_err_imPt), + oss_title.str(), vpColor::white); + + if (s.tempo > 10.f) { + vpDisplay::displayText(I_err_imPt, I_err_imPt.getHeight() - 20 * vpDisplay::getDownScalingFactor(I_err_imPt), + 15 * vpDisplay::getDownScalingFactor(I_err_imPt), "Click to continue...", vpColor::red); + vpDisplay::flush(I_err_imPt); + vpDisplay::getClick(I_err_imPt); + } + else { + vpDisplay::flush(I_err_imPt); + vpTime::wait(s.tempo * 1000); + } + + if (opt_save_results) { + vpDisplay::getImage(I_err_imPt, I_save_results); + list_img_reproj_graph.push_back(I_save_results); + + std::ostringstream oss; + oss << cv_save_results_folder << "/reproj_err_graph_" << vpIoTools::getNameWE(calib_info[i].m_frame_name) + << opt_img_ext; + vpImageIo::write(I_save_results, oss.str()); + } + } + + if (opt_save_results) { + // Save mosaic images + std::ostringstream oss; + std::vector> mosaics; + createMosaic(list_img_reproj_graph, mosaics, 4, 3); + for (size_t i = 0; i 
< mosaics.size(); i++) { + oss.str(""); + oss << cv_save_results_folder << "/mosaics_reproj_err_graph_%04d" << opt_img_ext; + char name[FILENAME_MAX]; + snprintf(name, FILENAME_MAX, oss.str().c_str(), i); + vpImageIo::write(mosaics[i], name); + } + } + } + + tee << "\n"; + tee << "(OpenCV) Estimated pose using vpPoseVector format: [tx ty tz tux tuy tuz] with translation in meter and " + "rotation in rad" + << "\n"; + for (size_t i = 0; i < rvecs.size(); i++) { + tee << "Estimated pose on input data extracted from " << calib_info[i].m_frame_name << ": " + << vpPoseVector( + tvecs[i].at(0), tvecs[i].at(1), tvecs[i].at(2), + rvecs[i].at(0), rvecs[i].at(1), rvecs[i].at(2) + ).t() << "\n"; + } + + } // if (cv_calib_status) + } // if (perform_opencv_calib) + + + double end_time = vpTime::measureTimeMs(); + int milli = (end_time-start_time); + int seconds = milli / 1000; + milli %= 1000; + + int minutes = seconds / 60; + seconds %= 60; + + tee << "\nCamera calibration succeeded. Results are saved in " << "\"" << opt_output_file_name + << "\". Total time: " << minutes << " min " << seconds << " sec " << milli << " ms." << "\n"; return EXIT_SUCCESS; } catch (const vpException &e) { std::cout << "Catch an exception: " << e << std::endl; -#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11) - if (display != nullptr) { - delete display; - } -#endif return EXIT_FAILURE; } } @@ -733,14 +1587,17 @@ int main() #if !defined(HAVE_OPENCV_HIGHGUI) std::cout << "This example requires OpenCV highgui module." << std::endl; #endif -#if defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION < 0x050000) && !defined(HAVE_OPENCV_CALIB3D) +#if defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION < 0x050000) && !(defined(HAVE_OPENCV_CALIB3D) && defined(HAVE_OPENCV_OBJDETECT)) std::cout << "This example requires OpenCV calib3d module." 
<< std::endl; #endif -#if defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x050000) && !defined(HAVE_OPENCV_3D) +#if defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x050000) && !(defined(HAVE_OPENCV_CALIB3D) && defined(HAVE_OPENCV_OBJDETECT)) std::cout << "This example requires OpenCV 3d module." << std::endl; #endif #if !defined(VISP_HAVE_PUGIXML) - std::cout << "pugixml built-in 3rdparty is requested to run the calibration." << std::endl; + std::cout << "pugixml built-in 3rdparty is required to run the calibration." << std::endl; +#endif +#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11) + std::cout << "Compiler with C++11 support is required." << std::endl; #endif return EXIT_SUCCESS; } diff --git a/doc/biblio/references.bib b/doc/biblio/references.bib index 3a73a2317d..b6410d3b5a 100644 --- a/doc/biblio/references.bib +++ b/doc/biblio/references.bib @@ -11,6 +11,18 @@ @Article{Fischler81 month = jun } +@Article{Tsai87, + author = {Tsai, R.}, + journal = {IEEE Journal on Robotics and Automation}, + title = {A versatile camera calibration technique for high-accuracy 3D machine vision metrology using off-the-shelf TV cameras and lenses}, + year = {1987}, + volume = {3}, + number = {4}, + pages = {323-344}, + keywords = {Cameras;Calibration;Machine vision;Metrology;TV;Lenses;Robot vision systems;Robotic assembly;Robot kinematics;Application software}, + doi = {10.1109/JRA.1987.1087109} +} + @Article{Tsai89a, author = {Tsai, R. 
and Lenz, R.}, title = {A New Technique for Fully Autonomous and Efficient @@ -819,3 +831,34 @@ @article{Lee2010ASL Color Image Enhancement, Contrast Enhancement, Color Restoration }, doi = {10.1109/TCE.2010.5681151} } + +@techreport{marchand:inria-00072535, + TITLE = {{A New Formulation for Non-Linear Camera Calibration Using Virtual Visual Servoing}}, + AUTHOR = {Marchand, Eric and Chaumette, Fran{\c c}ois}, + URL = {https://inria.hal.science/inria-00072535}, + TYPE = {Research Report}, + NUMBER = {RR-4096}, + INSTITUTION = {{INRIA}}, + YEAR = {2001}, + KEYWORDS = {CALIBRATION ; NON-LINEAR APPROACH ; VIRTUAL VISUAL SERVOING}, + PDF = {https://inria.hal.science/inria-00072535v1/file/RR-4096.pdf}, + HAL_ID = {inria-00072535}, + HAL_VERSION = {v1}, +} + +@inproceedings{devernay:hal-00821474, + TITLE = {{Automatic calibration and removal of distortion from scenes of structured environments}}, + AUTHOR = {Devernay, Fr{\'e}d{\'e}ric and Faugeras, Olivier}, + URL = {https://inria.hal.science/hal-00821474}, + BOOKTITLE = {{Proc. SPIE}}, + ADDRESS = {San Diego, United States}, + PUBLISHER = {{SPIE}}, + SERIES = {Proc. 
SPIE}, + VOLUME = {2567}, + YEAR = {1995}, + DOI = {10.1117/12.218487}, + KEYWORDS = {Algorithms ; Video ; Optics ; Lenses ; Distortion ; Computer vision technology ; Coded apertures ; Cameras ; Calibration}, + PDF = {https://inria.hal.science/hal-00821474v1/file/devernay-faugeras_95b.pdf}, + HAL_ID = {hal-00821474}, + HAL_VERSION = {v1}, +} diff --git a/doc/image/tutorial/calibration/img-calib-GoPro-mosaics-undist-OpenCV.jpg b/doc/image/tutorial/calibration/img-calib-GoPro-mosaics-undist-OpenCV.jpg new file mode 100644 index 0000000000..26270d1899 Binary files /dev/null and b/doc/image/tutorial/calibration/img-calib-GoPro-mosaics-undist-OpenCV.jpg differ diff --git a/doc/image/tutorial/calibration/img-calib-GoPro-mosaics-undist-ViSP.jpg b/doc/image/tutorial/calibration/img-calib-GoPro-mosaics-undist-ViSP.jpg new file mode 100644 index 0000000000..73572ae355 Binary files /dev/null and b/doc/image/tutorial/calibration/img-calib-GoPro-mosaics-undist-ViSP.jpg differ diff --git a/doc/image/tutorial/calibration/img-calib-GoPro-mosaics.jpg b/doc/image/tutorial/calibration/img-calib-GoPro-mosaics.jpg new file mode 100644 index 0000000000..1a77cfa987 Binary files /dev/null and b/doc/image/tutorial/calibration/img-calib-GoPro-mosaics.jpg differ diff --git a/doc/image/tutorial/calibration/img-calib-GoPro-reprojection-error-graph-OpenCV.jpg b/doc/image/tutorial/calibration/img-calib-GoPro-reprojection-error-graph-OpenCV.jpg new file mode 100644 index 0000000000..82774d1336 Binary files /dev/null and b/doc/image/tutorial/calibration/img-calib-GoPro-reprojection-error-graph-OpenCV.jpg differ diff --git a/doc/image/tutorial/calibration/img-calib-GoPro-reprojection-error-graph-ViSP.jpg b/doc/image/tutorial/calibration/img-calib-GoPro-reprojection-error-graph-ViSP.jpg new file mode 100644 index 0000000000..e89c0d053c Binary files /dev/null and b/doc/image/tutorial/calibration/img-calib-GoPro-reprojection-error-graph-ViSP.jpg differ diff --git 
a/doc/image/tutorial/calibration/img-calib-dist-vs_undist-GOPR0262-OpenCV.jpg b/doc/image/tutorial/calibration/img-calib-dist-vs_undist-GOPR0262-OpenCV.jpg new file mode 100644 index 0000000000..3526e2c7c8 Binary files /dev/null and b/doc/image/tutorial/calibration/img-calib-dist-vs_undist-GOPR0262-OpenCV.jpg differ diff --git a/doc/image/tutorial/calibration/img-calib-dist-vs_undist-GOPR0262-ViSP.jpg b/doc/image/tutorial/calibration/img-calib-dist-vs_undist-GOPR0262-ViSP.jpg new file mode 100644 index 0000000000..f631e54b43 Binary files /dev/null and b/doc/image/tutorial/calibration/img-calib-dist-vs_undist-GOPR0262-ViSP.jpg differ diff --git a/doc/image/tutorial/calibration/img-chessboard-distortion-displacement-map.jpg b/doc/image/tutorial/calibration/img-chessboard-distortion-displacement-map.jpg new file mode 100644 index 0000000000..bfbc808c10 Binary files /dev/null and b/doc/image/tutorial/calibration/img-chessboard-distortion-displacement-map.jpg differ diff --git a/doc/image/tutorial/calibration/img-reproj-err-graph-chessboard-05.jpg b/doc/image/tutorial/calibration/img-reproj-err-graph-chessboard-05.jpg new file mode 100644 index 0000000000..bddc1f5c16 Binary files /dev/null and b/doc/image/tutorial/calibration/img-reproj-err-graph-chessboard-05.jpg differ diff --git a/doc/image/tutorial/calibration/img-reproj-error-graph.jpg b/doc/image/tutorial/calibration/img-reproj-error-graph.jpg new file mode 100644 index 0000000000..84eba963be Binary files /dev/null and b/doc/image/tutorial/calibration/img-reproj-error-graph.jpg differ diff --git a/doc/tutorial/calibration/tutorial-calibration-intrinsic.dox b/doc/tutorial/calibration/tutorial-calibration-intrinsic.dox index 89f2a702ea..4fc19ae950 100644 --- a/doc/tutorial/calibration/tutorial-calibration-intrinsic.dox +++ b/doc/tutorial/calibration/tutorial-calibration-intrinsic.dox @@ -69,9 +69,9 @@ How to improve calibration accuracy: - Download and print, one of the following calibration grid: - a black and 
white chessboard - [OpenCV_Chessboard.pdf] (recommended); + [OpenCV_Chessboard.pdf] (recommended); - a symmetrical circle pattern - [grid2d.pdf]. + [grid2d.pdf]. - Then stick the printed grid on a rigid support. @@ -133,8 +133,9 @@ $ ./tutorial-grabber-opencv --seqname chessboard-%02d.jpg --record 1 \section calibration 4. Calibration \subsection calibration_source_code 4.1. Source code -Note that all the material (source code) described in this tutorial is part of ViSP source code -(in `example/calibration` folder) and could be found in https://github.com/lagadic/visp/tree/master/example/calibration. +Note that all the material (source code) described in this tutorial is part of the ViSP source code +(in the `apps/calibration/intrinsic` folder) and could also be found on Github at +https://github.com/lagadic/visp/tree/master/apps/calibration/intrinsic url. The calibration tool is available in `visp-calibrate-camera.cpp` located in `apps/calibration/intrinsic` folder. @@ -144,7 +145,7 @@ We will not describe in detail the source, but just mention that: - the calibration tool takes as input a configuration file that allows to precise the kind of pattern used in the images (chessboard or circles grid), and the location of the images used as input. If `libjpeg` and `libpng` 3rd party libraries are installed and detected during ViSP configuration, you may consider .pgm, .ppm, .jpg, .png images. - Default configuration files are provided in \c example/calibration folder; + Default configuration files are provided in \c apps/calibration/intrinsic folder; - the resulting parameters are saved in \c camera.xml file. \subsection calibration_chessboard 4.2. With a chessboard @@ -369,7 +370,7 @@ Since ViSP 3.3.1 we provide a set of tools to analyse calibration results. \subsubsection calibration_tools_patterns 4.4.1. 
Grid patterns Running `visp-calibrate-camera` binary allows to visualize the locations of the calibration patterns in the image: -\image html img-grid-patterns.png +\image html img-grid-patterns.png Colormap representing the calibration pattern occupancy, hot colors show pixels location with high board density ; black squares represent the corners locations used as input to the calibration pipeline. A good calibration is obtained when the patterns cover most part of the image. @@ -377,7 +378,7 @@ A good calibration is obtained when the patterns cover most part of the image. Reprojection error could be seen in the next image. It shows the current reprojection error, the extracted points and the projected points using the estimated `cMo` camera pose and camera parameters: -\image html img-calib-reprojection-error.jpg +\image html img-calib-reprojection-error.jpg Red crosses represent the extracted chessboard corners and the green crosses the projected 3D chessboard corners using the estimated camera intrinsic parameters and the camera poses. On the console, the global reprojection error is also given for the model without distortion parameters: \code{.sh} @@ -440,7 +441,7 @@ To get an idea on how much there is distortion, `visp-calibrate-camera` binary g - right image: image is undistorted, chessboard points are extracted and lines starting from the first and last points are drawn -\image html img-calib-line-fitting.jpg +\image html img-calib-line-fitting.jpg Comparison between extracted lines from the input distorted and undistorted images. On the console, mean distance errors for line fitting for points extracted from distorted image and after using vpPixelMeterConversion::convertPoint() is given: @@ -465,7 +466,129 @@ chessboard-03.jpg undistorted image, line 5 fitting error: 0.08680507551 chessboard-03.jpg undistorted image, line 6 fitting error: 0.06309999164 \endcode -\subsubsection calibration_tools_poses 4.4.4. 
Camera poses +\subsubsection calibration_tools_lens_distortion 4.4.4. Camera lens distortion +A pseudo "distortion displacement map" is also displayed: + +\image html img-chessboard-distortion-displacement-map.jpg Distortion displacement map: hot colors representing large pixels displacement induced by the camera lens distortion model ; the red arrows representing the direction and the amount of displacement. + +The effect of the lens distortion is visible with hot colors corresponding to much stronger effect of the distortion +compared to cold colors (typically in the center of the image). With barrel distortion, this effect tends to "push" the +pixels at the extremities of the image toward the center of the image. + +\subsubsection calibration_tools_reproj_graph 4.4.5. Reprojection errors graph +The reprojection errors in the uv-coordinates are also displayed for the calibration without (in red) and with distortion (in blue) support: + +\image html img-reproj-error-graph.jpg Visualisation of the reprojection errors for all the extracted corners in the uv-plane, without and with taking into account a camera model lens distortion. + +For each calibration image, the same graph is also displayed, with the current reprojection errors visible in green: + +\image html img-reproj-err-graph-chessboard-05.jpg Visualisation of the reprojection errors for all the extracted corners in the uv-plane, in green those for the current image. + +This should help to analyse if a calibration image is appropriate or if it needs to be discarded, in addition to the total reprojection error metric. + +\subsubsection calibration_tools_log 4.4.6. 
Calibration results log +You can use the following command line arguments to: +- `--save` to automatically create a folder with the current datetime with the result images saved on disk, plus a log text file containing the terminal output +- `--save-jpg` to save the images in jpeg format instead of png one +- `--help` flag will print a short description of the different command line flags + +\subsubsection calibration_tools_opencv 4.4.7. Calibration using OpenCV +A basic support of the OpenCV camera calibration is possible using the `--opencv-calib` flag. +Passing this flag allows to both calibrate using the ViSP and the OpenCV camera calibration pipeline. + +This is a rough support of the OpenCV calibration pipeline with the following limitations: + - calibration with ChArUco calibration boards is experimental and has not been fully tested + - the full ChArUco board must be totally visible in the image + - some customisation of the [`cv::calibrateCameraRO()`](https://docs.opencv.org/4.x/d9/d0c/group__calib3d.html#gacb6b35670216b24b67c70fcd21519ead) + function is possible, but not complete + - the following OpenCV calibration flags are currently supported: + - `CALIB_USE_INTRINSIC_GUESS` + - `CALIB_FIX_ASPECT_RATIO` + - `CALIB_FIX_PRINCIPAL_POINT` + - `CALIB_ZERO_TANGENT_DIST` + - `CALIB_FIX_FOCAL_LENGTH` + - `CALIB_FIX_K1` + - `CALIB_FIX_K2` + - `CALIB_FIX_K3` + - `CALIB_USE_QR` + - `CALIB_USE_LU` + - with by default `cv::CALIB_USE_INTRINSIC_GUESS | cv::CALIB_USE_LU` being used, and the intrinsic parameters initialised using the ViSP calibration results. + +An example for a ViSP config file for ChArUco board: +\code{.sh} +# Number of inner corners per a item row and column. (square, circle) +BoardSize_Width: 18 +BoardSize_Height: 9 + +# The size of a square in meters +Square_Size: 0.02 + +# The type of pattern used for camera calibration. 
+# One of: CHESSBOARD, CIRCLES_GRID or CHARUCOBOARD +Calibrate_Pattern: CHARUCOBOARD + +# The input image sequence to use for calibration +Input: pycalib/data/charuco/%08d.jpg + +# Tempo in seconds between two images. If > 10 wait a click to continue +Tempo: 1 + +# ChArUco +Charuco_Marker_Size: 0.015 +Charuco_Dictionary: DICT_4X4_250 +Charuco_Legacy_Pattern: 0 +\endcode + +Nevertheless, the benefit of using a camera lens model with higher polynomial order can be directly seen with a wide-angle camera lens. +As an example, we will perform the calibration of a GoPro Hero 4 camera using this dataset: +- https://github.com/jhbrito/CameraCalibrationDataset/tree/main/GoPro%20Hero%204 ; MIT License / Copyright (c) 2021 jhbrito + +The model of the distortion and the calibration pipeline used in ViSP is composed of only one term, which is not enough to accurately model wide-angle lenses. + +\image html img-calib-dist-vs_undist-GOPR0262-ViSP.jpg Visualisation of an undistorted image from a wide-angle camera lens (ViSP model). + +On the previous image, we have drawn a straight line for each row of the chessboard using the first and last extracted corner location. +The barrel distortion effect is clearly noticeable, and a consequence of using a wide-angle camera lens. +On the right side, we have undistorted the original image using the estimated one term distortion coefficient. Similarly, +a straight line is drawn from the extracted chessboard corner locations. The distortion effect is reduced but using a distortion model with +only one term is not enough to accurately model the distortion effect at the border of the image. + +\image html img-calib-dist-vs_undist-GOPR0262-OpenCV.jpg Visualisation of an undistorted image from a wide-angle camera lens (OpenCV model). 
+ +The same operation is performed using this time a camera lens distortion with \f$ (k_1, k_2, k_3) \f$ coefficients for the radial distortion and +\f$ (p_1, p_2) \f$ coefficients for the tangential distortion model. +Taking into account higher order coefficients for the polynomial radial distortion allows better modelling of the radial distortion, +as can be seen in the previous image. The tangential distortion is often negligible with correct camera lenses (\cite Tsai87 \cite devernay:hal-00821474). + +For camera lenses which do not exhibit a high amount of distortion, modelling the radial distortion with only one term can bring sufficient precision +(\cite devernay:hal-00821474) and has the advantage that computing the undistorted coordinates has a closed-form solution while a solver such as a +Newton-Raphson method needs to be used with a higher order distortion model (e.g. \cite KannalaBrandt). + +The following mosaic shows the original GoPro images used for the calibration and is generated when using the `--save` command line: + +\image html img-calib-GoPro-mosaics.jpg Images mosaic corresponding to the original calibration images. + +The following mosaic shows the undistorted images when using the ViSP calibration result: + +\image html img-calib-GoPro-mosaics-undist-ViSP.jpg Images mosaic corresponding to undistorted images using the ViSP calibration result and distortion model. + +The following mosaic shows the undistorted images when using the OpenCV calibration result: + +\image html img-calib-GoPro-mosaics-undist-OpenCV.jpg Images mosaic corresponding to undistorted images using the OpenCV calibration result and distortion model. 
+ +Finally, we can also plot the reprojection errors in the uv-image plane, without and with using a distortion model with ViSP: + +\image html img-calib-GoPro-reprojection-error-graph-ViSP.jpg Reprojection errors in the uv-image plane for all the extracted chessboard corners without and with taking into account a distortion model with ViSP. + +And with OpenCV: + +\image html img-calib-GoPro-reprojection-error-graph-OpenCV.jpg Reprojection errors in the uv-image plane for all the extracted chessboard corners with OpenCV. + +The choice of a distortion model is dependent on the type of camera lens and also a tradeoff between accuracy and computation time. +For instance, it can be cheaper to perform the undistortion directly on the image coordinates or it can be necessary to first compute the uv-distortion map and +then undistort the whole image for certain types of machine vision application. + +\subsubsection calibration_tools_poses 4.4.8. Camera poses There is `camera_calibration_show_extrinsics.py` script that allows to display camera poses. Prior to use that script, you need to install `scipy`: \code{.sh} @@ -479,7 +602,7 @@ $ python camera_calibration_show_extrinsics.py --calibration camera.xml --scale_ \endcode It reads the camera poses saved in `camera.xml` file and display them with respect to the calibration grid considered as static. -\image html img-calib-script-extrinsic.jpg +\image html img-calib-script-extrinsic.jpg 3D visualisation of the different (estimated) camera poses corresponding to the different captured viewpoints for camera calibration. \subsection calibration_init 4.5. How to get over calibration error @@ -608,7 +731,7 @@ Results are shown in the next images: - right image is the resulting `chessboard-undistort.jpg` image where distortion is removed thanks to the camera intrinsic parameters. Here you can see that chessboard lines are straight. 
-\image html img-chessboard-undistort.jpg +\image html img-chessboard-undistort.jpg Original distorted image and undistorted image using the estimated distortion coefficient from camera calibration. \subsection calibration_undistort_multi 5.2. Undistort a sequence of images diff --git a/modules/core/src/camera/vpXmlParserCamera.cpp b/modules/core/src/camera/vpXmlParserCamera.cpp index 4385605f48..93fe856d94 100644 --- a/modules/core/src/camera/vpXmlParserCamera.cpp +++ b/modules/core/src/camera/vpXmlParserCamera.cpp @@ -736,13 +736,17 @@ class vpXmlParserCamera::Impl subsampling_height_tmp = node.text().as_uint(); break; + case CODE_XML_MODEL: + break; + case CODE_XML_ADDITIONAL_INFO: + break; + case CODE_XML_BAD: case CODE_XML_OTHER: case CODE_XML_CAMERA: case CODE_XML_FULL_HEIGHT: case CODE_XML_FULL_WIDTH: - case CODE_XML_MODEL: case CODE_XML_MODEL_TYPE: case CODE_XML_U0: case CODE_XML_V0: diff --git a/modules/vision/include/visp3/vision/vpCalibration.h b/modules/vision/include/visp3/vision/vpCalibration.h index f25e3cbe48..d8a9164daf 100644 --- a/modules/vision/include/visp3/vision/vpCalibration.h +++ b/modules/vision/include/visp3/vision/vpCalibration.h @@ -72,9 +72,9 @@ class VISP_EXPORT vpCalibration distortion. */ CALIB_VIRTUAL_VS, /*!< Virtual visual servoing approach without estimation of the distortion (results are similar to Lowe - approach). */ + approach) @cite marchand:inria-00072535. */ CALIB_VIRTUAL_VS_DIST, /*!< Virtual visual servoing approach with - estimation of the distortion. */ + estimation of the distortion @cite marchand:inria-00072535.*/ CALIB_LAGRANGE_VIRTUAL_VS, /*!< Lagrange approach first, than virtual visual servoing approach, without estimation of the distortion. */