【OpenCV】Verifying Calibration Quality with Epipolar Geometry

Calibration quality verification:
Take a pair of corresponding points whose correspondence and pixel coordinates are already known, derive the epipolar constraint from the calibrated intrinsics and extrinsics, and check how far each point lies from its epipolar line.
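
The whole check boils down to the standard relation: for a pixel p0 in camera 0 and its correspondence p1 in camera 1, with (R, t) mapping camera-0 coordinates into camera-1 coordinates, the fundamental matrix is F = K1^(-T) [t]x R K0^(-1), the epipolar line in image 1 is l = F * p0, and the calibration error shows up as the distance of p1 to that line. Below is a minimal self-contained sketch of just this computation; all intrinsics, pose and pixel values in it are made-up placeholders, not taken from the project code that follows.

  #include <Eigen/Dense>
  #include <cmath>
  #include <iostream>

  int main() {
    // placeholder intrinsics and relative pose, for illustration only
    Eigen::Matrix3d K0, K1;
    K0 << 450, 0, 320, 0, 450, 240, 0, 0, 1;
    K1 << 455, 0, 318, 0, 455, 242, 0, 0, 1;
    Eigen::Matrix3d R =
        Eigen::AngleAxisd(0.05, Eigen::Vector3d::UnitY()).toRotationMatrix();
    Eigen::Vector3d t(0.1, 0.0, 0.0);  // camera-0 origin expressed in camera 1

    // F = K1^(-T) * [t]x * R * K0^(-1)
    Eigen::Matrix3d t_skew;
    t_skew << 0, -t(2), t(1), t(2), 0, -t(0), -t(1), t(0), 0;
    Eigen::Matrix3d F = K1.inverse().transpose() * t_skew * R * K0.inverse();

    // a made-up pixel pair: p0 in image 0, p1 its correspondence in image 1
    Eigen::Vector3d p0(300.0, 200.0, 1.0), p1(280.0, 202.0, 1.0);

    // epipolar line of p0 in image 1, and the point-to-line distance of p1
    Eigen::Vector3d l = F * p0;
    double dist = std::abs(p1.dot(l)) / l.head<2>().norm();
    std::cout << "epipolar distance [px]: " << dist << std::endl;
    return 0;
  }

For a good calibration and genuine correspondences this distance should stay around one pixel or below; the two snippets below perform the same check against real frames and calibration parameters.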

  ///get the camera intrinsics and T_Ci_B
  std::vector<Eigen::Matrix3d> M_K;
  std::vector<Eigen::Matrix4d> T_Ci_B;
  for (int i = 0; i < new_frames->size(); ++i) {
    auto frame = new_frames->frames_[i];
    auto project = cams_->getCameraShared(i);
    // intrinsic parameters are assumed to be ordered (fx, fy, cx, cy)
    auto k = project->getIntrinsicParameters();
    Eigen::Matrix3d M_Ki;
    M_Ki << k(0), 0, k(2), 0, k(1), k(3), 0, 0, 1;
    M_K.push_back(M_Ki);

    Eigen::Matrix4d T_Ci_B_temp = cams_->get_T_C_B(i).getTransformationMatrix();
    T_Ci_B.emplace_back(T_Ci_B_temp);
  }
  /// get T_C0_Ci (maps camera-i coordinates into camera-0 coordinates),
  /// assuming get_T_C_B returns T_{C<-B}, i.e. p_C = T_C_B * p_B
  std::vector<Eigen::Matrix4d> T_C0_Ci;
  for (int i = 1; i < new_frames->size(); ++i) {
    Eigen::Matrix4d T_C0_Ci_temp = T_Ci_B[0] * T_Ci_B[i].inverse();
    T_C0_Ci.emplace_back(T_C0_Ci_temp);
  }
  std::cout << "T_C0_Ci : " << T_C0_Ci.size() << std::endl;

  /// get E = [t]x * R for each (camera 0, camera i+1) pair
  std::vector<Eigen::Matrix3d> R_C0_Ci;
  std::vector<Eigen::Matrix3d> t_cross_R;
  for (int i = 0; i < T_C0_Ci.size(); ++i) {
    Eigen::Vector3d t_C0_Ci_temp;
    Eigen::Matrix3d R_C0_Ci_temp, skew_t, t_cross_R_temp;
    R_C0_Ci_temp = T_C0_Ci[i].block(0, 0, 3, 3);
    t_C0_Ci_temp = T_C0_Ci[i].block(0, 3, 3, 1);
    // skew-symmetric matrix [t]x such that [t]x * v = t x v
    skew_t << 0, -t_C0_Ci_temp(2), t_C0_Ci_temp(1),
        t_C0_Ci_temp(2), 0, -t_C0_Ci_temp(0),
        -t_C0_Ci_temp(1), t_C0_Ci_temp(0), 0;
    R_C0_Ci.emplace_back(R_C0_Ci_temp);
    // essential matrix of the pair: E = [t]x * R
    t_cross_R_temp = skew_t * R_C0_Ci_temp;
    t_cross_R.emplace_back(t_cross_R_temp);
  }
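
  // optional extra sanity check (illustrative sketch): a valid essential matrix
  // [t]x * R has rank 2 with two equal non-zero singular values, so a clearly
  // different spectrum already hints at a broken extrinsic pair
  for (const auto &E : t_cross_R) {
    Eigen::JacobiSVD<Eigen::Matrix3d> svd(E);
    std::cout << "singular values of E: " << svd.singularValues().transpose()
              << std::endl;
  }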

  /// epipolar line in camera i+1 from a camera-0 pixel:
  /// l_{i+1} = K_{i+1}^(-T) * E^T * K_0^(-1) * p_0  (= F * p_0)
  for (int i = 0; i < t_cross_R.size(); ++i) {
    auto ld = new_frames->at(0)->landmark_vec_;
    for (int j = 0; j < ld.size(); ++j) {
      if (ld[j] == nullptr)
        continue;
      // pixel of landmark j observed in camera 0, in homogeneous pixel coordinates
      auto px = new_frames->frames_[0]->px_vec_.col(j);
      Eigen::Vector3d p_norm(px.x(), px.y(), 1.0);

      // pixel of the same landmark observed in camera i+1
      Eigen::Vector3d p_norm_corresponding(0.0, 0.0, 1.0);
      bool found = false;
      const auto &ld_other = new_frames->at(i + 1)->landmark_vec_;
      for (size_t n = 0; n < ld_other.size(); ++n) {
        if (ld_other[n] == ld[j]) {
          auto px_corr = new_frames->frames_[i + 1]->px_vec_.col(n);
          p_norm_corresponding << px_corr.x(), px_corr.y(), 1.0;
          found = true;
          break;
        }
      }
      if (!found)
        continue;

      // epipolar line of p_norm in camera i+1 and the point-to-line distance of
      // the corresponding pixel: |a*u + b*v + c| / sqrt(a^2 + b^2)
      Eigen::Vector3d l_0i = M_K[i + 1].inverse().transpose() *
                             t_cross_R[i].transpose() * M_K[0].inverse() * p_norm;
      double dist = std::abs(p_norm_corresponding.dot(l_0i)) / l_0i.head<2>().norm();
      std::cout << "dist: " << dist << std::endl;

      if (1) {  // set to 0 to disable the visual check
        // show the camera-0 pixel that was used for the epipolar test
        cv::Mat img_show = new_frames->frames_[0]->image_.clone();
        std::string name = "Calib_Check";
        cv::namedWindow(name, cv::WINDOW_NORMAL);
        cv::resizeWindow(name, img_show.cols, img_show.rows);
        cv::KeyPoint kp_show = cv::KeyPoint(px.x(), px.y(), 1);
        std::vector<cv::KeyPoint> kps;
        kps.emplace_back(kp_show);

        cv::drawKeypoints(img_show, kps, img_show, cv::Scalar(0, 0, 255),
            cv::DrawMatchesFlags::DRAW_RICH_KEYPOINTS);
        cv::imshow(name, img_show);
        cv::waitKey(0);
      }

      break;
    }
  }
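
A single point pair only yields one residual; for an overall quality figure it is more robust to collect the distances of many pairs and aggregate them. A small sketch of such an aggregation (the helper name and the rule of thumb below are my own, not part of the original code):

  #include <cmath>
  #include <vector>

  // RMS of the per-point epipolar distances collected in the loop above
  double epipolarRms(const std::vector<double> &dists) {
    double sum_sq = 0.0;
    for (double d : dists) sum_sq += d * d;
    return dists.empty() ? 0.0 : std::sqrt(sum_sq / dists.size());
  }

An RMS in the sub-pixel to roughly one-pixel range is what a consistent calibration usually produces; a systematic offset of several pixels points at wrong intrinsics, a flipped extrinsic direction, or unsynchronized frames.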
The same check, implemented a second time against the project's calibration-parameter struct instead of the frame bundle:

  NOLOCalibParams<float> calib_params = m_calib_params;
  //  std::vector<float> K0 = m_calib_params.Ks[0];
  //  std::vector<float> D0 = m_calib_params.Ds[0];
  //  std::vector<float> K1 = m_calib_params.Ks[1];
  //  std::vector<float> D1 = m_calib_params.Ds[1];
  //  std::vector<float> K2 = m_calib_params.Ks[2];
  //  std::vector<float> D2 = m_calib_params.Ds[2];
  //  std::vector<float> K3 = m_calib_params.Ks[3];
  //  std::vector<float> D3 = m_calib_params.Ds[3];
  //  cv::Mat cur_K0 = (cv::Mat_<double>(3, 3) << K0[0], 0, K0[2], 0, K0[1], K0[3], 0, 0, 1);
  //  cv::Mat cur_K1 = (cv::Mat_<double>(3, 3) << K1[0], 0, K1[2], 0, K1[1], K1[3], 0, 0, 1);
  //  cv::Mat cur_K2 = (cv::Mat_<double>(3, 3) << K2[0], 0, K2[2], 0, K2[1], K2[3], 0, 0, 1);
  //  cv::Mat cur_K3 = (cv::Mat_<double>(3, 3) << K3[0], 0, K3[2], 0, K3[1], K3[3], 0, 0, 1);
  //  cv::Mat cur_D_fisheye = (cv::Mat_<double>(4, 1) << D0[0], D0[1], D0[2], D0[3]);
  //
  //  Eigen::Map<const Eigen::Matrix<double, 3, 3, Eigen::RowMajor>> eigenMat0(cur_K0.ptr<double>());
  //  Eigen::Matrix3d M_K0 = eigenMat0;
  //  Eigen::Map<const Eigen::Matrix<double, 3, 3, Eigen::RowMajor>> eigenMat1(cur_K1.ptr<double>());
  //  Eigen::Matrix3d M_K1 = eigenMat1;
  //  Eigen::Map<const Eigen::Matrix<double, 3, 3, Eigen::RowMajor>> eigenMat2(cur_K2.ptr<double>());
  //  Eigen::Matrix3d M_K2 = eigenMat2;
  //  Eigen::Map<const Eigen::Matrix<double, 3, 3, Eigen::RowMajor>> eigenMat3(cur_K3.ptr<double>());
  //  Eigen::Matrix3d M_K3 = eigenMat3;

  std::vector<Eigen::Matrix3d> M_K;
  // build one K per camera (the epipolar loop below indexes M_K[i+1])
  for (size_t i = 0; i < m_calib_params.Ks.size(); ++i) {
    std::vector<float> Ki = m_calib_params.Ks[i];
    cv::Mat cur_Ki =
        (cv::Mat_<double>(3, 3) << Ki[0], 0, Ki[2], 0, Ki[1], Ki[3], 0, 0, 1);
    Eigen::Map<const Eigen::Matrix<double, 3, 3, Eigen::RowMajor>> eigenMati(
        cur_Ki.ptr<double>());
    Eigen::Matrix3d M_Ki = eigenMati;
    M_K.emplace_back(M_Ki);
  }

  std::vector<std::vector<float>> T_C_Ci = calib_params.T_C_Cs;
  std::vector<Eigen::Matrix3d> cur_R_C_Ci;
  std::vector<Eigen::Vector3d> cur_t_C_Ci;
  std::vector<Eigen::Matrix3d> skew_t_C_Ci;
  for (auto &data : m_calib_params.T_C_Cs) {
    Eigen::Vector3d t;
    std::vector<float> tq;
    // vector2tq is assumed to unpack the raw extrinsics as [tx, ty, tz, qw, qx, qy, qz]
    vector2tq(tq, data);
    t[0] = (double)tq[0];
    t[1] = (double)tq[1];
    t[2] = (double)tq[2];
    Eigen::Matrix3d skew_t;
    skew_t << 0, -t(2), t(1), t(2), 0, -t(0), -t(1), t(0), 0;
    skew_t_C_Ci.emplace_back(skew_t);
    // Eigen's Quaterniond constructor takes (w, x, y, z); adjust the indices
    // if vector2tq stores the quaternion in (x, y, z, w) order instead
    Eigen::Quaterniond q(tq[3], tq[4], tq[5], tq[6]);
    Eigen::Matrix3d rot = q.normalized().toRotationMatrix();
    cur_R_C_Ci.emplace_back(rot);
    cur_t_C_Ci.emplace_back(t);
  }
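
  // quick sanity check (illustrative sketch): the translation norms are the
  // camera baselines and should roughly match the physical rig layout
  // (units are whatever the calibration file stores, typically meters)
  for (size_t i = 0; i < cur_t_C_Ci.size(); ++i) {
    std::cout << "baseline of pair " << i << ": " << cur_t_C_Ci[i].norm()
              << std::endl;
  }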

  // l_i = K_i^(-T) * [t]x * R * K_0^(-1) * p_0 = F * p_0
  // (R, t are assumed to map camera-0 coordinates into camera-i coordinates;
  //  invert T_C_Cs first if it is stored in the opposite direction)
  std::cout << " skew_t_C_Ci size " << skew_t_C_Ci.size() << std::endl;
  auto sorted_p2ds = img_info.sorted_p2ds;
  // the camera-0 pixel used as p_0 for every pair
  Eigen::Vector3d p0(sorted_p2ds[0][0].x, sorted_p2ds[0][0].y, 1.0);
  for (int i = 0; i < skew_t_C_Ci.size(); ++i) {
    // epipolar line of p0 in camera i+1
    Eigen::Vector3d l0i = M_K[i + 1].inverse().transpose() * skew_t_C_Ci[i] *
                          cur_R_C_Ci[i] * M_K[0].inverse() * p0;

    // corresponding pixel in camera i+1 and its distance to that line
    Eigen::Vector3d p_image(sorted_p2ds[i + 1][0].x, sorted_p2ds[i + 1][0].y, 1.0);
    double dist = std::abs(p_image.dot(l0i)) / l0i.head<2>().norm();
    std::cout << "epipolar dist, camera 0 vs camera " << i + 1 << ": " << dist
              << std::endl;

    // optional visual check of the keypoints on camera i+1
    if (0) {  // set to 1 to enable
      cv::Mat img_show = images[i + 1].clone();
      std::string name = "Funda";
      cv::namedWindow(name, cv::WINDOW_NORMAL);
      cv::resizeWindow(name, img_show.cols, img_show.rows);
      auto cur_kpts = img_info.sorted_kpts[i + 1];
      cv::drawKeypoints(img_show, cur_kpts, img_show, cv::Scalar(0, 0, 255),
                        cv::DrawMatchesFlags::DRAW_RICH_KEYPOINTS);
      cv::imshow(name, img_show);
      cv::waitKey(0);
    }
  }
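
Since the whole post is OpenCV-based anyway, the hand-rolled check can also be cross-validated with OpenCV itself: build the same F as a cv::Mat (or estimate it directly from the matches with cv::findFundamentalMat) and let cv::computeCorrespondEpilines produce the epipolar lines. A hedged sketch of such a helper; the function name and the pts0/pts1 containers are assumptions, not part of the code above.

  #include <opencv2/calib3d.hpp>
  #include <opencv2/core.hpp>
  #include <cmath>
  #include <vector>

  // mean distance of matched pixels in the second image to the epipolar lines
  // induced by their matches in the first image; F is the 3x3 fundamental
  // matrix (CV_64F) mapping first-image points to second-image lines
  double meanEpipolarDistance(const std::vector<cv::Point2f> &pts0,
                              const std::vector<cv::Point2f> &pts1,
                              const cv::Mat &F) {
    std::vector<cv::Vec3f> lines;  // epipolar lines in the second image
    cv::computeCorrespondEpilines(pts0, 1 /*points belong to the first image*/, F, lines);
    double sum = 0.0;
    for (size_t i = 0; i < pts1.size(); ++i) {
      const cv::Vec3f &l = lines[i];
      sum += std::abs(l[0] * pts1[i].x + l[1] * pts1[i].y + l[2]) /
             std::sqrt(l[0] * l[0] + l[1] * l[1]);
    }
    return pts1.empty() ? 0.0 : sum / pts1.size();
  }

If this OpenCV-computed mean agrees with the distances printed by the Eigen code above, the fundamental-matrix construction, and therefore the calibration being checked, is internally consistent.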
