The performance of the nonlinear VINS estimator is very sensitive to its initial values: velocity, scale, gravity vector, the 3D positions of the map points, and the camera-IMU extrinsics. In many scenarios it is a great convenience for the user if the camera and IMU are plug-and-play, with online self-calibration and initialization. VINS does this in four steps: first, the extrinsic rotation calibration covered last time; second, selecting a frame inside the window as the origin of the initialization and computing the 3D map points; third, transforming the camera poses into the IMU (body) frame; fourth, aligning the camera with the IMU, which initializes the IMU bias, the velocities, the gravity vector and the scale.
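To keep the four stages apart while reading the code below, here is a minimal, self-contained sketch of how they gate each other; the function names are invented for illustration and are not the actual VINS-Mono interfaces (in the real code the work is spread across processImage(), initialStructure() and visualInitialAlign()):

    #include <iostream>

    // Hypothetical stand-ins for the four stages listed above.
    bool calibrateExtrinsicRotation() { return true; }  // stage 1: camera-IMU rotation calibration
    bool buildVisualSfM()             { return true; }  // stage 2: pick a reference frame, solve relative pose, global SfM
    bool expressFramesInImuFrame()    { return true; }  // stage 3: PnP for every frame, convert camera poses to the IMU frame
    bool alignVisualAndImu()          { return true; }  // stage 4: gyro bias, velocities, gravity and scale

    bool tryInitialize()
    {
        // Each stage must succeed before the next runs; on failure the estimator keeps collecting
        // frames and tries again, much like initialStructure() returning false below.
        if (!calibrateExtrinsicRotation()) return false;
        if (!buildVisualSfM())             return false;
        if (!expressFramesInImuFrame())    return false;
        return alignVisualAndImu();
    }

    int main() { std::cout << (tryInitialize() ? "initialized" : "not yet") << std::endl; }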

bool Estimator::initialStructure()
{
    TicToc t_sfm;
    // check IMU observability: look at how much the average acceleration varies between frames
    {
        map<double, ImageFrame>::iterator frame_it;
        Vector3d sum_g = Vector3d::Zero(); // zero-initialize (the upstream code leaves this uninitialized)
        for (frame_it = all_image_frame.begin(), frame_it++; frame_it != all_image_frame.end(); frame_it++)
        {
            double dt = frame_it->second.pre_integration->sum_dt;
            Vector3d tmp_g = frame_it->second.pre_integration->delta_v / dt;
            sum_g += tmp_g;
        }
        Vector3d aver_g;
        aver_g = sum_g * 1.0 / ((int)all_image_frame.size() - 1);
        double var = 0;
        for (frame_it = all_image_frame.begin(), frame_it++; frame_it != all_image_frame.end(); frame_it++)
        {
            double dt = frame_it->second.pre_integration->sum_dt;
            Vector3d tmp_g = frame_it->second.pre_integration->delta_v / dt;
            var += (tmp_g - aver_g).transpose() * (tmp_g - aver_g);
            //cout << "frame g " << tmp_g.transpose() << endl;
        }
        var = sqrt(var / ((int)all_image_frame.size() - 1));
        //ROS_WARN("IMU variation %f!", var);
        if(var < 0.25)
        {
            ROS_INFO("IMU excitation not enough!");
            //return false;
        }
    }
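    // (Editor's note) What the block above computes: with N = all_image_frame.size(), the mean
    // acceleration over each preintegration interval is g_k = delta_v_k / dt_k, their average is
    // aver_g = (1/(N-1)) * sum_k g_k, and var ends up as the standard deviation
    // sqrt( (1/(N-1)) * sum_k ||g_k - aver_g||^2 ). If it falls below 0.25 m/s^2 the motion is
    // considered too gentle to make scale and gravity observable, but since the early return is
    // commented out the check only logs a message. The loops skip the first frame, presumably because
    // its preintegration does not span a full camera interval.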
    // prepare for the global SfM
    Quaterniond Q[frame_count + 1];
    Vector3d T[frame_count + 1];
    map<int, Vector3d> sfm_tracked_points;
    vector<SFMFeature> sfm_f;
    for (auto &it_per_id : f_manager.feature)
    {
        int imu_j = it_per_id.start_frame - 1;
        SFMFeature tmp_feature;
        tmp_feature.state = false;
        tmp_feature.id = it_per_id.feature_id;
        for (auto &it_per_frame : it_per_id.feature_per_frame)
        {
            imu_j++;
            Vector3d pts_j = it_per_frame.point;
            tmp_feature.observation.push_back(make_pair(imu_j, Eigen::Vector2d{pts_j.x(), pts_j.y()}));
        }
        sfm_f.push_back(tmp_feature);
    }
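    // (Editor's note) Each SFMFeature built above stores the feature id and its observations as
    // (frame index, normalized 2D point) pairs; sfm_f is what the global SfM below consumes, and
    // sfm_tracked_points will receive the triangulated 3D positions keyed by feature id.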
    // Within the sliding window, find a frame that has enough feature correspondences and enough
    // parallax with respect to the latest frame, and use the five-point method to compute the
    // relative rotation and translation between them
    Matrix3d relative_R;
    Vector3d relative_T;
    int l;
    if (!relativePose(relative_R, relative_T, l))
    {
        ROS_INFO("Not enough features or parallax; Move device around");
        return false;
    }
    // To make this easier to follow, relativePose is pulled out below:
| ----------------------------------------------------------------------------------------------------------
| bool Estimator::relativePose(Matrix3d &relative_R, Vector3d &relative_T, int &l)
| {
|     // Find a previous frame that contains enough correspondences and enough parallax with the newest
|     // frame; the loop returns at the first (oldest) frame that passes both checks
|     for (int i = 0; i < WINDOW_SIZE; i++)
|     {
|         vector<pair<Vector3d, Vector3d>> corres;
|         corres = f_manager.getCorresponding(i, WINDOW_SIZE);
|         if (corres.size() > 20)
|         {
|             double sum_parallax = 0;
|             double average_parallax;
|             for (int j = 0; j < int(corres.size()); j++)
|             {
|                 Vector2d pts_0(corres[j].first(0), corres[j].first(1));
|                 Vector2d pts_1(corres[j].second(0), corres[j].second(1));
|                 double parallax = (pts_0 - pts_1).norm();
|                 sum_parallax = sum_parallax + parallax;
|             }
|             average_parallax = 1.0 * sum_parallax / int(corres.size());
|             // The parallax is measured on the normalized image plane, so multiplying by 460 (a nominal
|             // focal length in pixels) turns the threshold into roughly 30 pixels. If the five-point
|             // solution fails, the loop simply moves on to the next candidate frame
|             if(average_parallax * 460 > 30 && m_estimator.solveRelativeRT(corres, relative_R, relative_T))
|             {
|                 l = i;
|                 ROS_DEBUG("average_parallax %f choose l %d and newest frame to triangulate the whole structure", average_parallax * 460, l);
|                 return true;
|             }
|         }
|     }
|     return false;
| }
| // The core five-point step: estimate the essential matrix and recover the relative pose
| bool MotionEstimator::solveRelativeRT(const vector<pair<Vector3d, Vector3d>> &corres, Matrix3d &Rotation, Vector3d &Translation)
| {
|     if (corres.size() >= 15)
|     {
|         vector<cv::Point2f> ll, rr;
|         for (int i = 0; i < int(corres.size()); i++)
|         {
|             ll.push_back(cv::Point2f(corres[i].first(0), corres[i].first(1)));
|             rr.push_back(cv::Point2f(corres[i].second(0), corres[i].second(1)));
|         }
|         cv::Mat mask;
|         cv::Mat E = cv::findFundamentalMat(ll, rr, cv::FM_RANSAC, 0.3 / 460, 0.99, mask);
|         cv::Mat cameraMatrix = (cv::Mat_<double>(3, 3) << 1, 0, 0, 0, 1, 0, 0, 0, 1);
|         cv::Mat rot, trans;
|         // recoverPose internally runs a cheirality check to pick the correct (R, t) among the four
|         // decompositions of E. The identity cameraMatrix is consistent here: ll and rr are already
|         // normalized (undistorted) image-plane coordinates, so findFundamentalMat above actually
|         // returns the essential matrix
|         int inlier_cnt = cv::recoverPose(E, ll, rr, cameraMatrix, rot, trans, mask);
|         //cout << "inlier_cnt " << inlier_cnt << endl;
|
|         Eigen::Matrix3d R;
|         Eigen::Vector3d T;
|         for (int i = 0; i < 3; i++)
|         {
|             T(i) = trans.at<double>(i, 0);
|             for (int j = 0; j < 3; j++)
|                 R(i, j) = rot.at<double>(i, j);
|         }
|
|         Rotation = R.transpose();
|         Translation = -R.transpose() * T;
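|         // (Editor's note) recoverPose returns (R, t) mapping frame-l coordinates into the latest
|         // frame (x_latest = R * x_l + t); taking R^T and -R^T * t inverts that transform, so
|         // (Rotation, Translation) is the latest frame's pose expressed in frame l, which is how
|         // relative_R / relative_T are then used to seed the latest frame's pose in sfm.construct.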
|         if(inlier_cnt > 12)
|             return true;
|         else
|             return false;
|     }
|     return false;
| }
| ----------------------------------------------------------------------------------------------------------
    GlobalSFM sfm;
    /* sfm.construct is itself a large function with many steps; the code is not pasted here, but roughly:
       1. Triangulate frame l against frame frame_num - 1 to obtain an initial set of map points.
       2. Using those map points, solve PnP for frames l+1, l+2, ..., frame_num - 2 to get their poses
          relative to l; after each frame is solved it is triangulated against frame frame_num - 1,
          producing more map points.
       3. Triangulate frames l+1, l+2, ..., frame_num - 2 against frame l.
       4. Solve PnP for frames l-1, l-2, l-3, ... against the feature list sfm_f to get their poses
          relative to l, and triangulate each of them against frame l.
       5. Triangulate the remaining feature points.
       6. Run a global bundle adjustment with Ceres, minimizing the reprojection error.
       (A sketch of the linear two-view triangulation used throughout steps 1-5 is given after this function.)
    */
    if(!sfm.construct(frame_count + 1, Q, T, l,
                      relative_R, relative_T,
                      sfm_f, sfm_tracked_points))
    {
        ROS_DEBUG("global SFM failed!");
        marginalization_flag = MARGIN_OLD;
        return false;
    }
    // Solve PnP for all frames. Because of the continue below, it is not literally all frames:
    // frames whose timestamp matches a keyframe in the window simply take the SfM pose, and only
    // the remaining (non-keyframe) frames are solved with PnP
    map<double, ImageFrame>::iterator frame_it;
    map<int, Vector3d>::iterator it;
    frame_it = all_image_frame.begin();
    for (int i = 0; frame_it != all_image_frame.end(); frame_it++)
    {
        // provide initial guess
        cv::Mat r, rvec, t, D, tmp_r;
        if((frame_it->first) == Headers[i].stamp.toSec())
        {
            frame_it->second.is_key_frame = true;
            frame_it->second.R = Q[i].toRotationMatrix() * RIC[0].transpose();
            frame_it->second.T = T[i];
            i++;
            continue;
        }
        if((frame_it->first) > Headers[i].stamp.toSec())
        {
            i++;
        }
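        // (Editor's note) Q[i] / T[i] hold the nearest keyframe's pose in the l (SfM) frame; solvePnP
        // expects the inverse, world-to-camera transform, so the pose is inverted below and converted
        // to the rvec / tvec form that serves as the initial guess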
        Matrix3d R_inital = (Q[i].inverse()).toRotationMatrix();
        Vector3d P_inital = -R_inital * T[i];
        cv::eigen2cv(R_inital, tmp_r);
        cv::Rodrigues(tmp_r, rvec);
        cv::eigen2cv(P_inital, t);
        frame_it->second.is_key_frame = false;
        vector<cv::Point3f> pts_3_vector;
        vector<cv::Point2f> pts_2_vector;
        for (auto &id_pts : frame_it->second.points)
        {
            int feature_id = id_pts.first;
            for (auto &i_p : id_pts.second)
            {
                it = sfm_tracked_points.find(feature_id);
                if(it != sfm_tracked_points.end())
                {
                    Vector3d world_pts = it->second;
                    cv::Point3f pts_3(world_pts(0), world_pts(1), world_pts(2));
                    pts_3_vector.push_back(pts_3);
                    Vector2d img_pts = i_p.second.head<2>();
                    cv::Point2f pts_2(img_pts(0), img_pts(1));
                    pts_2_vector.push_back(pts_2);
                }
            }
        }
        cv::Mat K = (cv::Mat_<double>(3, 3) << 1, 0, 0, 0, 1, 0, 0, 0, 1);
        if(pts_3_vector.size() < 6)
        {
            cout << "pts_3_vector size " << pts_3_vector.size() << endl;
            ROS_DEBUG("Not enough points for solve pnp !");
            return false;
        }
        if (!cv::solvePnP(pts_3_vector, pts_2_vector, K, D, rvec, t, 1))
        {
            ROS_DEBUG("solve pnp fail!");
            return false;
        }
        cv::Rodrigues(rvec, r);
        MatrixXd R_pnp, tmp_R_pnp;
        cv::cv2eigen(r, tmp_R_pnp);
        R_pnp = tmp_R_pnp.transpose();
        MatrixXd T_pnp;
        cv::cv2eigen(t, T_pnp);
        T_pnp = R_pnp * (-T_pnp);
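        // (Editor's note) R_pnp / T_pnp are now the camera's rotation and position in the l frame.
        // Multiplying by RIC[0]^T converts the rotation to the body (IMU) frame, matching the keyframe
        // branch above, while T is kept as the (up-to-scale) camera position; the camera-IMU translation
        // is handled later during the visual-inertial alignment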
        frame_it->second.R = R_pnp * RIC[0].transpose();
        frame_it->second.T = T_pnp;
    }
    if (visualInitialAlign())
        return true;
    else
    {
        ROS_INFO("misalign visual structure with IMU");
        return false;
    }
}
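
Steps 1 to 5 of sfm.construct above repeatedly triangulate features from two known camera poses. For reference, here is a minimal, self-contained sketch of that linear (DLT) two-view triangulation using Eigen; it follows the textbook formulation rather than quoting the VINS-Mono code verbatim:

    #include <Eigen/Dense>
    #include <iostream>

    // Triangulate one point from two views given as 3x4 projection matrices P = [R | t]
    // (world-to-camera, observations in normalized image coordinates). Each view contributes
    // two rows of the homogeneous system A * X = 0; X is the right singular vector of A with
    // the smallest singular value.
    Eigen::Vector3d triangulatePoint(const Eigen::Matrix<double, 3, 4> &P0,
                                     const Eigen::Matrix<double, 3, 4> &P1,
                                     const Eigen::Vector2d &x0, const Eigen::Vector2d &x1)
    {
        Eigen::Matrix4d A;
        A.row(0) = x0.x() * P0.row(2) - P0.row(0);
        A.row(1) = x0.y() * P0.row(2) - P0.row(1);
        A.row(2) = x1.x() * P1.row(2) - P1.row(0);
        A.row(3) = x1.y() * P1.row(2) - P1.row(1);
        Eigen::Vector4d X = Eigen::JacobiSVD<Eigen::Matrix4d>(A, Eigen::ComputeFullV).matrixV().col(3);
        return X.head<3>() / X(3);   // de-homogenize
    }

    int main()
    {
        // Small synthetic check: a point observed by a camera at the origin and one shifted along x.
        Eigen::Matrix<double, 3, 4> P0 = Eigen::Matrix<double, 3, 4>::Zero();
        P0.block<3, 3>(0, 0).setIdentity();              // camera 0 at the origin
        Eigen::Matrix<double, 3, 4> P1 = P0;
        P1(0, 3) = -1.0;                                  // camera 1 translated 1 m along +x
        Eigen::Vector3d Xw(0.5, 0.2, 4.0);                // ground-truth point
        Eigen::Vector2d x0 = (P0 * Xw.homogeneous()).hnormalized();
        Eigen::Vector2d x1 = (P1 * Xw.homogeneous()).hnormalized();
        std::cout << (triangulatePoint(P0, P1, x0, x1) - Xw).norm() << std::endl;   // ~0
        return 0;
    }

In VINS-Mono the analogous operation lives inside GlobalSFM, which walks each SFMFeature's observation list, picks two frames with known poses, and stores the result for the later PnP stages.
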
bool Estimator::visualInitialAlign()
{
    TicToc t_g;
    VectorXd x;
    /* VisualIMUAlignment contains:
       1. solveGyroscopeBias(): gyroscope bias initialization. For every pair of consecutive frames in the
          window, the relative rotation from the visual SfM is compared against the IMU preintegrated
          rotation, and the bias correction is solved as a least-squares problem via LDLT.
       2. LinearAlignment(): scale initialization. The visual SfM fixed its scale when the relative pose
          between the two chosen frames was recovered from the essential matrix, and all subsequent PnP and
          triangulation inherited that arbitrary scale, so it cannot match the metric scale sensed by the
          IMU and must be aligned here. The alignment stacks, for every pair of consecutive frames in the
          window, linear constraints relating the (up-to-scale) positions and the velocities to the IMU
          preintegrated position and velocity, and solves the resulting least-squares problem.
       3. RefineGravity(): gravity refinement. The gravity vector is refined further to improve accuracy;
          the system has the same form as LinearAlignment(), except that g is re-parameterized as
          g·ĝ + w1*b1 + w2*b2.
       The formulas for 2 and 3 are the least obvious part; a rough reconstruction is sketched after
       this function.
    */
    bool result = VisualIMUAlignment(all_image_frame, Bgs, g, x);
    if(!result)
    {
        ROS_DEBUG("solve g failed!");
        return false;
    }
    // change state
    for (int i = 0; i <= frame_count; i++)
    {
        Matrix3d Ri = all_image_frame[Headers[i].stamp.toSec()].R;
        Vector3d Pi = all_image_frame[Headers[i].stamp.toSec()].T;
        Ps[i] = Pi;
        Rs[i] = Ri;
        all_image_frame[Headers[i].stamp.toSec()].is_key_frame = true;
    }
    VectorXd dep = f_manager.getDepthVector();
    for (int i = 0; i < dep.size(); i++)
        dep[i] = -1;
    f_manager.clearDepth(dep);
    // triangulate the features on the camera poses (the camera-IMU translation tic is not applied yet)
    Vector3d TIC_TMP[NUM_OF_CAM];
    for(int i = 0; i < NUM_OF_CAM; i++)
        TIC_TMP[i].setZero();
    ric[0] = RIC[0];
    f_manager.setRic(ric);
    f_manager.triangulate(Ps, &(TIC_TMP[0]), &(RIC[0]));
    // update velocities, positions and rotations with the refined scale and gravity vector
    double s = (x.tail<1>())(0);
    for (int i = 0; i <= WINDOW_SIZE; i++)
    {
        pre_integrations[i]->repropagate(Vector3d::Zero(), Bgs[i]);
    }
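    // (Editor's note) s * Ps[i] is the metric, scaled camera position in the c0 frame; subtracting
    // Rs[i] * TIC[0] converts it to the body (IMU) position, and subtracting the same expression for
    // frame 0 re-expresses everything relative to the first frame, so Ps[0] becomes zero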
    for (int i = frame_count; i >= 0; i--)
        Ps[i] = s * Ps[i] - Rs[i] * TIC[0] - (s * Ps[0] - Rs[0] * TIC[0]);
    int kv = -1;
    map<double, ImageFrame>::iterator frame_i;
    for (frame_i = all_image_frame.begin(); frame_i != all_image_frame.end(); frame_i++)
    {
        if(frame_i->second.is_key_frame)
        {
            kv++;
            Vs[kv] = frame_i->second.R * x.segment<3>(kv * 3);
        }
    }
    for (auto &it_per_id : f_manager.feature)
    {
        it_per_id.used_num = it_per_id.feature_per_frame.size();
        if (!(it_per_id.used_num >= 2 && it_per_id.start_frame < WINDOW_SIZE - 2))
            continue;
        it_per_id.estimated_depth *= s;
    }
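    // (Editor's note) g2R(g) below returns a rotation that takes the refined gravity onto the world
    // z-axis. Yaw is not observable from gravity alone, so the yaw this would impose on Rs[0] is removed
    // again, letting the first body frame define the world yaw; rot_diff then rotates all positions,
    // rotations and velocities into this gravity-aligned world frame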
    Matrix3d R0 = Utility::g2R(g);
    double yaw = Utility::R2ypr(R0 * Rs[0]).x();
    R0 = Utility::ypr2R(Eigen::Vector3d{-yaw, 0, 0}) * R0;
    g = R0 * g;
    //Matrix3d rot_diff = R0 * Rs[0].transpose();
    Matrix3d rot_diff = R0;
    for (int i = 0; i <= frame_count; i++)
    {
        Ps[i] = rot_diff * Ps[i];
        Rs[i] = rot_diff * Rs[i];
        Vs[i] = rot_diff * Vs[i];
    }
    ROS_DEBUG_STREAM("g0 " << g.transpose());
    ROS_DEBUG_STREAM("my R0 " << Utility::R2ypr(Rs[0]).transpose());
    return true;
}
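
The alignment formulas referenced in the comment at the top of visualInitialAlign() were shown as images in the original post; as a rough reconstruction (following the notation of the VINS-Mono paper, and meant as a sketch rather than a transcription of the implementation):

1. Gyroscope bias. For each pair of consecutive frames b_k, b_{k+1} in the window, the relative rotation from the visual SfM, built from q^{c_0}_{b_k} (the SfM rotation with the extrinsic applied), should match the preintegrated rotation once a bias correction \delta b_g is applied through the preintegration Jacobian J^{\gamma}_{b_g}:

\[
\min_{\delta b_g} \sum_{k} \left\| \, 2\left[\hat\gamma_{b_k b_{k+1}}^{-1} \otimes \left(q^{c_0}_{b_k}\right)^{-1} \otimes q^{c_0}_{b_{k+1}}\right]_{xyz} - J^{\gamma}_{b_g}\,\delta b_g \right\|^2 .
\]

This is a small linear least-squares problem solved through its normal equations with LDLT, after which the preintegrations are re-propagated with the new bias.

2. Linear alignment. The unknowns are stacked as x = [ v^{b_0}_{b_0}, v^{b_1}_{b_1}, ..., v^{b_n}_{b_n}, g^{c_0}, s ]^T: the body-frame velocity of every frame, the gravity vector expressed in the c_0 (frame-l) camera frame, and the metric scale s. Each pair of consecutive frames contributes linear constraints obtained by writing the preintegrated position and velocity increments \hat\alpha_{b_k b_{k+1}} and \hat\beta_{b_k b_{k+1}} in terms of the up-to-scale SfM poses, the velocities, gravity and s; stacking all pairs gives one over-determined linear system, again solved with LDLT.

3. Gravity refinement. Since the magnitude of gravity is known (about 9.8 m/s^2), the 3-DOF gravity vector is re-parameterized around its current direction \hat{\bar g}:

\[
g^{c_0} \approx \lVert g \rVert \, \hat{\bar g} + w_1 b_1 + w_2 b_2 ,
\]

where b_1, b_2 form an orthonormal basis of the plane perpendicular to \hat{\bar g} and w_1, w_2 are small 2-DOF corrections. The linear system from step 2 is re-solved a few times with this parameterization, which is what the comment means by replacing g with g·ĝ + w1*b1 + w2*b2.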

Copyright notice: this is an original article by easonslam, released under the CC 4.0 BY-SA license; please include the original link and this notice when reposting.
Original link: https://www.cnblogs.com/easonslam/p/8881266.html