国产亚洲精品福利在线无卡一,国产精久久一区二区三区,亚洲精品无码国模,精品久久久久久无码专区不卡

當前位置: 首頁 > news >正文

怎么建設(shè)淘寶聯(lián)盟的網(wǎng)站百度搜索引擎的原理

怎么建設(shè)淘寶聯(lián)盟的網(wǎng)站,百度搜索引擎的原理,域名有永久的嗎,wordpress計算器代碼VINS中沒有直接使用opencv的去畸變函數(shù),而是自己編寫了迭代函數(shù)完成去畸變操作,主要是為了加快去畸變計算速度 本文對二者的結(jié)果精度和耗時進行了對比 VINS-Mono/Fusion與OpenCV去畸變對比1 去畸變原理2 代碼實現(xiàn)2.1 OpenCV去畸變2.2 VINS去畸變3 二者對…

VINS中沒有直接使用opencv的去畸變函數,而是自己編寫了迭代函數完成去畸變操作,主要是為了加快去畸變計算速度
本文對二者的結(jié)果精度和耗時進行了對比

VINS-Mono/Fusion與OpenCV去畸變對比

  • 1 去畸變原理
  • 2 代碼實現(xiàn)
    • 2.1 OpenCV去畸變
    • 2.2 VINS去畸變
  • 3 二者對比

1 去畸變原理

opencv去畸變操作由cv::undistortPoints實現
VINS去畸變由PinholeCamera::liftProjective實現(以針孔相機為例)

二者均采用了迭代求解,通過多次迭代逼近真值。其中cv::undistortPoints方法中默認迭代5次,并計算每次重投影誤差是否小于閾值,VINS去畸變方法只設置了迭代8次。
二者均輸入像素坐標,輸出歸一化坐標。


2 代碼實現(xiàn)

2.1 OpenCV去畸變

opencv去畸變操作由cv::undistortPoints實現,代碼在opencv-3.4.13/modules/imgproc/src
undistortPoints首先處理了輸入參數,主要實現部分調用cvUndistortPointsInternal

void undistortPoints( InputArray _src, OutputArray _dst,InputArray _cameraMatrix, InputArray _distCoeffs,InputArray _Rmat, InputArray _Pmat, TermCriteria criteria)

// Thin public wrapper: validates and normalizes the input arrays, converts
// them to the legacy C structures, then delegates the actual iterative
// undistortion to cvUndistortPointsInternal.
void undistortPoints( InputArray _src, OutputArray _dst,InputArray _cameraMatrix,InputArray _distCoeffs,InputArray _Rmat,InputArray _Pmat,TermCriteria criteria)
{
    Mat src = _src.getMat();
    Mat cameraMatrix = _cameraMatrix.getMat();
    Mat distCoeffs = _distCoeffs.getMat();
    Mat R = _Rmat.getMat();
    Mat P = _Pmat.getMat();

    // Accept the points either as an Nx1 2-channel array or as an Nx2 array;
    // transpose / reshape so the internal routine always sees Nx1 CV_32FC2/CV_64FC2.
    int npoints = src.checkVector(2);
    int depth = src.depth();
    if (npoints < 0)
        src = src.t();
    npoints = src.checkVector(2);
    CV_Assert(npoints >= 0 && src.isContinuous() && (depth == CV_32F || depth == CV_64F));

    if (src.cols == 2)
        src = src.reshape(2);

    _dst.create(npoints, 1, CV_MAKETYPE(depth, 2), -1, true);
    Mat dst = _dst.getMat();

    // Wrap the cv::Mat headers in legacy CvMat views; optional arguments
    // (R, P, distortion coefficients) become null pointers when absent.
    CvMat _csrc = cvMat(src);
    CvMat _cdst = cvMat(dst);
    CvMat _ccameraMatrix = cvMat(cameraMatrix);
    CvMat matR, matP, _cdistCoeffs;
    CvMat *pR = 0, *pP = 0, *pD = 0;
    if( !R.empty() )
        pR = &(matR = cvMat(R));
    if( !P.empty() )
        pP = &(matP = cvMat(P));
    if( !distCoeffs.empty() )
        pD = &(_cdistCoeffs = cvMat(distCoeffs));

    cvUndistortPointsInternal(&_csrc, &_cdst, &_ccameraMatrix, pD, pR, pP, criteria);
}

static void cvUndistortPointsInternal( const CvMat* _src, CvMat* _dst, const CvMat* _cameraMatrix, const CvMat* _distCoeffs, const CvMat* matR, const CvMat* matP, cv::TermCriteria criteria)

// Iterative undistortion core.
// For each input pixel: (1) normalise with the inverse intrinsics,
// (2) iteratively invert the distortion model (radial k[0],k[1],k[4..7],
// tangential k[2],k[3], thin-prism k[8..11] and tilt k[12],k[13] terms),
// (3) apply the optional rectification rotation matR and new projection matP.
// Iteration stops on criteria.maxCount (COUNT) and/or when the reprojection
// error in pixels drops below criteria.epsilon (EPS).
static void cvUndistortPointsInternal( const CvMat* _src, CvMat* _dst, const CvMat* _cameraMatrix,const CvMat* _distCoeffs,const CvMat* matR, const CvMat* matP, cv::TermCriteria criteria)
{
    CV_Assert(criteria.isValid());
    // A: camera matrix; RR: combined P*R transform; k: up to 14 distortion coeffs.
    double A[3][3], RR[3][3], k[14]={0,0,0,0,0,0,0,0,0,0,0,0,0,0};
    CvMat matA=cvMat(3, 3, CV_64F, A), _Dk;
    CvMat _RR=cvMat(3, 3, CV_64F, RR);
    cv::Matx33d invMatTilt = cv::Matx33d::eye();
    cv::Matx33d matTilt = cv::Matx33d::eye();

    // Inputs must be 1xN or Nx1 2-channel float/double point arrays of equal length.
    CV_Assert( CV_IS_MAT(_src) && CV_IS_MAT(_dst) &&
        (_src->rows == 1 || _src->cols == 1) &&
        (_dst->rows == 1 || _dst->cols == 1) &&
        _src->cols + _src->rows - 1 == _dst->rows + _dst->cols - 1 &&
        (CV_MAT_TYPE(_src->type) == CV_32FC2 || CV_MAT_TYPE(_src->type) == CV_64FC2) &&
        (CV_MAT_TYPE(_dst->type) == CV_32FC2 || CV_MAT_TYPE(_dst->type) == CV_64FC2));
    CV_Assert( CV_IS_MAT(_cameraMatrix) &&
        _cameraMatrix->rows == 3 && _cameraMatrix->cols == 3 );

    cvConvert( _cameraMatrix, &matA );

    if( _distCoeffs )
    {
        // 4, 5, 8, 12 or 14 distortion coefficients are supported.
        CV_Assert( CV_IS_MAT(_distCoeffs) &&
            (_distCoeffs->rows == 1 || _distCoeffs->cols == 1) &&
            (_distCoeffs->rows*_distCoeffs->cols == 4 ||
             _distCoeffs->rows*_distCoeffs->cols == 5 ||
             _distCoeffs->rows*_distCoeffs->cols == 8 ||
             _distCoeffs->rows*_distCoeffs->cols == 12 ||
             _distCoeffs->rows*_distCoeffs->cols == 14));

        _Dk = cvMat( _distCoeffs->rows, _distCoeffs->cols,
            CV_MAKETYPE(CV_64F,CV_MAT_CN(_distCoeffs->type)), k);

        cvConvert( _distCoeffs, &_Dk );
        // Nonzero tilt coefficients: precompute the tilt projection and its inverse.
        if (k[12] != 0 || k[13] != 0)
        {
            cv::detail::computeTiltProjectionMatrix<double>(k[12], k[13], NULL, NULL, NULL, &invMatTilt);
            cv::detail::computeTiltProjectionMatrix<double>(k[12], k[13], &matTilt, NULL, NULL);
        }
    }

    if( matR )
    {
        CV_Assert( CV_IS_MAT(matR) && matR->rows == 3 && matR->cols == 3 );
        cvConvert( matR, &_RR );
    }
    else
        cvSetIdentity(&_RR);

    if( matP )
    {
        // Fold the new projection matrix into RR so the final step is a
        // single homogeneous transform: RR = P(:, 0:3) * R.
        double PP[3][3];
        CvMat _P3x3, _PP=cvMat(3, 3, CV_64F, PP);
        CV_Assert( CV_IS_MAT(matP) && matP->rows == 3 && (matP->cols == 3 || matP->cols == 4));
        cvConvert( cvGetCols(matP, &_P3x3, 0, 3), &_PP );
        cvMatMul( &_PP, &_RR, &_RR );
    }

    // Typed aliases over the raw data; the active pair is selected by stype/dtype.
    const CvPoint2D32f* srcf = (const CvPoint2D32f*)_src->data.ptr;
    const CvPoint2D64f* srcd = (const CvPoint2D64f*)_src->data.ptr;
    CvPoint2D32f* dstf = (CvPoint2D32f*)_dst->data.ptr;
    CvPoint2D64f* dstd = (CvPoint2D64f*)_dst->data.ptr;
    int stype = CV_MAT_TYPE(_src->type);
    int dtype = CV_MAT_TYPE(_dst->type);
    int sstep = _src->rows == 1 ? 1 : _src->step/CV_ELEM_SIZE(stype);
    int dstep = _dst->rows == 1 ? 1 : _dst->step/CV_ELEM_SIZE(dtype);

    double fx = A[0][0];
    double fy = A[1][1];
    double ifx = 1./fx;
    double ify = 1./fy;
    double cx = A[0][2];
    double cy = A[1][2];

    int n = _src->rows + _src->cols - 1;
    for( int i = 0; i < n; i++ )
    {
        double x, y, x0 = 0, y0 = 0, u, v;
        if( stype == CV_32FC2 )
        {
            x = srcf[i*sstep].x;
            y = srcf[i*sstep].y;
        }
        else
        {
            x = srcd[i*sstep].x;
            y = srcd[i*sstep].y;
        }
        // (u, v): original pixel coordinates, kept for the error measurement.
        u = x; v = y;
        // Normalise to the (still distorted) image plane.
        x = (x - cx)*ifx;
        y = (y - cy)*ify;
        if( _distCoeffs ) {
            // compensate tilt distortion
            cv::Vec3d vecUntilt = invMatTilt * cv::Vec3d(x, y, 1);
            double invProj = vecUntilt(2) ? 1./vecUntilt(2) : 1;
            x0 = x = invProj * vecUntilt(0);
            y0 = y = invProj * vecUntilt(1);

            double error = std::numeric_limits<double>::max();
            // compensate distortion iteratively
            for( int j = 0; ; j++ )
            {
                // Termination check: iteration count and/or reprojection-error threshold.
                if ((criteria.type & cv::TermCriteria::COUNT) && j >= criteria.maxCount)
                    break;
                if ((criteria.type & cv::TermCriteria::EPS) && error < criteria.epsilon)
                    break;
                double r2 = x*x + y*y;
                double icdist = (1 + ((k[7]*r2 + k[6])*r2 + k[5])*r2)/(1 + ((k[4]*r2 + k[1])*r2 + k[0])*r2);
                if (icdist < 0)  // test: undistortPoints.regression_14583
                {
                    // Degenerate radial factor: give up and return the normalised input.
                    x = (u - cx)*ifx;
                    y = (v - cy)*ify;
                    break;
                }
                double deltaX = 2*k[2]*x*y + k[3]*(r2 + 2*x*x)+ k[8]*r2+k[9]*r2*r2;
                double deltaY = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y+ k[10]*r2+k[11]*r2*r2;
                x = (x0 - deltaX)*icdist;
                y = (y0 - deltaY)*icdist;

                if(criteria.type & cv::TermCriteria::EPS)
                {
                    // Re-distort the current estimate, project it back to pixels and
                    // measure the reprojection error against the original (u, v).
                    double r4, r6, a1, a2, a3, cdist, icdist2;
                    double xd, yd, xd0, yd0;
                    cv::Vec3d vecTilt;

                    r2 = x*x + y*y;
                    r4 = r2*r2;
                    r6 = r4*r2;
                    a1 = 2*x*y;
                    a2 = r2 + 2*x*x;
                    a3 = r2 + 2*y*y;
                    cdist = 1 + k[0]*r2 + k[1]*r4 + k[4]*r6;
                    icdist2 = 1./(1 + k[5]*r2 + k[6]*r4 + k[7]*r6);
                    xd0 = x*cdist*icdist2 + k[2]*a1 + k[3]*a2 + k[8]*r2+k[9]*r4;
                    yd0 = y*cdist*icdist2 + k[2]*a3 + k[3]*a1 + k[10]*r2+k[11]*r4;
                    vecTilt = matTilt*cv::Vec3d(xd0, yd0, 1);
                    invProj = vecTilt(2) ? 1./vecTilt(2) : 1;
                    xd = invProj * vecTilt(0);
                    yd = invProj * vecTilt(1);

                    double x_proj = xd*fx + cx;
                    double y_proj = yd*fy + cy;

                    error = sqrt( pow(x_proj - u, 2) + pow(y_proj - v, 2) );
                }
            }
        }

        // Apply the combined rectification/projection transform RR = P*R.
        double xx = RR[0][0]*x + RR[0][1]*y + RR[0][2];
        double yy = RR[1][0]*x + RR[1][1]*y + RR[1][2];
        double ww = 1./(RR[2][0]*x + RR[2][1]*y + RR[2][2]);
        x = xx*ww;
        y = yy*ww;

        if( dtype == CV_32FC2 )
        {
            dstf[i*dstep].x = (float)x;
            dstf[i*dstep].y = (float)y;
        }
        else
        {
            dstd[i*dstep].x = x;
            dstd[i*dstep].y = y;
        }
    }
}

2.2 VINS去畸變

// Back-projects a pixel coordinate onto the normalised image plane, removing
// lens distortion, and returns the corresponding projective ray (z = 1).
//
// Unlike cv::undistortPoints, which also checks a reprojection-error
// threshold, this runs a fixed number (8) of fixed-point iterations of
//   undistorted = distorted - distortion(undistorted)
// which is cheaper per point.
//
// Note: the original body carried an unreachable `if (0)` branch with a
// one-shot inverse distortion model (Heikkila); it was dead code and has
// been removed — behavior is unchanged.
//
// @param p  input pixel coordinate (u, v)
// @param P  output ray (x, y, 1) on the normalised plane
void
PinholeCamera::liftProjective(const Eigen::Vector2d& p, Eigen::Vector3d& P) const
{
    // Lift the pixel to the (still distorted) normalised plane using the
    // precomputed inverse intrinsics: x = (u - cx)/fx, y = (v - cy)/fy.
    double mx_d = m_inv_K11 * p(0) + m_inv_K13;
    double my_d = m_inv_K22 * p(1) + m_inv_K23;
    double mx_u, my_u;

    if (m_noDistortion)
    {
        mx_u = mx_d;
        my_u = my_d;
    }
    else
    {
        // Recursive (fixed-point) distortion model: refine the undistorted
        // estimate by repeatedly evaluating the forward distortion at it.
        int n = 8;
        Eigen::Vector2d d_u;
        distortion(Eigen::Vector2d(mx_d, my_d), d_u);
        // Approximate value
        mx_u = mx_d - d_u(0);
        my_u = my_d - d_u(1);

        for (int i = 1; i < n; ++i)
        {
            distortion(Eigen::Vector2d(mx_u, my_u), d_u);
            mx_u = mx_d - d_u(0);
            my_u = my_d - d_u(1);
        }
    }

    // Obtain a projective ray
    P << mx_u, my_u, 1.0;
}

3 二者對比

在相機坐標系下隨機生成了 20 個觀測點,并將其歸算到歸一化坐標系下作為真值。

#include <iostream>
#include <vector>
#include <random>  
#include <Eigen/Core>
#include <Eigen/Geometry>
#include <opencv2/opencv.hpp>
#include <opencv2/core/eigen.hpp>
#include <chrono>#include "Camera.h"using namespace std;int main()
{// 隨機數(shù)生成 20 個 三維特征點int featureNums=20;default_random_engine generator;vector<cv::Point2f> pts_truth;  //歸一化真值vector<cv::Point2f> uv_pts;     //像素坐標vector<cv::Point2f> cv_un_pts, vins_un_pts;  //歸一化坐標for(int i = 0; i < featureNums; ++i){uniform_real_distribution<double> xy_rand(-4, 4.0);uniform_real_distribution<double> z_rand(8., 10.);double tx = xy_rand(generator);double ty = xy_rand(generator);double tz = z_rand(generator);Eigen::Vector2d p(tx/tz, ty/tz);Eigen::Vector2d p_distorted;distortion(p, p_distorted);    //歸一化坐標畸變p_distorted+=p;pts_truth.push_back(cv::Point2f(p(0), p(1)));cv::Point2f uv(fx*p_distorted(0)+cx, fy*p_distorted(1)+cy); //投影到像素坐標uv_pts.push_back(uv);}//OpenCV去畸變,輸入像素坐標,輸出歸一化坐標chrono::steady_clock::time_point cv_t1 = chrono::steady_clock::now();cv::undistortPoints(uv_pts, cv_un_pts, K, distCoeffs);chrono::steady_clock::time_point cv_t2 = chrono::steady_clock::now();double cv_time = chrono::duration_cast<chrono::duration<double,milli>>(cv_t2-cv_t1).count();cout<<"OpenCV"<<endl;cout<<"used time: "<<cv_time/cv_un_pts.size()<<"ms"<<endl;cout<<"pixel error: "<<GetResidual(cv_un_pts, pts_truth)<<endl;//VINS去畸變chrono::steady_clock::time_point vins_t1 = chrono::steady_clock::now();liftProjective(uv_pts, vins_un_pts);chrono::steady_clock::time_point vins_t2 = chrono::steady_clock::now();double vins_time = chrono::duration_cast<chrono::duration<double, milli>>(vins_t2-vins_t1).count();cout<<"VINS"<<endl;cout<<"used time: "<<vins_time/vins_un_pts.size()<<"ms"<<endl;cout<<"pixel error: "<<GetResidual(vins_un_pts, pts_truth)<<endl;return 0;
}

輸出結(jié)果

給出了每個觀測點的平均去畸變耗時和像素坐標系下的重投影誤差。
VINS所采用的去畸變算法耗時更少,重投影誤差平均值更小,opencv方法與其相差一個數量級。

請?zhí)砑訄D片描述

http://aloenet.com.cn/news/38548.html

相關(guān)文章:

  • 陜西省高速建設(shè)集團公司網(wǎng)站全國免費發(fā)布信息平臺
  • 動態(tài)網(wǎng)站開發(fā) 機械5118
  • 南京網(wǎng)站設(shè)計我選柚米科技指數(shù)分布的期望和方差
  • 做營銷型網(wǎng)站多少錢百度關(guān)鍵詞優(yōu)化詞精靈
  • 專門做批發(fā)的網(wǎng)站seo網(wǎng)絡(luò)推廣經(jīng)理招聘
  • 企業(yè)網(wǎng)站建立制作個人免費網(wǎng)站建設(shè)
  • php網(wǎng)站制作 青島外貿(mào)seo軟件
  • 配置 tomcat 做網(wǎng)站做搜索引擎優(yōu)化的企業(yè)
  • 12個優(yōu)秀平面設(shè)計素材網(wǎng)站南京seo整站優(yōu)化技術(shù)
  • 做網(wǎng)站廣告收入會員卡營銷策劃方案
  • 做視頻找空鏡頭那個網(wǎng)站比較全seo優(yōu)化有百度系和什么
  • 電子商務(wù)網(wǎng)站建設(shè)的技術(shù)綜述論文優(yōu)化排名 生客seo
  • 安卓開發(fā)培訓(xùn)北京百度推廣優(yōu)化
  • 做封面的地圖網(wǎng)站app推廣是什么意思
  • 微信知彼網(wǎng)絡(luò)網(wǎng)站建設(shè)電商網(wǎng)站開發(fā)平臺
  • 深圳市寶安區(qū)怎么樣seo網(wǎng)站推廣的主要目的是什么
  • 鄭州鞏義網(wǎng)站建設(shè)全國人大常委會委員長
  • 網(wǎng)站備案截圖網(wǎng)站收錄免費咨詢
  • 克隆網(wǎng)站后怎么做查詢關(guān)鍵詞網(wǎng)站
  • 銀川app購物網(wǎng)站制作公司西安網(wǎng)站搭建公司
  • 專業(yè)網(wǎng)站設(shè)計公司哪家好百度投訴中心人工電話號碼
  • .net網(wǎng)站開發(fā)實例臨沂seo代理商
  • 做黃色網(wǎng)站被抓了怎么處理公司培訓(xùn)課程有哪些
  • 天龍八部私服怎么做網(wǎng)站百度風(fēng)云排行榜
  • 蘇州做網(wǎng)站推廣哪家好網(wǎng)絡(luò)營銷策劃書論文
  • 專做動漫av的網(wǎng)站市場營銷
  • 網(wǎng)站建設(shè)方向百度搜索風(fēng)云榜總榜
  • 廣州企業(yè)推廣seo工資待遇 seo工資多少
  • 有沒有做維修的網(wǎng)站哪有免費的網(wǎng)站
  • 網(wǎng)站收縮欄免費自助建站網(wǎng)站