Reposted from: http://blog.csdn.net/lxiaoxiaot/article/details/6539834
The moments of an irregular region come from interpreting a normalized gray-level image function as the probability density of a two-dimensional random variable.
The properties of this random variable can be described by its statistical characteristics, the moments. By assuming that non-zero pixel values mark the region, moments can be used to describe both binary and gray-level regions.
M_pq = Σ_i Σ_j i^p * j^q * f(i,j)
where i and j are the coordinates of a point in the region (pixel coordinates in a digital image) and f(i,j) is the pixel value at that point.
Let Xc and Yc denote the coordinates of the region's centroid; then:
Xc = M10 / M00
Yc = M01 / M00
For a binary image, M00 is simply the area of the region.
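To make the formula concrete, the following sketch evaluates M00, M10 and M01 directly from the double sum on an 8-bit single-channel IplImage; the helper name rawMoments and the treat-as-binary flag are illustrative choices, not part of the sources quoted here.
#include "cv.h"

/* Evaluate M00, M10 and M01 by the raw double sum
 * M_pq = Σ_i Σ_j i^p * j^q * f(i,j)
 * on an 8-bit single-channel image. If binary != 0, every non-zero
 * pixel is counted as 1, matching the binary-region interpretation. */
static void rawMoments(IplImage *img, int binary,
                       double *m00, double *m10, double *m01)
{
    *m00 = *m10 = *m01 = 0.0;
    for (int y = 0; y < img->height; y++)        /* j: row index    */
    {
        for (int x = 0; x < img->width; x++)     /* i: column index */
        {
            double f = CV_IMAGE_ELEM(img, uchar, y, x);
            if (binary)
                f = (f != 0) ? 1.0 : 0.0;
            *m00 += f;         /* i^0 * j^0 * f(i,j) */
            *m10 += x * f;     /* i^1 * j^0 * f(i,j) */
            *m01 += y * f;     /* i^0 * j^1 * f(i,j) */
        }
    }
}
On a binary image this reduces to counting the region's pixels (M00 = area) and summing their coordinates, which is exactly why Xc = M10/M00 and Yc = M01/M00 give the centroid.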
In OpenCV, the function cvMoments computes the moments of a binary image, and cvGetSpatialMoment then retrieves the spatial moment of a given order.
For example:
// Compute the centroid of the object in the binarized image imgYellowThresh
CvMoments *moments = (CvMoments*)malloc(sizeof(CvMoments));
cvMoments(imgYellowThresh, moments, 1);
// The actual moment values
double moment10 = cvGetSpatialMoment(moments, 1, 0);
double moment01 = cvGetSpatialMoment(moments, 0, 1);
double area = cvGetSpatialMoment(moments, 0, 0);
// The centroid
double posX = moment10 / area;
double posY = moment01 / area;
free(moments);
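The snippet above assumes a binary mask imgYellowThresh that is not defined in the original post. One common way to obtain such a mask is HSV color thresholding; the sketch below is only an assumption about where imgYellowThresh might come from, and the HSV range for "yellow" is an illustrative guess.
// Hypothetical preparation of imgYellowThresh from a BGR image "frame.jpg".
IplImage *frame = cvLoadImage("frame.jpg", CV_LOAD_IMAGE_COLOR);
IplImage *imgHSV = cvCreateImage(cvGetSize(frame), 8, 3);
IplImage *imgYellowThresh = cvCreateImage(cvGetSize(frame), 8, 1);
cvCvtColor(frame, imgHSV, CV_BGR2HSV);                      // BGR -> HSV
cvInRangeS(imgHSV, cvScalar(20, 100, 100, 0),               // lower HSV bound (guessed)
           cvScalar(30, 255, 255, 0), imgYellowThresh);     // upper HSV bound (guessed)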
Reposted from: http://blog.163.com/forever_871226/blog/static/34424308201141851736984/
/** Compute the centroid of a binary image
 * @param[in]  src    input image to process
 * @param[out] center centroid coordinates
 * @retval  0 success
 * @retval -1 failure
 * @note the input image must be binarized
 * @note xc = M10/M00, yc = M01/M00, where M_{x_order,y_order} = SUM over (x,y) of I(x,y) * x^x_order * y^y_order
 */
static int aoiGravityCenter(IplImage *src, CvPoint &center)
{
    //if(!src)
    //    return GRAVITYCENTER__SRC_IS_NULL;
    double m00, m10, m01;
    CvMoments moment;

    cvMoments(src, &moment, 1);
    m00 = cvGetSpatialMoment(&moment, 0, 0);
    if (m00 == 0)
        return -1;                  // empty image: the centroid is undefined
    m10 = cvGetSpatialMoment(&moment, 1, 0);
    m01 = cvGetSpatialMoment(&moment, 0, 1);
    center.x = (int)(m10 / m00);
    center.y = (int)(m01 / m00);
    return 0;
}
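A minimal call to this helper could look like the following; the file name, the threshold value 128 and the printf (which assumes <stdio.h> is included) are placeholders rather than values from the original post.
IplImage *img = cvLoadImage("blob.png", CV_LOAD_IMAGE_GRAYSCALE);
if (img)
{
    cvThreshold(img, img, 128, 255, CV_THRESH_BINARY);   // ensure the input is binary
    CvPoint center;
    if (aoiGravityCenter(img, center) == 0)
        printf("centroid: (%d, %d)\n", center.x, center.y);
    cvReleaseImage(&img);
}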
Reposted from: http://www.opencv.org.cn/forum/viewtopic.php?p=1016
#include "cv.h"
#include "highgui.h"
int main( int argc, char** argv )
{
    IplImage* src;
    CvMoments moments;
    CvMat *region;
    CvPoint pt1, pt2;
    double m00 = 0, m10, m01, mu20, mu11, mu02, inv_m00;
    double a, b, c;
    int xc, yc;

    // The first command-line argument gives the image file name.
    if( (src = cvLoadImage("two.bmp", 0)) != 0 )
    //if( (src = cvLoadImage("fbb.jpg", 0)) != 0 )
    //if( argc == 2 && (src = cvLoadImage(argv[1], 0)) != 0 )
    {
        IplImage* dst = cvCreateImage( cvGetSize(src), 8, 3 );
        CvMemStorage* storage = cvCreateMemStorage(0);
        CvSeq* contour = 0;

        cvThreshold( src, src, 100, 255, CV_THRESH_BINARY );   // 100 is the threshold
        cvNot( src, src );
        cvNamedWindow( "Source", 1 );
        cvShowImage( "Source", src );

        //cvFindContours( src, storage, &contour, sizeof(CvContour), CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE, cvPoint(0,0) );
        cvFindContours( src, storage, &contour, sizeof(CvContour), CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, cvPoint(0,0) );
        cvZero( dst );

        for( ; contour != 0; contour = contour->h_next )
        {
            //CvScalar color = CV_RGB( rand()&255, rand()&255, rand()&255 );
            CvScalar color = CV_RGB( 255, 0, 0 );
            /* replace CV_FILLED with 1 to draw only the contour outline */
            cvDrawContours( dst, contour, color, color, -1, CV_FILLED, 8, cvPoint(0,0) );

            contour = cvApproxPoly( contour, sizeof(CvContour), storage, CV_POLY_APPROX_DP, 3, 1 );
            //CvRect* r = (CvRect*)cvGetSeqElem( contour, 1 );
            region = (CvMat*)contour;        // cvMoments also accepts a contour sequence
            cvMoments( region, &moments, 0 );
            //cvMoments( &contour, &moments, 0 );
            // cvDrawContours( cnt_img, _contours, CV_RGB(255,0,0), CV_RGB(0,255,0), _levels, 3, CV_AA, cvPoint(0,0) );

            m00  = moments.m00;
            m10  = moments.m10;
            m01  = moments.m01;
            mu11 = moments.mu11;
            mu20 = moments.mu20;
            mu02 = moments.mu02;
            //if( fabs(m00) < DBL_EPSILON ) break;
            inv_m00 = 1. / m00;
            xc = cvRound( m10 * inv_m00 );
            yc = cvRound( m01 * inv_m00 );
            a = mu20 * inv_m00;
            b = mu11 * inv_m00;
            c = mu02 * inv_m00;

            // Mark the centroid with a small green cross
            pt1.x = xc - 1; pt1.y = yc;
            pt2.x = xc + 1; pt2.y = yc;
            cvLine( dst, pt1, pt2, CV_RGB(0,255,0), 2, CV_AA, 0 );
            pt1.x = xc; pt1.y = yc - 1;
            pt2.x = xc; pt2.y = yc + 1;
            cvLine( dst, pt1, pt2, CV_RGB(0,255,0), 2, CV_AA, 0 );
        }

        cvNamedWindow( "Components", 1 );
        cvShowImage( "Components", dst );
        cvWaitKey(0);
    }
    return 0;
}
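The loop above fills a, b and c (the normalized central moments mu20/m00, mu11/m00 and mu02/m00) but never uses them. They are normally used to estimate a region's orientation via the standard formula theta = 0.5 * atan2(2*mu11, mu20 - mu02); the helper below is a sketch of that, not part of the original forum post.
#include "cv.h"
#include <math.h>

/* Orientation (in radians) of a region's principal axis, computed from
 * the central moments returned by cvMoments. */
static double regionOrientation(const CvMoments *m)
{
    double inv_m00 = 1.0 / m->m00;
    double a = m->mu20 * inv_m00;   /* normalized mu20 */
    double b = m->mu11 * inv_m00;   /* normalized mu11 */
    double c = m->mu02 * inv_m00;   /* normalized mu02 */
    return 0.5 * atan2(2.0 * b, a - c);
}
Calling regionOrientation(&moments) inside the contour loop would give, for each component, the angle of its major axis in addition to the centroid (xc, yc).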