Finding the principal orientation of an object with OpenCV

The idea: binarize the image with an Otsu threshold, extract the object's contours, and run PCA on the contour points. The eigenvector with the largest eigenvalue gives the object's main axis, which is drawn on the image together with the centroid and the minimum-area bounding rectangle.

#include <opencv2/opencv.hpp>
#include <iostream>
#include <string>
#include <vector>
#include <cmath>

using namespace cv;
using namespace std;

void drawAxis(Mat& img, Point p, Point q, Scalar colour, const float scale = 0.2)
{
	double angle = atan2((double)p.y - q.y, (double)p.x - q.x); // angle in radians
	double hypotenuse = sqrt((double)(p.y - q.y) * (p.y - q.y) + (p.x - q.x) * (p.x - q.x));
	circle(img, q, 4, Scalar(255, 0, 0), 1, 8);
	// Here we lengthen the arrow by a factor of scale
	q.x = (int)(p.x - scale * hypotenuse * cos(angle));
	q.y = (int)(p.y - scale * hypotenuse * sin(angle));
	line(img, p, q, colour, 1, LINE_AA);
	//circle(img, p, 4, Scalar(0, 0, 255), 1, 8);
	circle(img, q, 4, Scalar(0, 0, 255), 1, 8);
	// create the arrow hooks
	p.x = (int)(q.x + 9 * cos(angle + CV_PI / 4));
	p.y = (int)(q.y + 9 * sin(angle + CV_PI / 4));
	//line(img, p, q, colour, 1, LINE_AA);
	p.x = (int)(q.x + 9 * cos(angle - CV_PI / 4));
	p.y = (int)(q.y + 9 * sin(angle - CV_PI / 4));
	//line(img, p, q, colour, 1, LINE_AA);
}

double getOrientation(const vector<Point> &pts, Mat &img)
{
	//Construct a buffer used by the pca analysis
	int sz = static_cast<int>(pts.size());
	Mat data_pts = Mat(sz, 2, CV_64F);
	for (int i = 0; i < data_pts.rows; i++)
	{
		data_pts.at<double>(i, 0) = pts[i].x;
		data_pts.at<double>(i, 1) = pts[i].y;
	}
	//Perform PCA analysis
	PCA pca_analysis(data_pts, Mat(), PCA::DATA_AS_ROW);
	//Store the center of the object
	Point cntr = Point(static_cast<int>(pca_analysis.mean.at<double>(0, 0)),
		static_cast<int>(pca_analysis.mean.at<double>(0, 1)));
	//Store the eigenvalues and eigenvectors
	vector<Point2d> eigen_vecs(2);
	vector<double> eigen_val(2);
	for (int i = 0; i < 2; i++)
	{
		eigen_vecs[i] = Point2d(pca_analysis.eigenvectors.at<double>(i, 0),
			pca_analysis.eigenvectors.at<double>(i, 1));
		eigen_val[i] = pca_analysis.eigenvalues.at<double>(i);
	}
	// Draw the principal components
	circle(img, cntr, 3, Scalar(255, 0, 255), 2);
	Point p1 = cntr + 0.02 * Point(static_cast<int>(eigen_vecs[0].x * eigen_val[0]), static_cast<int>(eigen_vecs[0].y * eigen_val[0]));
	Point p2 = cntr - 0.02 * Point(static_cast<int>(eigen_vecs[1].x * eigen_val[1]), static_cast<int>(eigen_vecs[1].y * eigen_val[1]));
	drawAxis(img, cntr, p1, Scalar(0, 255, 0), 1);
	drawAxis(img, cntr, p2, Scalar(255, 255, 0), 5);
	double angle = atan2(eigen_vecs[0].y, eigen_vecs[0].x); // orientation in radians
	return angle;
}
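getOrientation returns the angle of the first principal axis in radians. A minimal usage sketch (contour and vis are hypothetical names for an object contour and the image being annotated) that converts the result to degrees for printing:

	// Hypothetical usage: `contour` is a vector<Point> outline, `vis` the image to draw on.
	double angle_rad = getOrientation(contour, vis);
	double angle_deg = angle_rad * 180.0 / CV_PI; // radians -> degrees
	std::cout << "principal axis angle: " << angle_deg << " deg" << std::endl;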

int OtsuAlgThreshold(const Mat &image)
{
	if (image.channels() != 1)
	{
		std::cout << "Please input a single-channel grayscale image!" << endl;
		return 0;
	}
	int T = 0;             // Otsu threshold
	double varValue = 0;   // best between-class variance found so far
	double w0 = 0;         // proportion of foreground pixels
	double w1 = 0;         // proportion of background pixels
	double u0 = 0;         // mean gray level of the foreground
	double u1 = 0;         // mean gray level of the background
	double Histogram[256] = { 0 }; // gray-level histogram: index = gray value, value = pixel count
	uchar *data = image.data;
	double totalNum = image.rows * image.cols; // total number of pixels
	// Build the gray-level histogram
	for (int i = 0; i < image.rows; i++)   // rows/cols deliberately not hoisted out, for readability
	{
		for (int j = 0; j<image.cols; j++)
		{
			Histogram[data[i*image.step + j]]++;
		}
	}
	for (int i = 0; i < 255; i++)
	{
		// Reset the accumulators for this candidate threshold
		w1 = 0;		u1 = 0;		w0 = 0;		u0 = 0;
		//*********** background statistics **************************
		for (int j = 0; j <= i; j++) // pixels at or below the candidate threshold
		{
			w1 += Histogram[j];     // background pixel count
			u1 += j * Histogram[j]; // background gray-level sum
		}
		if (w1 == 0) // skip thresholds with no background pixels
		{
			continue;
		}
		u1 = u1 / w1;       // background mean gray level
		w1 = w1 / totalNum; // background pixel proportion

		//*********** foreground statistics **************************
		for (int k = i + 1; k < 256; k++) // 256 (not 255) so gray level 255 is counted
		{
			w0 += Histogram[k];     // foreground pixel count
			u0 += k * Histogram[k]; // foreground gray-level sum
		}
		if (w0 == 0) // no foreground pixels left, stop searching
		{
			break;
		}
		u0 = u0 / w0;       // foreground mean gray level
		w0 = w0 / totalNum; // foreground pixel proportion

		//*********** between-class variance ******************************
		double varValueI = w0 * w1 * (u1 - u0) * (u1 - u0); // variance for this candidate threshold
		if (varValue < varValueI)
		{
			varValue = varValueI;
			T = i;
		}
	}
	return T;
}
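As a cross-check, OpenCV's built-in threshold can compute the Otsu threshold directly; this is only a sketch of the alternative call (gray is a hypothetical single-channel input), not what the code below uses:

	// Alternative sketch: let OpenCV pick the Otsu threshold itself.
	// The return value of threshold() is the selected threshold.
	Mat binary;
	double otsu_T = threshold(gray, binary, 0, 255, THRESH_BINARY | THRESH_OTSU);
	std::cout << "built-in Otsu threshold: " << otsu_T << std::endl;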

void findImageSymmetryline() {


	Mat src = imread("C:\\Users\\admin_user\\Desktop\\2019-11-04_10-27-05_20200.jpg");
	Mat src_gray_1, src_gray;
	cvtColor(src, src_gray_1, COLOR_BGR2GRAY); // COLOR_BGR2GRAY replaces the legacy CV_BGR2GRAY macro

	Mat resize_src_copy;
	resize(src, resize_src_copy, Size(200, 200), 0, 0);
	resize(src_gray_1, src_gray, Size(200, 200), 0, 0);

	
	src_gray.row(0).setTo(Scalar::all(255));
	src_gray.row(src_gray.rows - 1).setTo(Scalar::all(255));
	src_gray.col(0).setTo(Scalar::all(255));
	src_gray.col(src_gray.cols - 1).setTo(Scalar::all(255));
	//namedWindow("thresh", CV_WINDOW_NORMAL);
	int index_thresh = OtsuAlgThreshold(src_gray);
	std::cout << "阈值:" << index_thresh << endl;
	imshow("原始图", src);

	Mat dst;
	cv::equalizeHist(src_gray, dst);
	//threshold(src_gray, dst, index_thresh, max_thresh, 0);
	threshold(dst, dst, index_thresh, 255, THRESH_BINARY);
	imshow("Binary image", dst);
	waitKey(0);
	vector<vector<Point> > contours;
	vector<Vec4i> hirecy;
	RotatedRect minRectBox;
	Point2f point_fouth[4];
	vector<Point2f> res_point_fouth_vec;
	findContours(dst, contours, hirecy, RETR_CCOMP, CHAIN_APPROX_SIMPLE);
	string str_text = "";
	vector<vector<Point>> contours_ploy(contours.size());
	vector<vector<Point>> conhull_ploy(contours.size());
	int useIndex = 0;
	RotatedRect useWideRect;
	Point pt1;
	for (size_t i = 0; i < contours.size(); i++)
	{

		Rect z_RectBox = boundingRect(contours[i]);
		std::cout << "面积:" << z_RectBox.area() << endl;
		//if (z_RectBox.area() > 16*16 && z_RectBox.area()
		if (z_RectBox.area() > 100 * 100 && z_RectBox.area()<resize_src_copy.rows*resize_src_copy.cols)
		{

			//vector tmp_contours;
			approxPolyDP(contours[i], contours_ploy[i], 1, true);
			convexHull(contours_ploy[i], conhull_ploy[i], false, true);
			//drawContours(resize_src_copy, contours, i, Scalar(255, 0, 0), 1, 8);
			drawContours(resize_src_copy, contours_ploy, i, Scalar(255, 255, 0), 1, 8);
			//drawContours(resize_src_copy, conhull_ploy, i, Scalar(255, 0, 255), 1, 8);
			getOrientation(contours_ploy[i], resize_src_copy);
			Moments moment;
			moment = moments(Mat(contours[i]), false);

			if (moment.m00 != 0) // avoid division by zero
			{
				pt1.x = cvRound(moment.m10 / moment.m00); // centroid x
				pt1.y = cvRound(moment.m01 / moment.m00); // centroid y
			}
			circle(resize_src_copy, pt1, 5, Scalar(0, 0, 255), 1, 8);

			minRectBox = minAreaRect(contours[i]);
			minRectBox.points(point_fouth);

			//int minRectWidth= minRectBox.size()

			double angle = minRectBox.angle;
			str_text += to_string(int(angle)) + " ";
			for (size_t j = 0; j < 4; j++)
			{
				res_point_fouth_vec.push_back(point_fouth[j]);
				line(resize_src_copy, point_fouth[j], point_fouth[(j + 1) % 4], Scalar(0, 255, 0), 1, 8);
				//imshow("thresh", resize_src_copy);
				//waitKey(0);
			}


			for (size_t j = 0; j <2; j++)
			{
				line(resize_src_copy, point_fouth[j], point_fouth[j + 2], Scalar(255, 0, 0), 1, 8);
			}

			useIndex = i;
			useWideRect = minRectBox;
			//rectangle(dst,z_RectBox,Scalar(0,0,255),1,8);
		}
		putText(resize_src_copy, str_text, Point(5, 10), FONT_HERSHEY_SIMPLEX, 0.5, Scalar(0, 255, 255), 1, 8, 0);
		imshow("thresh", resize_src_copy);
		cv::waitKey(0);
		//imshow("thresh", dst);
		//waitKey(0);

	}
	std::cout << "凸包个数:" << conhull_ploy[useIndex].size() << std::endl;

#if 0
	vector<pair<double, double> > kb_vec;
	vector<Point> SymmetryPoint_vec;  // candidate symmetry points
	kb_vec = findImageSymmetrypoint(res_point_fouth_vec);
	for (size_t i = 0; i < contours[useIndex].size(); i++)
	{
		Point tmppoint;
		tmppoint = contours[useIndex].at(i);

		for (size_t j = 0; j < kb_vec.size(); j++)
		{
			double Y = tmppoint.y;
			double X = tmppoint.x;
			double Y_match = kb_vec[j].first*X + kb_vec[j].second;
			if (abs(Y - Y_match) <= 1)
			{
				SymmetryPoint_vec.push_back(tmppoint);
			}
		}
	}

	std::cout << "一共有:" << SymmetryPoint_vec.size() << std::endl;
#if 0
	for (size_t i = 0; i < SymmetryPoint_vec.size(); i++)
	{
		circle(resize_src_copy, SymmetryPoint_vec[i], 4, Scalar(0, 255, 0), 1, 8);
	}
	imshow("thresh", resize_src_copy);
	waitKey(0);
#endif

	vector<Point> Del_SymmetryPoint_vec;
	if (SymmetryPoint_vec.size() >= 1)
	{
		Del_SymmetryPoint_vec.push_back(SymmetryPoint_vec[0]);
	}

	for (size_t i = 1; i < SymmetryPoint_vec.size(); i++)
	{
		bool tmpflag = true;
		for (size_t j = 0; j < Del_SymmetryPoint_vec.size(); j++)
		{
			double distance = 0;
			distance = sqrt(pow((SymmetryPoint_vec[i].x - Del_SymmetryPoint_vec[j].x), 2) + pow((SymmetryPoint_vec[i].y - Del_SymmetryPoint_vec[j].y), 2));
			if (distance<50)
			{
				tmpflag = false;
				break;
			}
		}
		if (tmpflag)
		{
			Del_SymmetryPoint_vec.push_back(SymmetryPoint_vec[i]);
		}
	}
	std::cout << "删除后一共有:" << Del_SymmetryPoint_vec.size() << std::endl;


	Point WideCenter = useWideRect.center;

	for (size_t i = 0; i < Del_SymmetryPoint_vec.size(); i++)
	{
		Point P1 = Del_SymmetryPoint_vec[i];

		for (size_t j = 0; j < Del_SymmetryPoint_vec.size(); j++)
		{
			if (i != j)
			{
				Point P2 = Del_SymmetryPoint_vec[j];
				int P1_To_WideCenter_Dis = 0;
				int P2_To_WideCenter_Dis = 0;
				P1_To_WideCenter_Dis = sqrt(pow((P1.x - WideCenter.x), 2) + pow((P1.y - WideCenter.y), 2));
				P2_To_WideCenter_Dis = sqrt(pow((P2.x - WideCenter.x), 2) + pow((P2.y - WideCenter.y), 2));

				if (abs(P1_To_WideCenter_Dis - P2_To_WideCenter_Dis) < 10)
				{
					line(resize_src_copy, WideCenter, P1, Scalar(0, 0, 0), 2, 8);
					line(resize_src_copy, WideCenter, P2, Scalar(0, 0, 0), 2, 8);

				}
			}
		}

	}
	imshow("thresh", resize_src_copy);
	waitKey(0);



#endif

#if 0
	vector<pair<Point, Point> > SymmetryPoint_vec;
	std::cout << contours[useIndex].size() << std::endl;
	int contours_len = contours[useIndex].size();
	//Point WideCenter = useWideRect.center;
	Point WideCenter = pt1;
	// Record pairs of polygon points whose distances to the center are nearly equal (i.e. roughly symmetric about the center)
	for (size_t i = 0; i < contours_ploy[useIndex].size(); i++)
	{
		Point P1 = contours_ploy[useIndex].at(i);
		for (size_t j = i + 1; j < contours_ploy[useIndex].size(); j++)
		{
			Point P2 = contours_ploy[useIndex].at(j);


			double angle = 0;
			angle = angledegree(WideCenter, P1, P2);
			if (angle >= 20 && angle<150)
			{
				double distance1 = 0;
				double distance2 = 0;
				distance1 = sqrt(pow((P1.x - WideCenter.x), 2) + pow((P1.y - WideCenter.y), 2));
				distance2 = sqrt(pow((P2.x - WideCenter.x), 2) + pow((P2.y - WideCenter.y), 2));

				if (abs(distance1 - distance2)<2)
				{

					pair<Point, Point> tmppair;
					tmppair = make_pair(P1, P2);
					SymmetryPoint_vec.push_back(tmppair);
					break;
				}
			}
		}
#if 1
		if (SymmetryPoint_vec.size() >= 2)
		{
			break;
		}
#endif	
	}

	std::cout << SymmetryPoint_vec.size() << std::endl;

#if 0
	// Draw the symmetric point pairs
	for (size_t i = 0; i < SymmetryPoint_vec.size(); i++)
	{
		circle(resize_src_copy, SymmetryPoint_vec[i].first, 4, Scalar(0, 0, 255), 1, 8);
		circle(resize_src_copy, SymmetryPoint_vec[i].second, 4, Scalar(0, 0, 255), 1, 8);
		line(resize_src_copy, WideCenter, SymmetryPoint_vec[i].first, Scalar(0, 0, 0), 2, 8);
		line(resize_src_copy, WideCenter, SymmetryPoint_vec[i].second, Scalar(0, 0, 0), 2, 8);

		imshow("thresh", resize_src_copy);
		cv::waitKey(0);
	}
#endif
#endif

	// Walk the convex-hull points; the active branch finds the hull point farthest from the centroid pt1
	// (the disabled #if 0 branch instead looked for the two mutually farthest hull points)
	double max_len = -1;
	int p1_loc = -1;
	int p2_loc = -1;
	for (size_t i = 0; i < conhull_ploy[useIndex].size(); i++)
	{
		Point p1 = conhull_ploy[useIndex].at(i);
#if 0
		for (size_t j = i + 1; j < conhull_ploy[useIndex].size(); j++)
		{
			Point p2 = conhull_ploy[useIndex].at(j);
			double p1p2dis = 0;
			p1p2dis = sqrt(pow((p1.x - p2.x), 2) + pow((p1.y - p2.y), 2));
			if (p1p2dis > max_len)
			{
				max_len = p1p2dis;
				p1_loc = i;
				p2_loc = j;
			}
		}
#endif
		double p1p2dis = 0;
		p1p2dis = sqrt(pow((p1.x - pt1.x), 2) + pow((p1.y - pt1.y), 2));
		if (p1p2dis > max_len)
		{
			max_len = p1p2dis;
			p1_loc = i;
		}

	}

	circle(resize_src_copy, conhull_ploy[useIndex].at(p1_loc), 4, Scalar(0, 0, 255), 1, 8);
	//circle(resize_src_copy, conhull_ploy[useIndex].at(p2_loc), 4, Scalar(0, 0, 255), 1, 8);
	line(resize_src_copy, conhull_ploy[useIndex].at(p1_loc), pt1, Scalar(0, 0, 0), 2, 8);


	imshow("thresh", resize_src_copy);
	cv::waitKey(0);
}
int main()
{
	findImageSymmetryline();
}
