// Reference notes: https://note.youdao.com/ynoteshare/index.html?id=e0df335c7bba4d7633874375539c228a&type=note&_time=1648531096400
// Introduces OpenCV
package com.acts.opencv.base;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletResponse;
import org.opencv.core.Core;
import org.opencv.core.Core.MinMaxLocResult;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import com.acts.opencv.common.utils.Constants;
import com.acts.opencv.common.utils.OpenCVUtil;
import com.acts.opencv.common.web.BaseController;
@Controller
@RequestMapping(value = "rect")
public class RectificationController extends BaseController {

    private static final Logger logger = LoggerFactory.getLogger(RectificationController.class);

    /**
     * Image rectification via perspective transform.
     *
     * Locates four registration (anchor) marks on a scanned page, then warps the
     * page so the marks land on fixed reference coordinates.
     *
     * @param response  HTTP response the rectified image is streamed into
     * @param imagefile image path relative to {@code Constants.PATH}
     * @param markType  1 = locate marks by template matching, 2 = by Hough circle detection
     */
    @RequestMapping(value = "rectification")
    public void rectification(HttpServletResponse response, String imagefile, Integer markType) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        logger.info("\n 图像矫正透视变换");
        String sourcePath = Constants.PATH + imagefile;
        logger.info("url==============" + sourcePath);
        // Color copy is warped; grayscale copy is only used for debug drawing.
        Mat source1 = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_COLOR);
        Mat source2 = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_GRAYSCALE);
        // The four anchors, filled in by one of the detectors below.
        Point anchor01 = new Point();
        Point anchor02 = new Point();
        Point anchor03 = new Point();
        Point anchor04 = new Point();
        if (markType == 1) { // template matching against a known mark image
            String matchPath = Constants.PATH + Constants.SOURCE_IMAGE_PATH + "z1_temp.png";
            fetchAnchorPoints1(sourcePath, matchPath, anchor01, anchor02, anchor03, anchor04);
        } else if (markType == 2) { // Hough circle detection
            fetchAnchorPoints2(sourcePath, anchor01, anchor02, anchor03, anchor04);
        }
        MatOfPoint mop = new MatOfPoint(anchor01, anchor02, anchor03, anchor04);
        MatOfPoint2f mat2f = new MatOfPoint2f();
        MatOfPoint2f refmat2f = new MatOfPoint2f();
        mop.convertTo(mat2f, CvType.CV_32FC1);
        // Debug output: draw the detected anchor quadrilateral on the grayscale copy.
        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
        contours.add(mop);
        Core.polylines(source2, contours, true, new Scalar(0, 0, 255), 1);
        String destPath = Constants.PATH + Constants.DEST_IMAGE_PATH + "rect1.png";
        Highgui.imwrite(destPath, source2);
        // Fixed reference positions the four marks are mapped onto.
        // NOTE(review): hard-coded for one specific form layout — TODO confirm.
        Point point11 = new Point(99, 200);
        Point point12 = new Point(2317, 200);
        Point point13 = new Point(99, 3300);
        Point point14 = new Point(2317, 3300);
        Mat dst_vertices = new MatOfPoint(point11, point12, point13, point14);
        dst_vertices.convertTo(refmat2f, CvType.CV_32FC1);
        Mat warpMatrix = Imgproc.getPerspectiveTransform(mat2f, refmat2f);
        Mat dst = new Mat(source1.rows(), source1.cols(), source1.type());
        System.out.println(source1.rows() + " " + source1.cols());
        // borderMode 0 = BORDER_CONSTANT; uncovered border pixels become white.
        Imgproc.warpPerspective(source1, dst, warpMatrix, dst.size(), Imgproc.INTER_LINEAR, 0,
                new Scalar(255, 255, 255));
        destPath = Constants.PATH + Constants.DEST_IMAGE_PATH + "rect2.png";
        Highgui.imwrite(destPath, dst);
        try {
            byte[] imgebyte = OpenCVUtil.covertMat2Byte1(dst);
            renderImage(response, imgebyte);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Locates the four anchor marks by template matching, one per page quadrant.
     * Each anchor's coordinates are written into the corresponding out-parameter
     * Point, translated back into full-page coordinates.
     *
     * @param sourcePath full path of the page image
     * @param matchPath  full path of the mark template image
     * @param anchor01   out: top-left mark
     * @param anchor02   out: top-right mark
     * @param anchor03   out: bottom-left mark
     * @param anchor04   out: bottom-right mark
     */
    public static void fetchAnchorPoints1(String sourcePath, String matchPath, Point anchor01, Point anchor02,
            Point anchor03,
            Point anchor04) {
        Mat colorimage = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_COLOR);
        Mat mattmp = Highgui.imread(matchPath, Highgui.CV_LOAD_IMAGE_COLOR);
        int srcRows = colorimage.rows();
        int srcCols = colorimage.cols();
        // Split the page into quadrants; each contains exactly one mark.
        Mat src01 = colorimage.submat(new Rect(0, 0, srcCols / 2, srcRows / 2));
        Mat src02 = colorimage.submat(new Rect(srcCols / 2, 0, srcCols / 2, srcRows / 2));
        Mat src03 = colorimage.submat(new Rect(0, srcRows / 2, srcCols / 2, srcRows / 2));
        Mat src04 = colorimage.submat(new Rect(srcCols / 2, srcRows / 2, srcCols / 2, srcRows / 2));
        matchAnchor(mattmp, src01, anchor01, 0, 0, "minmaxLoc1", colorimage, "rect_c1.png");
        matchAnchor(mattmp, src02, anchor02, srcCols / 2, 0, "minmaxLoc2", colorimage, "rect_c2.png");
        matchAnchor(mattmp, src03, anchor03, 0, srcRows / 2, "minmaxLoc3", colorimage, "rect_c3.png");
        matchAnchor(mattmp, src04, anchor04, srcCols / 2, srcRows / 2, "minmaxLoc4", colorimage, "rect_c4.png");
    }

    /**
     * Template-matches one quadrant, writes the best-match location (plus the
     * quadrant offset) into {@code anchor}, and saves a debug image with the
     * match circled. Argument order of matchTemplate is kept exactly as the
     * original code had it.
     */
    private static void matchAnchor(Mat templ, Mat quadrant, Point anchor, double offsetX, double offsetY,
            String label, Mat debugImage, String debugFile) {
        Mat imagematch = new Mat();
        // TM_CCOEFF_NORMED: the best match is the global maximum of the response map.
        Imgproc.matchTemplate(templ, quadrant, imagematch, Imgproc.TM_CCOEFF_NORMED);
        MinMaxLocResult minmaxLoc = Core.minMaxLoc(imagematch);
        System.out.println(label + ".maxVal:" + minmaxLoc.maxVal);
        anchor.x = minmaxLoc.maxLoc.x + offsetX;
        anchor.y = minmaxLoc.maxLoc.y + offsetY;
        Core.circle(debugImage, anchor, 3, new Scalar(0, 0, 255), 3);
        Highgui.imwrite(Constants.PATH + Constants.DEST_IMAGE_PATH + debugFile, debugImage);
    }

    /**
     * Locates the four anchor marks with a Hough circle transform, one per page
     * quadrant. Coordinates are written into the out-parameter Points in
     * full-page coordinates.
     *
     * BUG FIX: the quadrant rectangles for the left column previously used
     * {@code srcRows / 2} as the *width* ("new Rect(0, 0, srcRows / 2, srcRows / 2)"),
     * which clipped or overflowed the submat on non-square pages. Width is now
     * consistently {@code srcCols / 2}.
     *
     * @param sourcePath full path of the page image
     * @param anchor01   out: top-left mark
     * @param anchor02   out: top-right mark
     * @param anchor03   out: bottom-left mark
     * @param anchor04   out: bottom-right mark
     */
    public static void fetchAnchorPoints2(String sourcePath, Point anchor01, Point anchor02, Point anchor03,
            Point anchor04) {
        Mat src = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_GRAYSCALE);
        Mat colorimage = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_COLOR);
        int srcRows = src.rows();
        int srcCols = src.cols();
        System.out.println("图片高 宽:" + src.rows() + " " + src.cols());
        Rect q1 = new Rect(0, 0, srcCols / 2, srcRows / 2);
        Rect q2 = new Rect(srcCols / 2, 0, srcCols / 2, srcRows / 2);
        Rect q3 = new Rect(0, srcRows / 2, srcCols / 2, srcRows / 2);
        Rect q4 = new Rect(srcCols / 2, srcRows / 2, srcCols / 2, srcRows / 2);
        detectCircleAnchor(src.submat(q1), colorimage.submat(q1), anchor01, 0, 0, "rect_cc1.png");
        detectCircleAnchor(src.submat(q2), colorimage.submat(q2), anchor02, srcCols / 2, 0, "rect_cc2.png");
        detectCircleAnchor(src.submat(q3), colorimage.submat(q3), anchor03, 0, srcRows / 2, "rect_cc3.png");
        detectCircleAnchor(src.submat(q4), colorimage.submat(q4), anchor04, srcCols / 2, srcRows / 2, "rect_cc4.png");
    }

    /**
     * Runs the Hough circle transform on one grayscale quadrant, stores the last
     * detected circle's center (plus the quadrant offset) in {@code anchor},
     * draws all detections on the color quadrant, and saves a debug image.
     */
    private static void detectCircleAnchor(Mat graySub, Mat colorSub, Point anchor, double offsetX, double offsetY,
            String debugFile) {
        Mat circles = new Mat(); // one column per circle: (centerX, centerY, radius)
        Imgproc.HoughCircles(graySub, circles, Imgproc.CV_HOUGH_GRADIENT, 1.0, 300 / 8, 200, 90, 10, 50);
        System.out.println(circles.cols());
        for (int i = 0; i < circles.cols(); i++) {
            double vCircle[] = circles.get(0, i);
            Point center = new Point(vCircle[0], vCircle[1]);
            int radius = (int) Math.round(vCircle[2]);
            Core.circle(colorSub, center, 3, new Scalar(0, 255, 0), -1, 8, 0); // green center dot
            Core.circle(colorSub, center, radius, new Scalar(0, 0, 255), 3, 8, 0); // red outline
            anchor.x = vCircle[0] + offsetX;
            anchor.y = vCircle[1] + offsetY;
        }
        Highgui.imwrite(Constants.PATH + Constants.DEST_IMAGE_PATH + debugFile, colorSub);
    }
}
package com.acts.opencv.base;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import javax.servlet.http.HttpServletResponse;
import org.opencv.core.Core;
import org.opencv.core.Core.MinMaxLocResult;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfFloat;
import org.opencv.core.MatOfInt;
import org.opencv.core.MatOfPoint;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import com.acts.opencv.common.utils.CommonUtil;
import com.acts.opencv.common.utils.Constants;
import com.acts.opencv.common.utils.OpenCVUtil;
import com.acts.opencv.common.web.BaseController;
import com.acts.opencv.demo.DemoController;
import com.google.zxing.Binarizer;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.DecodeHintType;
import com.google.zxing.LuminanceSource;
import com.google.zxing.MultiFormatReader;
import com.google.zxing.Result;
import com.google.zxing.client.j2se.BufferedImageLuminanceSource;
import com.google.zxing.common.HybridBinarizer;
import cn.hutool.core.util.NumberUtil;
@Controller
@RequestMapping(value = "base")
public class BaseMethodController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(DemoController.class);
/**
 * Fixed-threshold binarization endpoint.
 *
 * @param response   HTTP response the binarized image is streamed into
 * @param imagefile  image path relative to {@code Constants.PATH}
 * @param binaryType 0..4 mapped to THRESH_BINARY, THRESH_BINARY_INV,
 *                   THRESH_TRUNC, THRESH_TOZERO, THRESH_TOZERO_INV
 * @param thresh     threshold value
 * @param maxval     value assigned to pixels that pass the threshold
 */
@RequestMapping(value = "binary")
public void binary(HttpServletResponse response, String imagefile, Integer binaryType, Double thresh, Double maxval) {
	System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
	logger.info("\n 二值化方法");
	String sourcePath = Constants.PATH + imagefile;
	logger.info("url==============" + sourcePath);
	// Load as grayscale; threshold() expects a single-channel image.
	Mat source = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_GRAYSCALE);
	Mat destination = new Mat(source.rows(), source.cols(), source.type());
	logger.info("binaryType:{},thresh:{},maxval:{}", binaryType, thresh, maxval);
	switch (binaryType) {
	case 0:
		binaryType = Imgproc.THRESH_BINARY;
		break;
	case 1:
		binaryType = Imgproc.THRESH_BINARY_INV;
		break;
	case 2:
		binaryType = Imgproc.THRESH_TRUNC;
		break;
	case 3:
		binaryType = Imgproc.THRESH_TOZERO;
		break;
	case 4:
		binaryType = Imgproc.THRESH_TOZERO_INV;
		break;
	default:
		break;
	}
	// thresh/maxval auto-unbox; the previous Double.valueOf(...) calls were redundant.
	Imgproc.threshold(source, destination, thresh, maxval, binaryType);
	// Stream the image bytes straight into the response for real-time display
	// (the old write-to-disk-then-read variant was removed as dead code).
	try {
		byte[] imgebyte = OpenCVUtil.covertMat2Byte1(destination);
		renderImage(response, imgebyte);
	} catch (IOException e) {
		e.printStackTrace();
	}
}
/**
 * Adaptive thresholding endpoint.
 *
 * @param response       HTTP response the result image is streamed into
 * @param imagefile      image path relative to {@code Constants.PATH}
 * @param adaptiveMethod 0 = ADAPTIVE_THRESH_MEAN_C, 1 = ADAPTIVE_THRESH_GAUSSIAN_C
 * @param binaryType     0..4 mapped to the THRESH_* constants
 * @param blockSize      size of the neighbourhood used per pixel
 * @param constantC      constant subtracted from the (weighted) neighbourhood mean
 */
@RequestMapping(value = "adaptiveBinary")
public void adaptiveBinary(HttpServletResponse response, String imagefile, Integer adaptiveMethod,
		Integer binaryType, Integer blockSize,
		Double constantC) {
	System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
	logger.info("\n 自适用二值化方法");
	String sourcePath = Constants.PATH + imagefile;
	logger.info("url==============" + sourcePath);
	// Load directly as a single-channel grayscale image.
	Mat source = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_GRAYSCALE);
	Mat destination = new Mat(source.rows(), source.cols(), source.type());
	logger.info("binaryType:{},blockSize:{},constantC:{}", binaryType, blockSize, constantC);
	// Map the request codes onto the OpenCV constants.
	if (adaptiveMethod == 0) {
		adaptiveMethod = Imgproc.ADAPTIVE_THRESH_MEAN_C;
	} else if (adaptiveMethod == 1) {
		adaptiveMethod = Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C;
	}
	if (binaryType == 0) {
		binaryType = Imgproc.THRESH_BINARY;
	} else if (binaryType == 1) {
		binaryType = Imgproc.THRESH_BINARY_INV;
	} else if (binaryType == 2) {
		binaryType = Imgproc.THRESH_TRUNC;
	} else if (binaryType == 3) {
		binaryType = Imgproc.THRESH_TOZERO;
	} else if (binaryType == 4) {
		binaryType = Imgproc.THRESH_TOZERO_INV;
	}
	Imgproc.adaptiveThreshold(source, destination, 255, adaptiveMethod, binaryType,
			blockSize, constantC);
	// Stream the result back to the page as an image.
	try {
		byte[] imgebyte = OpenCVUtil.covertMat2Byte1(destination);
		renderImage(response, imgebyte);
	} catch (IOException e) {
		e.printStackTrace();
	}
}
/**
 * Binarizes the image and tries to decode a barcode / QR code with ZXing.
 *
 * @param response       HTTP response the decode result text is written to
 * @param imagefile      image path relative to {@code Constants.PATH}
 * @param adaptiveMethod 0/1 adaptive-threshold method code (currently unused by the fixed threshold below)
 * @param binaryType     0..4 threshold type code (currently unused by the fixed threshold below)
 * @param blockSize      neighbourhood size (currently unused)
 * @param constantC      constant subtracted from the mean (currently unused)
 */
@RequestMapping(value = "zxing")
public void zxing(HttpServletResponse response, String imagefile, Integer adaptiveMethod, Integer binaryType,
		Integer blockSize, Double constantC) {
	System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
	logger.info("\n 自适用二值化方法");
	String sourcePath = Constants.PATH + imagefile;
	logger.info("url==============" + sourcePath);
	Mat source = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_GRAYSCALE);
	Mat destination = new Mat(source.rows(), source.cols(), source.type());
	logger.info("binaryType:{},blockSize:{},constantC:{}", binaryType, blockSize, constantC);
	// Map request codes to OpenCV constants (kept for parity with adaptiveBinary,
	// even though the adaptive call is currently disabled in favour of a fixed threshold).
	if (adaptiveMethod == 0) {
		adaptiveMethod = Imgproc.ADAPTIVE_THRESH_MEAN_C;
	} else if (adaptiveMethod == 1) {
		adaptiveMethod = Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C;
	}
	if (binaryType == 0) {
		binaryType = Imgproc.THRESH_BINARY;
	} else if (binaryType == 1) {
		binaryType = Imgproc.THRESH_BINARY_INV;
	} else if (binaryType == 2) {
		binaryType = Imgproc.THRESH_TRUNC;
	} else if (binaryType == 3) {
		binaryType = Imgproc.THRESH_TOZERO;
	} else if (binaryType == 4) {
		binaryType = Imgproc.THRESH_TOZERO_INV;
	}
	// Fixed threshold at 190; presumably chosen empirically for barcode decoding — TODO confirm.
	Imgproc.threshold(source, destination, 190, 255, Imgproc.THRESH_BINARY);
	renderString(response, parseCode(destination));
}
/**
 * Decodes a barcode / QR code from the given OpenCV matrix using ZXing.
 *
 * @param mat binarized single-image matrix to decode
 * @return a human-readable description of the decode result, or the fixed
 *         "无法识别!!!" text when decoding throws
 */
private static String parseCode(Mat mat) {
	String resultText = "无法识别!!!";
	try {
		MultiFormatReader formatReader = new MultiFormatReader();
		// Convert the Mat to an AWT image, then wrap it for ZXing's binarizer.
		BufferedImage image = OpenCVUtil.toBufferedImage(mat);
		LuminanceSource source = new BufferedImageLuminanceSource(image);
		Binarizer binarizer = new HybridBinarizer(source);
		BinaryBitmap binaryBitmap = new BinaryBitmap(binarizer);
		// Typed map instead of the previous raw Map/HashMap.
		Map<DecodeHintType, Object> hints = new HashMap<DecodeHintType, Object>();
		hints.put(DecodeHintType.CHARACTER_SET, "UTF-8");
		Result result = formatReader.decode(binaryBitmap, hints);
		// StringBuilder: no synchronization needed here (was StringBuffer).
		StringBuilder sbuffer = new StringBuilder();
		sbuffer.append("解析结果 = " + result.toString() + "\n");
		sbuffer.append("二维码格式类型 = " + result.getBarcodeFormat() + "\n");
		sbuffer.append("二维码文本内容 = " + result.getText() + "\n");
		resultText = sbuffer.toString();
	} catch (Exception e) {
		e.printStackTrace();
	}
	return resultText;
}
/**
 * Gaussian blur endpoint.
 *
 * @param response  HTTP response the blurred image is streamed into
 * @param imagefile image path relative to {@code Constants.PATH}
 * @param kwidth    kernel half-width; actual kernel width is 2*kwidth+1 (always odd)
 * @param kheight   kernel half-height, same convention
 * @param sigmaX    Gaussian sigma in X
 * @param sigmaY    Gaussian sigma in Y
 */
@RequestMapping(value = "gaussian")
public void gaussian(HttpServletResponse response, String imagefile, String kwidth, String kheight, String sigmaX,
		String sigmaY) {
	System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
	// BUG FIX: previously logged "二值化方法" (binarization), copy-pasted from the binary endpoint.
	logger.info("\n 高斯滤波方法");
	String sourcePath = Constants.PATH + imagefile;
	logger.info("url==============" + sourcePath);
	Mat source = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_COLOR);
	Mat destination = new Mat(source.rows(), source.cols(), source.type());
	logger.info("kwidth:{},kheight:{},sigmaX:{},sigmaY:{}", kwidth, kheight, sigmaX, sigmaY);
	// GaussianBlur requires odd kernel dimensions; 2*k+1 guarantees that.
	Imgproc.GaussianBlur(source, destination,
			new Size(2 * Integer.valueOf(kwidth) + 1, 2 * Integer.valueOf(kheight) + 1),
			Integer.valueOf(sigmaX), Integer.valueOf(sigmaY));
	try {
		byte[] imgebyte = OpenCVUtil.covertMat2Byte1(destination);
		renderImage(response, imgebyte);
	} catch (IOException e) {
		e.printStackTrace();
	}
}
/**
 * Image sharpening endpoint: median blur followed by a weighted blend with the
 * original image.
 *
 * @param response  HTTP response the sharpened image is streamed into
 * @param imagefile image path relative to {@code Constants.PATH}
 * @param ksize     median-filter half-size; actual kernel is 2*ksize+1
 * @param alpha     weight of the original layer in the blend
 * @param beta      weight of the blurred layer in the blend
 * @param gamma     scalar added to the blend; larger values brighten the result
 */
@RequestMapping(value = "sharpness")
public void sharpness(HttpServletResponse response, String imagefile, int ksize, double alpha, double beta,
		double gamma) {
	System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
	logger.info("\n 锐化操作");
	String sourcePath = Constants.PATH + imagefile;
	logger.info("url==============" + sourcePath);
	Mat source = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_COLOR);
	Mat destination = new Mat(source.rows(), source.cols(), source.type());
	// Median filter first; the kernel size is forced odd.
	int kernel = 2 * ksize + 1;
	Imgproc.medianBlur(source, destination, kernel);
	// Blend the layers: dst = alpha*source + beta*destination + gamma.
	Core.addWeighted(source, alpha, destination, beta, gamma, destination);
	try {
		renderImage(response, OpenCVUtil.covertMat2Byte1(destination));
	} catch (IOException e) {
		e.printStackTrace();
	}
}
/**
 * Flood-fill endpoint. Fills from the seed point (0,0) and renders either the
 * filled source image or, in mask-only mode, the mask image.
 *
 * @param response  HTTP response the result image is streamed into
 * @param imagefile image path relative to {@code Constants.PATH}
 * @param graysize  gray value painted into the filled region
 * @param lodiff    maximal lower brightness difference accepted for filling
 * @param updiff    maximal upper brightness difference accepted for filling
 * @param flag      0 = default fill, 1 = FLOODFILL_FIXED_RANGE, anything else = FLOODFILL_MASK_ONLY
 */
@RequestMapping(value = "floodfill")
public void floodfill(HttpServletResponse response, String imagefile, double graysize, double lodiff,
double updiff, int flag) {
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
logger.info("\n 漫水填充操作");
String sourcePath = Constants.PATH + imagefile;
logger.info("url==============" + sourcePath);
Mat source = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_GRAYSCALE);
// Mat mask = new Mat(source.rows() + 2, source.cols() + 2, source.type());
Mat mask = new Mat();
Rect rect = new Rect();
// Simple (mask-less) invocation for reference:
// Imgproc.floodFill(source, mask, new Point(0, 0), new Scalar(graysize));
// Low 8 bits (0-7) of the flags word: pixel connectivity, either 4 or 8.
int g_nConnectivity = 4;
// Middle 8 bits (8-15): value painted into the mask; if unset/0 the mask defaults to 1.
int g_nNewMaskVal = 255;
/**
 * Fill mode: 0 = default (neither FLOODFILL_FIXED_RANGE nor FLOODFILL_MASK_ONLY).
 * FLOODFILL_FIXED_RANGE: each pixel is compared against the seed point instead of
 * its neighbours; a pixel is filled only when its difference from the seed lies
 * within the given range.
 * FLOODFILL_MASK_ONLY: the function fills the mask image instead of the source image.
 */
int g_nFillMode = 0;
if(flag == 0){// default mode
g_nFillMode = 0;
}else if(flag == 1){// FLOODFILL_FIXED_RANGE mode
g_nFillMode = Imgproc.FLOODFILL_FIXED_RANGE;
}else{// FLOODFILL_MASK_ONLY mode
g_nFillMode = Imgproc.FLOODFILL_MASK_ONLY;
mask = new Mat(source.rows() + 2, source.cols() + 2, source.type());// mask must be 2 px larger than the image
}
System.out.println(g_nNewMaskVal << 8);
// Compose the flags word: connectivity | (mask fill value << 8) | fill mode.
int flags = g_nConnectivity | (g_nNewMaskVal << 8) | g_nFillMode;
// Masked invocation:
Imgproc.floodFill(source, mask, new Point(0, 0), new Scalar(graysize), rect, new Scalar(lodiff), new Scalar(
updiff), flags);
try {
if(flag==2){// FLOODFILL_MASK_ONLY fills the mask image, so render that instead
byte[] imgebyte = OpenCVUtil.covertMat2Byte1(mask);
renderImage(response, imgebyte);
}else{
byte[] imgebyte = OpenCVUtil.covertMat2Byte1(source);
renderImage(response, imgebyte);
}
} catch (IOException e) {
e.printStackTrace();
}
}
/**
 * Image scaling endpoint.
 *
 * @param response   HTTP response the scaled image is streamed into
 * @param imagefile  image path relative to {@code Constants.PATH}
 * @param rewidth    horizontal scale factor (interpreted as enlargement)
 * @param reheight   vertical scale factor (interpreted as enlargement)
 * @param resizeType 2 = shrink (factors are inverted), anything else = enlarge
 */
@RequestMapping(value = "resize")
public void resize(HttpServletResponse response, String imagefile, Double rewidth, Double reheight,
		Integer resizeType) {
	// Factors > 1 enlarge; for resizeType == 2 invert them so the same inputs shrink.
	double width = rewidth;
	double height = reheight;
	if (resizeType == 2) {
		width = 1 / width;
		height = 1 / height;
	}
	System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
	logger.info("\n 图片缩放方法测试");
	String sourcePath = Constants.PATH + imagefile;
	logger.info("url==============" + sourcePath);
	Mat source = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_COLOR);
	Mat destination = new Mat(source.rows(), source.cols(), source.type());
	logger.info("resizeType:{},rewidth:{},reheight:{}", resizeType, rewidth, reheight);
	// Size(0,0) lets OpenCV derive the output size from the scale factors.
	Imgproc.resize(source, destination, new Size(0, 0), width, height, 0);
	try {
		renderImage(response, OpenCVUtil.covertMat2Byte1(destination));
	} catch (IOException e) {
		e.printStackTrace();
	}
}
/**
 * Erosion / dilation endpoint.
 *
 * @param response    HTTP response the result image is streamed into
 * @param imagefile   image path relative to {@code Constants.PATH}
 * @param kSize       structuring-element half-size; actual size is 2*kSize+1
 * @param operateType 1 = erode, anything else = dilate
 * @param shapeType   0 = rectangle, 1 = cross, 2 = ellipse
 * @param isBinary    true reloads the image as grayscale before the operation
 */
@RequestMapping(value = "erodingAndDilation")
public void erodingAndDilation(HttpServletResponse response, String imagefile, Double kSize, Integer operateType,
		Integer shapeType, boolean isBinary) {
	System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
	logger.info("\n 腐蚀膨胀测试测试");
	String sourcePath = Constants.PATH + imagefile;
	logger.info("url==============" + sourcePath);
	Mat source = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_COLOR);
	Mat destination = new Mat(source.rows(), source.cols(), source.type());
	if (isBinary) {
		// Re-read as grayscale when a binary-style input is requested.
		source = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_GRAYSCALE);
	}
	double size = kSize;
	// Shape code 0 doubles as the fallback, matching the original default.
	int shape = 0;
	if (shapeType == 0) {
		shape = Imgproc.MORPH_RECT;
	} else if (shapeType == 1) {
		shape = Imgproc.MORPH_CROSS;
	} else if (shapeType == 2) {
		shape = Imgproc.MORPH_ELLIPSE;
	}
	Mat element = Imgproc.getStructuringElement(shape, new Size(2 * size + 1, 2 * size + 1));
	logger.info("kSize:{},operateType:{},shapeType:{},isBinary:{}", kSize, operateType, shapeType, isBinary);
	if (operateType == 1) {
		Imgproc.erode(source, destination, element); // erosion
	} else {
		Imgproc.dilate(source, destination, element); // dilation
	}
	try {
		renderImage(response, OpenCVUtil.covertMat2Byte1(destination));
	} catch (IOException e) {
		e.printStackTrace();
	}
}
/**
 * Advanced morphology endpoint via {@code Imgproc.morphologyEx}
 * (open / close / gradient / top-hat / black-hat / hit-miss).
 *
 * @param response    HTTP response the result image is streamed into
 * @param imagefile   image path relative to {@code Constants.PATH}
 * @param kSize       structuring-element half-size; actual size is 2*kSize+1
 * @param operateType 2..7 mapped to MORPH_OPEN..MORPH_HITMISS (codes mirror the OpenCV values)
 * @param shapeType   0 = rectangle, 1 = cross, 2 = ellipse
 * @param isBinary    true reloads the image as grayscale before the operation
 */
@RequestMapping(value = "morphologyEx")
public void morphologyEx(HttpServletResponse response, String imagefile, Double kSize, Integer operateType,
		Integer shapeType, boolean isBinary) {
	System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
	logger.info("\n 腐蚀膨胀测试测试");
	String sourcePath = Constants.PATH + imagefile;
	logger.info("url==============" + sourcePath);
	Mat source = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_COLOR);
	Mat destination = new Mat(source.rows(), source.cols(), source.type());
	if (isBinary) {
		// Re-read as grayscale when a binary-style input is requested.
		source = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_GRAYSCALE);
	}
	double size = kSize;
	// Shape code 0 doubles as the fallback, matching the original default.
	int shape = 0;
	if (shapeType == 0) {
		shape = Imgproc.MORPH_RECT;
	} else if (shapeType == 1) {
		shape = Imgproc.MORPH_CROSS;
	} else if (shapeType == 2) {
		shape = Imgproc.MORPH_ELLIPSE;
	}
	// Default 2 matches the original fallback for out-of-range codes.
	int op = 2;
	if (operateType == 2) {
		op = Imgproc.MORPH_OPEN;
	} else if (operateType == 3) {
		op = Imgproc.MORPH_CLOSE;
	} else if (operateType == 4) {
		op = Imgproc.MORPH_GRADIENT;
	} else if (operateType == 5) {
		op = Imgproc.MORPH_TOPHAT;
	} else if (operateType == 6) {
		op = Imgproc.MORPH_BLACKHAT;
	} else if (operateType == 7) {
		op = Imgproc.MORPH_HITMISS;
	}
	Mat element = Imgproc.getStructuringElement(shape, new Size(2 * size + 1, 2 * size + 1));
	logger.info("kSize:{},operateType:{},shapeType:{},isBinary:{}", kSize, operateType, shapeType, isBinary);
	Imgproc.morphologyEx(source, destination, op, element);
	try {
		renderImage(response, OpenCVUtil.covertMat2Byte1(destination));
	} catch (IOException e) {
		e.printStackTrace();
	}
}
/**
 * Canny edge-detection endpoint.
 *
 * @param response   HTTP response the edge image is streamed into
 * @param imagefile  image path relative to {@code Constants.PATH}
 * @param threshold1 lower hysteresis threshold
 * @param threshold2 upper hysteresis threshold
 * @param isBinary   accepted for interface symmetry with the other endpoints; not used here
 */
@RequestMapping(value = "canny")
public void canny(HttpServletResponse response, String imagefile, Double threshold1, Double threshold2,
		boolean isBinary) {
	System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
	logger.info("\n 边缘检测测试");
	String sourcePath = Constants.PATH + imagefile;
	logger.info("url==============" + sourcePath);
	// Canny works on a single-channel image, so load as grayscale.
	Mat source = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_GRAYSCALE);
	Mat destination = new Mat(source.rows(), source.cols(), source.type());
	Imgproc.Canny(source, destination, threshold1, threshold2);
	try {
		renderImage(response, OpenCVUtil.covertMat2Byte1(destination));
	} catch (IOException e) {
		e.printStackTrace();
	}
}
/**
 * Probabilistic Hough line transform endpoint: Canny edges first, then line
 * detection, then the found segments drawn in green on the color image.
 *
 * @param response      HTTP response the annotated image is streamed into
 * @param imagefile     image path relative to {@code Constants.PATH}
 * @param threshold1    lower Canny hysteresis threshold
 * @param threshold2    upper Canny hysteresis threshold
 * @param threshold     accumulator votes a line needs to be accepted
 * @param minLineLength minimum accepted segment length
 * @param maxLineGap    maximum gap merged into a single segment
 */
@RequestMapping(value = "houghline")
public void houghline(HttpServletResponse response, String imagefile, Double threshold1, Double threshold2,
Integer threshold, Double minLineLength, Double maxLineGap) {
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
logger.info("\n 霍夫线变换测试");
String sourcePath = Constants.PATH + imagefile;
logger.info("url==============" + sourcePath);
Mat source1 = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_COLOR);// color copy, segments drawn on this
Mat source2 = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_GRAYSCALE);// grayscale copy for edge detection
Mat lineMat = new Mat(source2.rows(), source2.cols(), source2.type());
Mat destination = new Mat(source2.rows(), source2.cols(), source2.type());
Imgproc.Canny(source2, destination, threshold1, threshold2);
// rho = 1 px, theta = 1 degree.
Imgproc.HoughLinesP(destination, lineMat, 1, Math.PI / 180, threshold, minLineLength, maxLineGap);
int[] a = new int[(int) lineMat.total() * lineMat.channels()]; // detected endpoints, packed as (x1,y1,x2,y2) per segment
lineMat.get(0, 0, a);
for (int i = 0; i < a.length; i += 4) {
// new Scalar(255, 0, 0) blue
// new Scalar(0, 255, 0) green
// new Scalar(0, 0, 255) red
Core.line(source1, new Point(a[i], a[i + 1]), new Point(a[i + 2], a[i + 3]), new Scalar(0, 255, 0), 2);
}
try {
byte[] imgebyte = OpenCVUtil.covertMat2Byte1(source1);
renderImage(response, imgebyte);
} catch (IOException e) {
e.printStackTrace();
}
}
/**
 * Hough circle transform endpoint: detects circles in the grayscale image and
 * draws the centers (green) and outlines (red) on the color image.
 *
 * @param response  HTTP response the annotated image is streamed into
 * @param imagefile image path relative to {@code Constants.PATH}
 * @param minDist   minimum distance between detected circle centers
 * @param param1    upper Canny threshold used internally by the detector
 * @param param2    accumulator threshold for circle centers
 * @param minRadius smallest accepted radius
 * @param maxRadius largest accepted radius
 */
@RequestMapping(value = "houghcircle")
public void houghcircle(HttpServletResponse response, String imagefile, Double minDist, Double param1,
		Double param2, Integer minRadius, Integer maxRadius) {
	System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
	logger.info("\n 霍夫圆变换测试");
	String sourcePath = Constants.PATH + imagefile;
	logger.info("url==============" + sourcePath);
	Mat source1 = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_COLOR);// color copy, circles drawn on this
	Mat source2 = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_GRAYSCALE);// grayscale copy for detection
	// One column per detected circle: (centerX, centerY, radius).
	Mat circleMat = new Mat(source2.rows(), source2.cols(), source2.type());
	Imgproc.HoughCircles(source2, circleMat, Imgproc.CV_HOUGH_GRADIENT, 1.0, minDist, param1, param2, minRadius,
			maxRadius);
	System.out.println("----------------" + circleMat.cols());
	for (int i = 0; i < circleMat.cols(); i++) {
		double vCircle[] = circleMat.get(0, i);
		Point center = new Point(vCircle[0], vCircle[1]);
		int radius = (int) Math.round(vCircle[2]);
		Core.circle(source1, center, 3, new Scalar(0, 255, 0), -1, 8, 0); // green center dot
		Core.circle(source1, center, radius, new Scalar(0, 0, 255), 3, 8, 0); // red outline
	}
	try {
		renderImage(response, OpenCVUtil.covertMat2Byte1(source1));
	} catch (IOException e) {
		e.printStackTrace();
	}
}
/**
 * Color detection endpoint: produces a binary mask of pixels matching the
 * requested color.
 *
 * @param response  HTTP response the mask image is streamed into
 * @param imagefile image path relative to {@code Constants.PATH}
 * @param color     1 = red, 2 = blue, 3 = green, 4 = yellow
 * @param colorType 1 = exact BGR value match, anything else = HSV hue-range match
 */
@RequestMapping(value = "findcolor")
public void findcolor(HttpServletResponse response, String imagefile, Integer color,
Integer colorType) {
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
logger.info("\n 查找颜色测试");
String sourcePath = Constants.PATH + imagefile;
logger.info("url==============" + sourcePath);
Mat source = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_COLOR);
Mat destination = new Mat(source.rows(), source.cols(), source.type());
if (colorType == 1) {// 1 = exact BGR match, 2 = HSV range match
double B = 0;
double G = 0;
double R = 0;
switch (color) {
case 1:// red
B = 0;
G = 0;
R = 255;
break;
case 2:// blue
B = 255;
G = 0;
R = 0;
break;
case 3:// green
B = 0;
G = 255;
R = 0;
break;
case 4:// yellow
B = 0;
G = 255;
R = 255;
break;
}
// Lower bound == upper bound: only the exact BGR value matches.
Core.inRange(source, new Scalar(B, G, R), new Scalar(B, G, R), destination);
} else {// HSV mode
Imgproc.cvtColor(source, source, Imgproc.COLOR_BGR2HSV);
double min = 0;
double max = 0;
// reddish hues (176,90,90)-(0,90,90)-(20,255,255), simplified here to 0-20
// bluish hues (100,90,90)-(120,255,255)
// greenish hues (60,90,90)-(80,255,255)
// yellowish hues (23,90,90)-(38,255,255)
switch (color) {
case 1:// red
min = 0;
max = 20;
break;
case 2:// blue
min = 100;
max = 120;
break;
case 3:// green
min = 60;
max = 80;
break;
case 4:// yellow
min = 23;
max = 38;
break;
}
// Hue range varies per color; saturation/value floors are fixed at 90.
Core.inRange(source, new Scalar(min, 90, 90), new Scalar(max, 255, 255), destination);
}
try {
byte[] imgebyte = OpenCVUtil.covertMat2Byte1(destination);
renderImage(response, imgebyte);
} catch (IOException e) {
e.printStackTrace();
}
}
/**
 * Contour-detection demo endpoint.
 * Loads the image as grayscale, extracts contours with the requested
 * retrieval mode and approximation method, draws the selected contour
 * (or all of them) in green on a black canvas and renders the result.
 *
 * @param response   servlet response the contour image is written to
 * @param imagefile  path of the source image relative to the webapp root
 * @param mode       0=RETR_EXTERNAL, 1=RETR_LIST, 2=RETR_CCOMP, 3=RETR_TREE
 * @param method     0..5 mapped to the CHAIN_* approximation constants below
 * @param contourNum index of the contour to draw; -1 (or out of range) = all
 */
@RequestMapping(value = "contours")
public void contours(HttpServletResponse response, String imagefile, Integer mode, Integer method,
		Integer contourNum) {
	System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
	logger.info("\n 轮廓识别测试");
	String sourcePath = Constants.PATH + imagefile;
	logger.info("url==============" + sourcePath);
	logger.info("mode:{},method:{}", mode, method);
	switch (mode) {
	case 0:
		mode = Imgproc.RETR_EXTERNAL;
		break;
	case 1:
		mode = Imgproc.RETR_LIST;
		break;
	case 2:
		mode = Imgproc.RETR_CCOMP;
		break;
	case 3:
		mode = Imgproc.RETR_TREE;
		break;
	}
	switch (method) {
	case 0:
		method = Imgproc.CV_CHAIN_CODE;
		break;
	case 1:
		method = Imgproc.CHAIN_APPROX_NONE;
		break;
	case 2:
		method = Imgproc.CHAIN_APPROX_SIMPLE;
		break;
	case 3:
		method = Imgproc.CHAIN_APPROX_TC89_L1;
		break;
	case 4:
		method = Imgproc.CHAIN_APPROX_TC89_KCOS;
		break;
	case 5:
		method = Imgproc.CV_LINK_RUNS;
		break;
	}
	Mat source = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_GRAYSCALE);
	Mat destination = Mat.zeros(source.size(), CvType.CV_8UC3);
	Mat hierarchy = new Mat(source.rows(), source.cols(), CvType.CV_8UC1, new Scalar(0));
	// Fix: findContours requires a typed List<MatOfPoint>; the previous raw
	// Vector did not match the OpenCV Java binding's signature.
	List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
	Imgproc.findContours(source, contours, hierarchy, mode, method, new Point());
	logger.info("轮廓数量为:{},当前请求要展现第{}个轮廓", contours.size(), contourNum);
	// Contour indices are 0-based; out-of-range requests fall back to
	// drawing every contour (drawContours treats -1 as "all").
	if (contourNum == -1 || (contourNum + 1) > contours.size()) {
		logger.info("轮廓数量已经超出,默认显示所有轮廓,轮廓数量:{}", contours.size());
		contourNum = -1;
	}
	Imgproc.drawContours(destination, contours, contourNum, new Scalar(0, 255, 0), 1);
	try {
		byte[] imgebyte = OpenCVUtil.covertMat2Byte1(destination);
		renderImage(response, imgebyte);
	} catch (IOException e) {
		e.printStackTrace();
	}
}
/**
 * Template-matching demo endpoint.
 * Cuts a template out of the source image from the rectangle selected on
 * the front end, runs matchTemplate with the requested method, and either
 * renders the normalized score map or the source with the best match boxed.
 *
 * @param response  servlet response the result image is written to
 * @param imagefile path of the source image relative to the webapp root
 * @param method    OpenCV matching method code (0..5, TM_SQDIFF..TM_CCOEFF_NORMED)
 * @param imageType 0 = show the intermediate score map, otherwise draw the box
 * @param x1        template rectangle x (top-left)
 * @param y1        template rectangle y (top-left)
 * @param x2        unused
 * @param y2        unused
 * @param width     template rectangle width
 * @param height    template rectangle height
 */
@RequestMapping(value = "findtemplate")
public void findtemplate(HttpServletResponse response, String imagefile, Integer method, Integer imageType,
		Double x1, Double y1, Double x2, Double y2, Double width, Double height) {
	System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
	logger.info("\n 模板查找测试");
	String sourcePath = Constants.PATH + imagefile;
	logger.info("url==============" + sourcePath);
	Mat source = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_COLOR);
	// The template comes from the image itself: round the front-end
	// rectangle to whole pixels and take a submat view of it.
	logger.info("{},{},{},{}", x1, y1, width, height);
	int rectX = Integer.valueOf(CommonUtil.setScare(x1.toString(), 0));
	int rectY = Integer.valueOf(CommonUtil.setScare(y1.toString(), 0));
	int rectW = Integer.valueOf(CommonUtil.setScare(width.toString(), 0));
	int rectH = Integer.valueOf(CommonUtil.setScare(height.toString(), 0));
	Mat template = source.submat(new Rect(rectX, rectY, rectW, rectH));
	// The score map is (W-w+1) x (H-h+1), one score per template position.
	int resultCols = source.cols() - template.cols() + 1;
	int resultRows = source.rows() - template.rows() + 1;
	Mat result = new Mat(resultRows, resultCols, CvType.CV_32FC1);
	Imgproc.matchTemplate(source, template, result, method);
	// Normalize scores into 0..255 so the map is viewable as an image.
	Core.normalize(result, result, 0, 255, Core.NORM_MINMAX, -1, new Mat());
	MinMaxLocResult minmaxLoc = Core.minMaxLoc(result);
	logger.info("相似值=================:最大:" + minmaxLoc.maxVal + " 最小:" + minmaxLoc.minVal);
	// For the squared-difference methods (TM_SQDIFF=0, TM_SQDIFF_NORMED=1)
	// the best match is the minimum; for methods 2..5 it is the maximum.
	// Any other code falls back to the minimum, as before.
	boolean bestIsMax = method >= 2 && method <= 5;
	Point matchLoc = bestIsMax ? minmaxLoc.maxLoc : minmaxLoc.minLoc;
	if (imageType == 0) {
		// Show the intermediate score map instead of the annotated source.
		source = result;
	} else {
		Core.rectangle(source, matchLoc, new Point(matchLoc.x + template.cols(), matchLoc.y + template.rows()),
				new Scalar(0, 255, 0), 2);
	}
	try {
		byte[] imgebyte = OpenCVUtil.covertMat2Byte1(source);
		renderImage(response, imgebyte);
	} catch (IOException e) {
		e.printStackTrace();
	}
}
/**
 * Gray-level histogram demo endpoint.
 * Computes the grayscale histogram of the image with calcHist, then draws
 * it onto a white canvas: one black outlined bar per gray level, blue
 * y-axis labels every max/10 counts and blue x-axis tick marks.
 *
 * @param response  servlet response the histogram image is written to
 * @param imagefile path of the source image relative to the webapp root
 * @param cols      number of histogram bins (256 covers the full 8-bit range)
 * @param imageW    canvas width in pixels
 * @param imageH    canvas height in pixels
 * @param imageKedu x-axis tick spacing, in bins
 * @param isShow    when true, also draws horizontal guide lines per y label
 * @Author 王嵩
 * @Date 2018年4月2日
 */
@RequestMapping(value = "grayHistogram")
public void grayHistogram(HttpServletResponse response, String imagefile, Integer cols, Integer imageW,
Integer imageH, Integer imageKedu, boolean isShow) {
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
logger.info("\n 灰度直方图测试");
String sourcePath = Constants.PATH + imagefile;
Mat source = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_GRAYSCALE);
List images = new ArrayList();
images.add(source);
MatOfInt channels = new MatOfInt(0); // channel index list; 0 = the single gray channel
MatOfInt histSize = new MatOfInt(cols); // bin count; CV_8U images span 0..255, i.e. 256 levels
Mat histogramOfGray = new Mat(); // output histogram: one row per bin, value = pixel count for that level
// NOTE(review): OpenCV examples usually pass the range as (0, 256) because the
// upper bound is exclusive; (0, 255) drops level 255 into no bin — confirm intent.
MatOfFloat histRange = new MatOfFloat(0, 255);
Imgproc.calcHist(images, channels, new Mat(), histogramOfGray, histSize, histRange, false); // compute the histogram
// minmaxLoc.maxVal = count of the most populated bin, used to scale the y axis.
MinMaxLocResult minmaxLoc = Core.minMaxLoc(histogramOfGray);
// Per-row normalization (kept disabled in the original):
// Core.normalize(histogramOfGray, histogramOfGray, 0, histogramOfGray.rows(), Core.NORM_MINMAX, -1, new Mat());
// Create the white drawing canvas.
int histImgRows = imageH;
int histImgCols = imageW;
// Horizontal pixels available per bin (integer division -> bars may not fill the full width).
int colStep = (int) Math.floor((histImgCols) / histSize.get(0, 0)[0]);
Mat histImg = new Mat(histImgRows, histImgCols, CvType.CV_8UC3, new Scalar(255, 255, 255)); // canvas for the histogram
int max = (int) minmaxLoc.maxVal;
System.out.println("max--------" + max);
// bin_u: canvas pixels per single count; 20 px are reserved at the bottom for the x axis.
double bin_u = (double) (histImg.height() - 20) / max;
int kedu = 0;
// Label the y axis every max/10 counts until the top of the tallest bar.
for (int i = 1; kedu <= minmaxLoc.maxVal; i++) {
kedu = i * max / 10;
// Draw the count label at its height on the canvas (origin is top-left).
Core.putText(histImg, kedu + "", new Point(0, histImg.height() - 5 - kedu * bin_u), 1, 1, new Scalar(255,0, 0));
if (isShow) {
// Optional horizontal guide line; subtract the same 20 px axis margin.
Core.line(histImg, new Point(0, histImg.height() - 20 - kedu * bin_u),
new Point(imageW, histImg.height() - 20 - (kedu + 1) * bin_u), new Scalar(255, 0, 0), 1, 8, 0);
}
}
System.out.println("灰度级:" + histSize.get(0, 0)[0]);
// Draw one outlined bar per gray level. Mat's origin is the top-left corner,
// so bar tops are computed by subtracting the scaled count from the bottom.
for (int i = 0; i < histSize.get(0, 0)[0]; i++) {
Core.rectangle(histImg, new Point(colStep * i, histImgRows - 20), new Point(colStep * (i + 1), histImgRows
- bin_u * Math.round(histogramOfGray.get(i, 0)[0]) - 20), new Scalar(0, 0, 0), 1, 8, 0);
// if (i % 10 == 0) {
// Core.putText(histImg, Integer.toString(i), new Point(colStep * i, histImgRows - 5), 1, 1, new Scalar(255,
// 0, 0)); // x-axis tick label (alternative style, kept disabled)
// }
// X-axis tick marks, variant 2.
// NOTE(review): kedu is recomputed for every i, so a tick and label are drawn on
// every iteration, not only every imageKedu bins — confirm this is intended.
kedu = i * imageKedu;
Core.rectangle(histImg, new Point(colStep * kedu, histImgRows - 20), new Point(colStep * (kedu + 1),
histImgRows - 20), new Scalar(255, 0, 0), 2, 8, 0);
Core.putText(histImg, kedu + "", new Point(histImgCols / 256 * kedu, histImgRows - 5), 1, 1, new Scalar(
255, 0, 0)); // x-axis tick label
}
try {
byte[] imgebyte = OpenCVUtil.covertMat2Byte1(histImg);
renderImage(response, imgebyte);
} catch (IOException e) {
e.printStackTrace();
}
}
// public void qrCode(HttpServletResponse response, String imagefile, Integer binaryType, Double thresh, Double maxval) {
// System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
// String sourcePath = Constants.PATH + imagefile;
// // 加载为灰度图显示
// Mat imageGray = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_GRAYSCALE);
// Mat image = new Mat(imageGray.rows(), imageGray.cols(), imageGray.type());
// Mat imageGuussian = new Mat(imageGray.rows(), imageGray.cols(), imageGray.type());
// Mat imageSobelX,imageSobelY,imageSobelOut;
// imageGray.copyTo(image);
//
// // imshow("Source Image",image);
//
// GaussianBlur(imageGray,imageGuussian,Size(3,3),0);
// Imgproc.GaussianBlur(imageGray, imageGuussian,new Size(5, 5),
// Integer.valueOf(sigmaX), Integer.valueOf(sigmaY));
//
// //水平和垂直方向灰度图像的梯度和,使用Sobel算子
// Mat imageX16S,imageY16S;
// Sobel(imageGuussian,imageX16S,CV_16S,1,0,3,1,0,4);
// Sobel(imageGuussian,imageY16S,CV_16S,0,1,3,1,0,4);
// convertScaleAbs(imageX16S,imageSobelX,1,0);
// convertScaleAbs(imageY16S,imageSobelY,1,0);
// imageSobelOut=imageSobelX+imageSobelY;
// imshow("XY方向梯度和",imageSobelOut);
// Mat srcImg =imageSobelOut;
// //宽高扩充,非必须,特定的宽高可以提高傅里叶运算效率
// Mat padded;
// int opWidth = getOptimalDFTSize(srcImg.rows);
// int opHeight = getOptimalDFTSize(srcImg.cols);
// copyMakeBorder(srcImg, padded, 0, opWidth-srcImg.rows, 0, opHeight-srcImg.cols, BORDER_CONSTANT, Scalar::all(0));
// Mat planes[] = {Mat_(padded), Mat::zeros(padded.size(), CV_32F)};
// Mat comImg;
// //通道融合,融合成一个2通道的图像
// merge(planes,2,comImg);
// dft(comImg, comImg);
// split(comImg, planes);
// magnitude(planes[0], planes[1], planes[0]);
// Mat magMat = planes[0];
// magMat += Scalar::all(1);
// log(magMat, magMat); //对数变换,方便显示
// magMat = magMat(Rect(0, 0, magMat.cols & -2, magMat.rows & -2));
// //以下把傅里叶频谱图的四个角落移动到图像中心
// int cx = magMat.cols/2;
// int cy = magMat.rows/2;
// Mat q0(magMat, Rect(0, 0, cx, cy));
// Mat q1(magMat, Rect(0, cy, cx, cy));
// Mat q2(magMat, Rect(cx, cy, cx, cy));
// Mat q3(magMat, Rect(cx, 0, cx, cy));
// Mat tmp;
// q0.copyTo(tmp);
// q2.copyTo(q0);
// tmp.copyTo(q2);
// q1.copyTo(tmp);
// q3.copyTo(q1);
// tmp.copyTo(q3);
// normalize(magMat, magMat, 0, 1, CV_MINMAX);
// Mat magImg(magMat.size(), CV_8UC1);
// magMat.convertTo(magImg,CV_8UC1,255,0);
// imshow("傅里叶频谱", magImg);
// //HoughLines查找傅里叶频谱的直线,该直线跟原图的一维码方向相互垂直
// threshold(magImg,magImg,180,255,CV_THRESH_BINARY);
// imshow("二值化", magImg);
// vector lines;
// float pi180 = (float)CV_PI/180;
// Mat linImg(magImg.size(),CV_8UC3);
// HoughLines(magImg,lines,1,pi180,100,0,0);
// int numLines = lines.size();
// float theta;
// for(int l=0; lget_type_name()<get_data()< contours = new Vector();
//轮廓识别,查找外轮廓
Imgproc.findContours(edges, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE, new Point());
List listPoint = new ArrayList<>();
for(int i =0; i listPoint) {
//获得点的顺序
List newOrderList = orderPoints(listPoint);
for (Point point : newOrderList) {
System.out.println(point);
}
//计算新图像的宽度,它将是右下角和左下角x坐标之间或右上角和左上角x坐标之间的最大距离
//此处的顺序别搞错0,1,2,3依次是左上[0],右上[1],右下[2],左下[3]
Point leftTop = newOrderList.get(0);
Point rightTop = newOrderList.get(1);
Point rightBottom = newOrderList.get(2);
Point leftBottom = newOrderList.get(3);
double widthA = Math.sqrt(Math.pow(rightBottom.x-leftBottom.x, 2)
+Math.pow(rightBottom.y-leftBottom.y, 2));
double widthB = Math.sqrt(Math.pow(rightTop.x-leftTop.x, 2)
+Math.pow(rightTop.y-leftTop.y, 2));
int maxWidth = Math.max((int)widthA, (int)widthB);
//计算新图像的高度,这将是右上角和右下角y坐标或左上角和左下角y坐标之间的最大距离,
//这里用到的初中数学知识点和点的距离计算(x1,y1),(x2,y2)距离=√((x2-x1)^2+(y2-y1)^2)
double heightA = Math.sqrt(Math.pow(rightTop.x-rightBottom.x, 2)
+Math.pow(rightTop.y-rightBottom.y, 2));
double heightB = Math.sqrt(Math.pow(leftTop.x-leftBottom.x, 2)
+Math.pow(leftTop.y-leftBottom.y, 2));
int maxHeight = Math.max((int)heightA, (int)heightB);
System.out.println("宽度:"+maxWidth);
System.out.println("高度:"+maxHeight);
//现在我们指定目标图像的尺寸,构造目标点集以获得图像的“鸟瞰图”(即自上而下的视图),
//再次指定左上角,右上角的点,右下角和左下角的顺序
Point dstPoint1 = new Point(0,0);
Point dstPoint2 = new Point(maxWidth-1,0);
Point dstPoint3 = new Point(maxWidth-1,maxHeight-1);
Point dstPoint4 = new Point(0,maxHeight-1);
//计算透视变换矩阵rectMat原四顶点位置,dstMat目标顶点位置
MatOfPoint2f rectMat = new MatOfPoint2f(leftTop,rightTop,rightBottom,leftBottom);
MatOfPoint2f dstMat = new MatOfPoint2f(dstPoint1, dstPoint2, dstPoint3, dstPoint4);
//opencv透视转换方法
Mat transmtx = Imgproc.getPerspectiveTransform(rectMat, dstMat);
//注意定义的新图像宽高设置
Mat resultMat = Mat.zeros((int)maxHeight-1, (int)maxWidth-1, CvType.CV_8UC3);
Imgproc.warpPerspective(source, resultMat, transmtx, resultMat.size());
Highgui.imwrite("D:\\test\\abc\\t2.png", resultMat);
//返回矫正后的图像
return resultMat;
}
/**
 * Orders four corner points clockwise as: top-left, top-right,
 * bottom-right, bottom-left. This order is what the perspective
 * transform expects when pairing source and destination corners.
 *
 * Note: the input list is sorted in place (side effect on the caller's list).
 *
 * @author song.wang
 * @date 2019年8月16日
 * @param listPoint exactly four corner points, in any order
 * @return the same four points ordered TL, TR, BR, BL
 */
private static List<Point> orderPoints(List<Point> listPoint) {
	// Fix: generics restored — the raw List/Comparator version did not compile
	// (listPoint.get(i) yielded Object, not Point).
	// Sort ascending by x: the first two entries become the left-side points,
	// the last two the right-side points.
	Collections.sort(listPoint, new Comparator<Point>() {
		public int compare(Point p1, Point p2) {
			return Double.compare(p1.x, p2.x);
		}
	});
	// Of the two left points, the smaller y is the top-left (image origin is top-left).
	Point topLeft;
	Point bottomLeft;
	Point leftPoint1 = listPoint.get(0);
	Point leftPoint2 = listPoint.get(1);
	Point rightPoint1 = listPoint.get(2);
	Point rightPoint2 = listPoint.get(3);
	if (leftPoint1.y > leftPoint2.y) {
		topLeft = leftPoint2;
		bottomLeft = leftPoint1;
	} else {
		topLeft = leftPoint1;
		bottomLeft = leftPoint2;
	}
	// Distinguish the two right points by their Euclidean distance from the
	// top-left corner (Pythagorean theorem): the farther one is bottom-right.
	// Caveat (from the original author): this heuristic can misclassify an
	// inverted trapezoid, but not images shot at a normal downward angle;
	// an alternative is the same y-comparison used for the left side.
	double rightLength1 = Math.sqrt(Math.pow((rightPoint1.y - topLeft.y), 2)
			+ Math.pow((rightPoint1.x - topLeft.x), 2));
	double rightLength2 = Math.sqrt(Math.pow((rightPoint2.y - topLeft.y), 2)
			+ Math.pow((rightPoint2.x - topLeft.x), 2));
	Point topRight;
	Point bottomRight;
	if (rightLength1 > rightLength2) {
		topRight = rightPoint2;
		bottomRight = rightPoint1;
	} else {
		topRight = rightPoint1;
		bottomRight = rightPoint2;
	}
	// Clockwise order TL, TR, BR, BL — the perspective transform relies on it.
	List<Point> newListPoint = new ArrayList<>();
	newListPoint.add(topLeft);
	newListPoint.add(topRight);
	newListPoint.add(bottomRight);
	newListPoint.add(bottomLeft);
	return newListPoint;
}
}
工具类
加减乘除类
package com.acts.opencv.common.utils;
import java.math.BigDecimal;
import java.text.NumberFormat;
import java.util.ResourceBundle;
import java.util.UUID;
import org.apache.commons.lang.StringUtils;
/**
 * Numeric and string helper utilities built on BigDecimal string arithmetic.
 * Empty or null numeric arguments are treated as "0.0" throughout.
 */
public class CommonUtil {
	/**
	 * Null-safe emptiness check; same semantics as commons-lang
	 * StringUtils.isEmpty (true for null or ""), without the third-party
	 * dependency.
	 */
	private static boolean isEmptyArg(String s) {
		return s == null || s.length() == 0;
	}

	/**
	 * Division: arg1 / arg2, 6 decimal places, HALF_EVEN rounding.
	 * Division by zero returns 0.00 instead of throwing.
	 */
	public static BigDecimal divide(String arg1, String arg2) {
		if (isEmptyArg(arg1)) {
			arg1 = "0.0";
		}
		if (isEmptyArg(arg2)) {
			arg2 = "0.0";
		}
		BigDecimal big3 = new BigDecimal("0.00");
		if (Double.parseDouble(arg2) != 0) {
			BigDecimal big1 = new BigDecimal(arg1);
			BigDecimal big2 = new BigDecimal(arg2);
			big3 = big1.divide(big2, 6, BigDecimal.ROUND_HALF_EVEN);
		}
		return big3;
	}

	/**
	 * Multiplication: arg1 * arg2, exact BigDecimal product as a string.
	 */
	public static String mul(String arg1, String arg2) {
		if (isEmptyArg(arg1)) {
			arg1 = "0.0";
		}
		if (isEmptyArg(arg2)) {
			arg2 = "0.0";
		}
		BigDecimal big1 = new BigDecimal(arg1);
		BigDecimal big2 = new BigDecimal(arg2);
		return big1.multiply(big2).toString();
	}

	/**
	 * Subtraction: arg1 - arg2 as an exact BigDecimal.
	 */
	public static BigDecimal sub(String arg1, String arg2) {
		if (isEmptyArg(arg1)) {
			arg1 = "0.0";
		}
		if (isEmptyArg(arg2)) {
			arg2 = "0.0";
		}
		BigDecimal big1 = new BigDecimal(arg1);
		BigDecimal big2 = new BigDecimal(arg2);
		return big1.subtract(big2);
	}

	/**
	 * Addition: arg1 + arg2, exact BigDecimal sum as a string.
	 */
	public static String add(String arg1, String arg2) {
		if (isEmptyArg(arg1)) {
			arg1 = "0.0";
		}
		if (isEmptyArg(arg2)) {
			arg2 = "0.0";
		}
		BigDecimal big1 = new BigDecimal(arg1);
		BigDecimal big2 = new BigDecimal(arg2);
		return big1.add(big2).toString();
	}

	/**
	 * Addition (duplicate of {@link #add}; kept for callers, now delegates).
	 */
	public static String add2(String arg1, String arg2) {
		return add(arg1, arg2);
	}

	/**
	 * Rounds to {@code scare} decimal places (HALF_UP), then lets
	 * doubleValue() drop removable trailing zeros.
	 *
	 * @param arg   value to round
	 * @param scare number of decimal places to keep
	 */
	public static String setScare(BigDecimal arg, int scare) {
		BigDecimal bl = arg.setScale(scare, BigDecimal.ROUND_HALF_UP);
		return String.valueOf(bl.doubleValue());
	}

	/**
	 * Rounds to two decimal places (HALF_UP); trailing zeros are kept
	 * because the BigDecimal is rendered directly.
	 */
	public static String setDifScare(String arg) {
		BigDecimal bd = new BigDecimal(arg);
		BigDecimal bl = bd.setScale(2, BigDecimal.ROUND_HALF_UP);
		return bl.toString();
	}

	/**
	 * Rounds a numeric string to {@code i} decimal places (HALF_UP).
	 */
	public static String setScare(String arg, int i) {
		BigDecimal bd = new BigDecimal(arg);
		BigDecimal bl = bd.setScale(i, BigDecimal.ROUND_HALF_UP);
		return bl.toString();
	}

	/**
	 * Blank check: true when obj is null or its string form is all whitespace.
	 * (The original also reassigned obj to a trimmed copy before returning
	 * false — a no-op with no caller-visible effect, removed.)
	 *
	 * @return true when missing/blank, false otherwise
	 */
	public static boolean isMissing(Object obj) {
		return null == obj || obj.toString().trim().equals("");
	}

	/**
	 * Reads a value from the "config" resource bundle (config.properties).
	 *
	 * @param key property key
	 * @return the configured value
	 */
	public static String getPropertiesValue(String key) {
		ResourceBundle resource = ResourceBundle.getBundle("config");
		return resource.getString(key);
	}

	/** Returns a random 32-character hex UUID (dashes stripped). */
	public static String get32UUID() {
		return UUID.randomUUID().toString().trim().replaceAll("-", "");
	}

	/**
	 * Returns true when the string parses as an int, false otherwise.
	 */
	public static boolean isInteger(String value) {
		try {
			Integer.parseInt(value);
			return true;
		} catch (NumberFormatException e) {
			return false;
		}
	}

	/**
	 * Strips removable trailing zeros: "1.0" -> "1", "1.50" -> "1.5".
	 * A NumberFormat is created per call because NumberFormat is not
	 * thread-safe; the original shared a single static instance across
	 * threads, which can corrupt output under concurrent requests.
	 */
	public static String removeZero(String num) {
		return removeZero(Double.valueOf(num));
	}

	public static String removeZero(Double num) {
		return NumberFormat.getInstance().format(num);
	}
}
静态资源类
package com.acts.opencv.common.utils;
import org.springframework.web.context.ContextLoader;
/**
* 常量 创建者 Songer 创建时间 2018年3月09日
*
*/
/**
 * Application-wide constants: session keys, separators, image paths and
 * response status codes. Created by Songer, 2018-03-09.
 */
public class Constants {
/** Session key for the logged-in user. */
public static final String CURRENT_USER = "UserInfo";
/** Session key for the WeChat user info. */
public static final String WECHAT_USER = "weChatUserInfo";
/** Session key for the captcha/reference code. */
public static final String REFERENCE_CODE = "referenceCode";
public static final String SUCCESS = "success";
public static final String ERROR = "error";
public static final String SF_FILE_SEPARATOR = System.getProperty("file.separator");// platform file separator
public static final String SF_LINE_SEPARATOR = System.getProperty("line.separator");// platform line separator
public static final String SF_PATH_SEPARATOR = System.getProperty("path.separator");// platform path separator
// Absolute filesystem path of the deployed webapp root; resolved from the
// current Spring web application context at class-load time.
public static final String PATH = ContextLoader.getCurrentWebApplicationContext().getServletContext().getRealPath("/");
/**
 * Image directories (relative to the webapp root).
 */
public static final String SOURCE_IMAGE_PATH = Constants.SF_FILE_SEPARATOR + "statics"
+ Constants.SF_FILE_SEPARATOR + "sourceimage" + Constants.SF_FILE_SEPARATOR;// source image directory
public static final String DEST_IMAGE_PATH = Constants.SF_FILE_SEPARATOR + "statics" + Constants.SF_FILE_SEPARATOR
+ "destimage" + Constants.SF_FILE_SEPARATOR;// generated image directory
/**
 * Response status codes.
 */
/** Type 1 -- no error, duplicate code. */
public static final String CODE_DUPLICATE = "1";
/** Type 2 -- no error, duplicate name. */
public static final String NAME_DUPLICATE = "2";
/** Type 3 -- quantity exceeded. */
public static final String NUMBER_OVER = "3";
/** Type 0 -- no error, normal execution. */
public static final String NO_ERROR = "0";
/** Type -1 -- no error, empty result. */
public static final String NULL_POINTER = "-1";
/** Type -2 -- error, invalid parameter. */
public static final String INCORRECT_PARAMETER = "-2";
/** Type -3 -- error, execution failure. */
public static final String PROGRAM_EXECUTION_ERROR = "-3";
/** Type -5 -- error, data already deleted. */
public static final String DATA_DELETED = "-5";
/** Type -6 -- error, mismatched parameter (e.g. captcha). */
public static final String DATA_NOT_SAME = "-6";
/** Type -7 -- missing JSON file. */
public static final String NO_JSON_FILE = "-7";
/**
 * Pagination constants.
 */
public static final Integer PAGE_SIZE=10;// ten items per page
}
opencv工具类
package com.acts.opencv.common.utils;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.io.IOException;
import java.util.Date;
import java.util.Vector;
import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;
/**
 * OpenCV helper routines: Mat-to-image/byte conversion, erosion/dilation
 * and contour extraction.
 */
public class OpenCVUtil {

	/**
	 * Converts a Mat to a BufferedImage, logging the elapsed time.
	 */
	public static BufferedImage covertMat2Buffer(Mat mat) throws IOException {
		long start = System.currentTimeMillis();
		BufferedImage originalB = toBufferedImage(mat);
		System.out.println("保存读取方法2转=" + (System.currentTimeMillis() - start));
		return originalB;
	}

	/**
	 * Copies the raw pixel buffer of {@code mat} into a byte array.
	 * Bug fix: the original called get() on a freshly constructed empty Mat
	 * ("mat1") instead of the argument, so the returned buffer was always
	 * all zeros.
	 */
	public static byte[] covertMat2Byte(Mat mat) throws IOException {
		long start = System.currentTimeMillis();
		byte[] return_buff = new byte[(int) (mat.total() * mat.channels())];
		mat.get(0, 0, return_buff);
		System.out.println(mat.total() * mat.channels());
		System.out.println("保存读取方法2转=" + (System.currentTimeMillis() - start));
		return return_buff;
	}

	/**
	 * Encodes a Mat as JPEG and returns the encoded bytes.
	 * Preferred over {@link #covertMat2Byte}: the output is a valid image
	 * stream, not raw pixels.
	 */
	public static byte[] covertMat2Byte1(Mat mat) throws IOException {
		long start = System.currentTimeMillis();
		MatOfByte mob = new MatOfByte();
		Highgui.imencode(".jpg", mat, mob);
		System.out.println("Mat转byte[] 耗时=" + (System.currentTimeMillis() - start));
		return mob.toArray();
	}

	/**
	 * Converts a Mat to a BufferedImage by copying the pixel buffer.
	 * Single-channel Mats become TYPE_BYTE_GRAY, multi-channel TYPE_3BYTE_BGR.
	 */
	public static BufferedImage toBufferedImage(Mat m) {
		int type = BufferedImage.TYPE_BYTE_GRAY;
		if (m.channels() > 1) {
			type = BufferedImage.TYPE_3BYTE_BGR;
		}
		int bufferSize = m.channels() * m.cols() * m.rows();
		byte[] b = new byte[bufferSize];
		m.get(0, 0, b); // copy all pixels out of the Mat
		BufferedImage image = new BufferedImage(m.cols(), m.rows(), type);
		final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
		System.arraycopy(b, 0, targetPixels, 0, b.length);
		return image;
	}

	/**
	 * Erosion with a 3x3 rectangular kernel (erosion acts on the white
	 * regions, so black regions grow).
	 */
	public static Mat eroding(Mat source) {
		return eroding(source, 1);
	}

	/**
	 * Erosion with a rectangular kernel of size (2*erosion_size+1)^2.
	 *
	 * @param source       input image
	 * @param erosion_size kernel half-size x in (2*x+1)
	 */
	public static Mat eroding(Mat source, double erosion_size) {
		Mat resultMat = new Mat(source.rows(), source.cols(), source.type());
		Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2 * erosion_size + 1,
				2 * erosion_size + 1));
		Imgproc.erode(source, resultMat, element);
		return resultMat;
	}

	/**
	 * Dilation with a 3x3 rectangular kernel (dilation acts on the white
	 * regions, so white regions grow).
	 */
	public static Mat dilation(Mat source) {
		return dilation(source, 1);
	}

	/**
	 * Dilation with a rectangular kernel of size (2*dilation_size+1)^2.
	 *
	 * @param source        input image
	 * @param dilation_size kernel half-size x in (2*x+1)
	 * @Author 王嵩
	 * @Date 2018年2月5日
	 */
	public static Mat dilation(Mat source, double dilation_size) {
		Mat resultMat = new Mat(source.rows(), source.cols(), source.type());
		Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2 * dilation_size + 1,
				2 * dilation_size + 1));
		Imgproc.dilate(source, resultMat, element);
		return resultMat;
	}

	/**
	 * Contour extraction using RETR_EXTERNAL (outermost contours only) and
	 * CHAIN_APPROX_SIMPLE (keep only segment end points).
	 *
	 * Retrieval modes for reference: RETR_EXTERNAL = outermost only;
	 * RETR_LIST = all, flat; RETR_CCOMP = all, two levels (outer borders and
	 * hole borders); RETR_TREE = all, full nesting hierarchy.
	 * Approximation methods: CHAIN_APPROX_NONE stores every contour point;
	 * CHAIN_APPROX_SIMPLE compresses horizontal/vertical/diagonal runs;
	 * CHAIN_APPROX_TC89_* use the Teh-Chin approximation.
	 *
	 * Fix: typed the Vector — findContours requires List&lt;MatOfPoint&gt;,
	 * the previous raw Vector relied on unchecked conversions.
	 *
	 * @param source input image (will be modified by findContours)
	 * @return the detected contours
	 */
	public static Vector<MatOfPoint> findContours(Mat source) {
		Mat hierarchy = new Mat();
		int mode = Imgproc.RETR_EXTERNAL;
		int method = Imgproc.CHAIN_APPROX_SIMPLE;
		Vector<MatOfPoint> contours = new Vector<MatOfPoint>();
		Imgproc.findContours(source, contours, hierarchy, mode, method, new Point());
		return contours;
	}
}