Perspective Transform - Bird's-Eye View

Principles

Affine Transform

The affine transform is a commonly used geometric image transform. It maps a parallelogram ABCD onto another parallelogram A'B'C'D'.
[Figure 1]
Its mathematical expression is:

$$\begin{bmatrix} x \\ y \end{bmatrix} = \begin{bmatrix} m_{11} & m_{12} & m_{13} \\ m_{21} & m_{22} & m_{23} \end{bmatrix} \begin{bmatrix} u \\ v \\ 1 \end{bmatrix}$$

where the submatrix $\begin{bmatrix} m_{11} & m_{12} \\ m_{21} & m_{22} \end{bmatrix}$ is the rotation (linear) part and $\begin{bmatrix} m_{13} \\ m_{23} \end{bmatrix}$ is the translation vector, so that

$$M = \begin{bmatrix} m_{11} & m_{12} & m_{13} \\ m_{21} & m_{22} & m_{23} \end{bmatrix}$$

To solve for M we need six independent equations, i.e. three pairs of non-collinear corresponding points.
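Written out, each correspondence $(u_i, v_i) \rightarrow (x_i, y_i)$ contributes two linear equations in the six unknowns:

$$x_i = m_{11} u_i + m_{12} v_i + m_{13}, \qquad y_i = m_{21} u_i + m_{22} v_i + m_{23}, \qquad i = 1, 2, 3$$

so three non-collinear correspondences yield a full-rank 6x6 linear system that determines M uniquely.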

Perspective Transform

The perspective transform, also called a projective mapping, projects the image onto another plane, and that plane is not required to be parallel to the original image plane. As a result, the projected image is in general no longer a parallelogram and may, for example, become a trapezoid.
[Figure 2]
Its mathematical expression is:

$$\begin{bmatrix} x' \\ y' \\ w' \end{bmatrix} = \begin{bmatrix} h_{11} & h_{12} & h_{13} \\ h_{21} & h_{22} & h_{23} \\ h_{31} & h_{32} & h_{33} \end{bmatrix} \begin{bmatrix} u \\ v \\ 1 \end{bmatrix}$$

The transformation matrix can be divided into four parts: $\begin{bmatrix} h_{11} & h_{12} \\ h_{21} & h_{22} \end{bmatrix}$ represents a linear transform (scaling, shearing, rotation), $\begin{bmatrix} h_{13} \\ h_{23} \end{bmatrix}$ represents translation, and $\begin{bmatrix} h_{31} & h_{32} \end{bmatrix}$ produces the perspective effect.
The actual coordinates of the transformed image are then given by:

$$x = \frac{x'}{w'} = \frac{h_{11} u + h_{12} v + h_{13}}{h_{31} u + h_{32} v + h_{33}}, \qquad y = \frac{y'}{w'} = \frac{h_{21} u + h_{22} v + h_{23}}{h_{31} u + h_{32} v + h_{33}}$$

It follows that four pairs of corresponding points are needed to solve for the perspective transformation matrix.
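To see why, note that H is only defined up to a scale factor, so it has eight degrees of freedom. Multiplying the equations above by the denominator, each correspondence $(u_i, v_i) \rightarrow (x_i, y_i)$ contributes two linear equations:

$$x_i (h_{31} u_i + h_{32} v_i + h_{33}) = h_{11} u_i + h_{12} v_i + h_{13}, \qquad y_i (h_{31} u_i + h_{32} v_i + h_{33}) = h_{21} u_i + h_{22} v_i + h_{23}$$

Four correspondences (with no three points collinear) give eight such equations, which together with a normalization such as $h_{33} = 1$ determine H.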


Related OpenCV Functions

Affine Transform

void cv::warpAffine ( InputArray src,
OutputArray dst,
InputArray M,
Size dsize,
int flags = INTER_LINEAR,
int borderMode = BORDER_CONSTANT,
const Scalar & borderValue = Scalar()
)
This is the function that applies an affine transform.
Key parameters:
M - the 2x3 transformation matrix
dsize - size of the output image
flags - interpolation method
The three main flag values are:

INTER_NEAREST nearest neighbor interpolation
INTER_LINEAR bilinear interpolation
WARP_INVERSE_MAP flag, inverse transformation

The third flag indicates that M already represents the inverse mapping $M^{-1}$ (from destination to source), so it is used directly instead of being inverted:
flag is not set: M is the forward mapping, dst(x, y) = src(u, v) with (x, y) = M (u, v, 1)^T (OpenCV inverts M internally)
flag is set: M is the inverse mapping, dst(x, y) = src(M (x, y, 1)^T)


Mat cv::getAffineTransform ( const Point2f src[],const Point2f dst[])
This function computes the 2x3 affine transformation matrix from three pairs of corresponding points.
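A minimal usage sketch, assuming an already-loaded image img; the point coordinates below are arbitrary illustrative values:

// pick three corresponding points in the source and destination images
Point2f srcTri[3] = { Point2f(0, 0), Point2f(img.cols - 1, 0), Point2f(0, img.rows - 1) };
Point2f dstTri[3] = { Point2f(0, img.rows * 0.1f), Point2f(img.cols * 0.9f, 0), Point2f(img.cols * 0.1f, img.rows * 0.9f) };
Mat A = getAffineTransform(srcTri, dstTri);   // 2x3 matrix, type CV_64F
Mat warped;
warpAffine(img, warped, A, img.size());       // INTER_LINEAR by default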


Perspective Transform
void cv::warpPerspective ( InputArray src,
OutputArray dst,
InputArray M,
Size dsize,
int flags = INTER_LINEAR,
int borderMode = BORDER_CONSTANT,
const Scalar & borderValue = Scalar()
)
This is the function that applies a perspective transform.
Its parameters have essentially the same meaning as those of the affine transform, except that M is a 3x3 matrix.


Mat cv::getPerspectiveTransform ( const Point2f src[],
const Point2f dst[]
)
Given four pairs of corresponding points, this function computes the 3x3 matrix needed for the perspective transform.
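A minimal usage sketch, again assuming a loaded image img; the quadrilateral coordinates are arbitrary illustrative values:

// four corresponding points: a quadrilateral in the source mapped to a rectangle
Point2f srcQuad[4] = { Point2f(100, 100), Point2f(500, 120), Point2f(520, 400), Point2f(80, 380) };
Point2f dstQuad[4] = { Point2f(0, 0), Point2f(400, 0), Point2f(400, 300), Point2f(0, 300) };
Mat H = getPerspectiveTransform(srcQuad, dstQuad);   // 3x3 matrix, type CV_64F
Mat out;
warpPerspective(img, out, H, Size(400, 300));
// equivalently, pass the inverse mapping together with WARP_INVERSE_MAP:
// warpPerspective(img, out, H.inv(), Size(400, 300), INTER_LINEAR | WARP_INVERSE_MAP);

Passing WARP_INVERSE_MAP, as the bird's-eye-view code below does, tells warpPerspective that the supplied matrix already maps destination pixels back to source pixels.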

Bird's-Eye View

A common task in robot navigation is converting the camera view of the robot's scene into a top-down "overhead" view, i.e. a bird's-eye view.
The basic flow of the algorithm is as follows: read the camera calibration, undistort the input image, detect the chessboard corners, pick four corner correspondences, compute the homography, and warp the image with it.
[Figure 3: algorithm flowchart]
The following OpenCV 3 code was adapted from the sample code in the book Learning OpenCV:

//@vs2013
//opencv 3.00
//This program implements the bird's-eye-view transform. It still has an open issue:
//not every image can be converted to a proper top-down view, and the reason has not
//been fully worked out yet.
//
#include <opencv2/opencv.hpp>
#include <iostream>
using namespace cv;
using namespace std;
//usage: <board_w> <board_h> <calibration_file> <image>
int main(int argc, char** argv)
{
    if (argc != 5)
        return -1;
    Mat birdImage;
    int board_w = atoi(argv[1]);
    int board_h = atoi(argv[2]);
    int board_n = board_w*board_h;
    Size board_sz = Size(board_w, board_h);
    FileStorage fs(argv[3], FileStorage::READ);
    Mat intrinsicMatrix, distortionCoeffs;
    fs["intrinsicMatrix"] >> intrinsicMatrix;
    fs["distortionCoeffs"] >> distortionCoeffs;
    fs.release();
    cout << intrinsicMatrix << "\n" << distortionCoeffs << endl;
    Mat srcImage = imread(argv[4], 1);
    if (srcImage.empty())
    {
        cout << "Error:Could`t load" << endl;
        return false;
    }
    Mat grayImage;
    //cvtColor(srcImage, grayImage, CV_BGR2GRAY);
    //undistort image
    //
    Mat R;
        //= Mat::zeros(Size(3,3),CV_32FC1);
    Mat newCamaraMatrix = intrinsicMatrix.clone();
    Mat mapx, mapy;
    //remove lens distortion
    initUndistortRectifyMap(intrinsicMatrix,
        distortionCoeffs,
        R,
        newCamaraMatrix,
        srcImage.size(),
        CV_32FC1, mapx, mapy);
    //Mat undisImage;
    Mat t = srcImage.clone();
    remap(t, srcImage, mapx, mapy, INTER_NEAREST);
    cvtColor(srcImage, grayImage, COLOR_BGR2GRAY);
    namedWindow("The original image");
    imshow("The original image", t);
    t.release();
    namedWindow("The undistort image");

    namedWindow("grayImage");
    imshow("grayImage", grayImage);
    waitKey(100);
    vector<Point2f> corners;
    //find the chessboard corners; four of them will be used as correspondences
    bool found = findChessboardCorners(srcImage, board_sz, corners);
    if (found == 1)
    {
        cornerSubPix(grayImage, corners, Size(5, 5), Size(-1, -1),
            TermCriteria(TermCriteria::COUNT + TermCriteria::EPS, 30, 0.1));
    }
    cout << corners << endl;
    drawChessboardCorners(srcImage, board_sz, corners, found);
    namedWindow("chess");
    imshow("chess", srcImage);
    Point2f imagePoints[4], objectPoints[4];
    objectPoints[0].x = 0;  objectPoints[0].y = 0;
    objectPoints[1].x = board_w - 1;    objectPoints[1].y = 0;
    objectPoints[2].x = 0;  objectPoints[2].y = board_h-1;
    objectPoints[3].x = board_w-1;  objectPoints[3].y = board_h-1;
    imagePoints[0] = corners.at(0);
    imagePoints[1] = corners.at(board_w - 1);
    imagePoints[2] = corners.at(board_w*(board_h - 1));
    imagePoints[3] = corners.at(board_n - 1);
    circle(srcImage, imagePoints[0], 5, Scalar(0, 0, 255),-1);
    circle(srcImage, imagePoints[1], 5, Scalar(0, 255, 0), -1);
    circle(srcImage, imagePoints[2], 5, Scalar(255, 0, 0), -1);
    circle(srcImage, imagePoints[3], 5, Scalar(255, 255, 255), -1);
    cout << objectPoints[3] << imagePoints[3] << endl;
    Mat H;
    //compute the homography
    /*We compute the homography from the object points to the image points (rather than
    directly from image points to object points) mainly because it lets us control the size
    of the transformed output image. The controlling parameter is h33; MATLAB experiments
    show that h33 is linearly and monotonically related to the output coordinates, which
    keeps the image from being distorted.*/
    H = getPerspectiveTransform(objectPoints,imagePoints);
    /*H = getPerspectiveTransform(imagePoints, objectPoints);*/
    H.at<double>(2,2) = 25;//parameter that controls the output image scale
    Mat a=H.inv();
    cout << H << "\n"</*birdImage = srcImage.clone();*/
    warpPerspective(srcImage, birdImage, H, srcImage.size() , WARP_INVERSE_MAP+INTER_LINEAR);
    /*Mat M(Size(3, 2), CV_32FC1);
    M.at(0, 0) = 1;  M.at(0, 1) = 0;  M.at(0, 2) = 0;
    M.at(1, 0) = 0;  M.at(1, 1) = 1;  M.at(1, 2) = 50;
    cout << "M" << M << endl;
    Mat bird = birdImage.clone();
    warpAffine(bird, birdImage, M, bird.size());*/
    /*warpPerspective(srcImage, birdImage, H, srcImage.size());*/
    /*Mat tempBirdImage = birdImage.clone();
    Mat M = Mat::ones(Size(3, 2), CV_32FC1);
    M.at(0, 2) = 100;
    cout << M;
    warpAffine(tempBirdImage, birdImage, M, tempBirdImage.size());*/
    namedWindow("birdImage");
    imshow("birdImage", birdImage);
    imshow("The undistort image", srcImage);
    waitKey(0);
    return 0;
}
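For reference, a possible invocation (the executable and file names here are placeholders; board_w and board_h are the chessboard's inner-corner counts, and the calibration file must contain the intrinsicMatrix and distortionCoeffs nodes written by FileStorage):

birdseye 9 6 calibration.yml view.jpg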

Results:
Original image
[Figure 4]
Undistorted image
[Figure 5]
Bird's-eye view
[Figure 6]

Notes

This program still has a problem: for some images it cannot produce a bird's-eye view of sufficient size, even after adjusting the h33 parameter appropriately.
For example:
Undistorted image
[Figure 7]
Bird's-eye view (obtained with h33 = 1)
[Figure 8]
