Image warping (using OpenCV findHomography, warpPerspective)

fig 1. Left: the 4 selected points (Left Top, Right Top, Right Bottom, Left Bottom). Right: the image warped to (0,0), (300,0), (300,300), (0,300)


First, we need the homography matrix for image warping.
A homography matrix is the transformation matrix that maps points on plane A to points on plane B in 3D space.
See more detail about homography here:
http://en.wikipedia.org/wiki/Homography_%28computer_vision%29
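
In homogeneous coordinates, this mapping can be written in the standard planar-homography form:

$$
\begin{pmatrix} x_B \\ y_B \\ 1 \end{pmatrix}
\sim
H \begin{pmatrix} x_A \\ y_A \\ 1 \end{pmatrix}
=
\begin{pmatrix}
h_{11} & h_{12} & h_{13} \\
h_{21} & h_{22} & h_{23} \\
h_{31} & h_{32} & h_{33}
\end{pmatrix}
\begin{pmatrix} x_A \\ y_A \\ 1 \end{pmatrix}
$$

where the actual pixel coordinates in B are recovered by dividing the first two components by the third.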


So, as the equation above shows, the H matrix converts the points of A into the points of B.
Here, A is the left set of 4 points and B is the right set of 4 points in fig 1.

In OpenCV, the findHomography function gives the H matrix.
The parameter order is findHomography(A, B), source points first and destination points second. Do not confuse the order.

After getting the H matrix, we can warp the image using the various transform functions in OpenCV.
In this example I use the warpPerspective function, because the selected rectangular region appears as a trapezoid in the original image.
The call is warpPerspective(origin_image, warped_image, H, cv::Size(cols, rows));
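
Stripped down to just these two calls, the whole warping step is quite short. Here is a minimal sketch; the corner coordinates are just example values (the same ones used in the Matlab confirmation at the end of this post), and the image path is the same test image as in the full source below:

#include <opencv2/opencv.hpp>

int main()
{
    cv::Mat srcImg = cv::imread("./chess.jpg");

    // 4 source corners ordered LT, RT, RB, LB (example values)
    std::vector<cv::Point2f> src(4), dst(4);
    src[0] = cv::Point2f(259.f, 126.f);  // Left Top
    src[1] = cv::Point2f(566.f, 222.f);  // Right Top
    src[2] = cv::Point2f(400.f, 473.f);  // Right Bottom
    src[3] = cv::Point2f( 33.f, 305.f);  // Left Bottom

    // target rectangle (0,0)-(300,300)
    dst[0] = cv::Point2f(  0.f,   0.f);
    dst[1] = cv::Point2f(300.f,   0.f);
    dst[2] = cv::Point2f(300.f, 300.f);
    dst[3] = cv::Point2f(  0.f, 300.f);

    cv::Mat H = cv::findHomography(src, dst);   // source points first, destination second
    cv::Mat warped;
    cv::warpPerspective(srcImg, warped, H, cv::Size(srcImg.cols, srcImg.rows));

    cv::imshow("warped", warped);
    cv::waitKey(0);
    return 0;
}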

See the test video of this example source code here.


In the source code, the part that actually gets the homography and does the warping is lines 88~108.
Lines 109~142 confirm the calculated values.
The rest of the code is for the interface and for ordering the selected points.
For the interface and the 4-point ordering, refer to this page:
http://study.marearts.com/2015/03/any-4-points-odering-by-lefttop.html

...
#include <opencv2/opencv.hpp>  
#include <string>  
#include <stdio.h>  

#ifdef _DEBUG          
#pragma comment(lib, "opencv_core249d.lib")  
#pragma comment(lib, "opencv_imgproc249d.lib")   //MAT processing  
#pragma comment(lib, "opencv_highgui249d.lib")
#pragma comment(lib, "opencv_calib3d249d.lib") 
#else  
#pragma comment(lib, "opencv_core249.lib")  
#pragma comment(lib, "opencv_highgui249.lib")
#pragma comment(lib, "opencv_calib3d249.lib") 
#endif     


using namespace std;  
using namespace cv;  

static void onMouse( int event, int x, int y, int, void* );
Point2f roi4point[4]; //default-initialized to (0,0)
int roiIndex=0;
bool oksign = false;

Point2f MinDistFind(float x, float y, Point2f* inPoints);
void PointOrderbyConner(Point2f* inPoints, int w, int h );

int main()  
{  
 //image loading
 //char fileName[100] = "./road-ahead.jpg";
 char fileName[100] = "./chess.jpg";

 //origin
 Mat GetImg = imread( fileName );
 //copy for drawing
 Mat RoiImg;
 
 //window
 namedWindow( "set roi by 4 points", 0 );  

 //mouse callback
 setMouseCallback( "set roi by 4 points", onMouse, 0 );  
 
 //point selection until 4 points setting
 while(1)
 {

  if(oksign == true) //right button click
   break;

  //draw point
  RoiImg = GetImg.clone();
  for(int i=0; i< roiIndex; ++i)
   circle(RoiImg, roi4point[i], 5,CV_RGB(255,0,255),5);
  imshow("set roi by 4 points", RoiImg);
    
  waitKey(10);
 }



 printf("points ordered by LT, RT, RB, LB \n");
 PointOrderbyConner(roi4point, GetImg.size().width,  GetImg.size().height);
 for(int i=0; i< 4; ++i)
 {
  printf("[%d] (%.2lf, %.2lf) \n",i, roi4point[i].x, roi4point[i].y );
 }


 //drawing
 RoiImg = GetImg.clone();
 string TestStr[4]={"LT","RT","RB","LB"};  
 putText(RoiImg, TestStr[0].c_str(), roi4point[0], CV_FONT_NORMAL, 1, Scalar(0,0,255),3);
 circle(RoiImg, roi4point[0], 3,CV_RGB(0,0,255));
 int i;
 for(i=1; i< roiIndex; ++i)
 {
  line(RoiImg, roi4point[i-1], roi4point[i], CV_RGB(255,0,0),1 );
  circle(RoiImg, roi4point[i], 1,CV_RGB(0,0,255),3);  
  putText(RoiImg, TestStr[i].c_str(), roi4point[i], CV_FONT_NORMAL, 1, Scalar(0,0,255),3);
 }

 line(RoiImg, roi4point[0], roi4point[i-1], CV_RGB(255,0,0),1 );
 imshow("set roi by 4 points2", RoiImg);


 //prepare to get homography matrix
 vector<Point2f> P1; //clicked positions
 vector<Point2f> P2(4); //user setting positions
 for(int i=0; i< 4; ++i)
  P1.push_back( roi4point[i] );

 //user setting position
 P2[0].x = 0; P2[0].y = 0; 
 P2[1].x = 300; P2[1].y = 0; 
 P2[2].x = 300; P2[2].y = 300; 
 P2[3].x = 0; P2[3].y = 300; 

 //get homography
 Mat H = findHomography(P1, P2);

 //warping
 Mat warped_image;
 warpPerspective(GetImg, warped_image, H,cv::Size(GetImg.cols, GetImg.rows));
 rectangle(warped_image, Point(0,0), Point(300,300), CV_RGB(255,0,0) );
 imshow("warped_image", warped_image);
 

 ///////////////////////////
 //calculation confirm
 cout << "h" << endl << H << endl;
 cout << "size rows and cols " << H.rows << " " << H.cols << endl;

 Mat A(3,4,CV_64F); //3xN, P1
 Mat B(3,4,CV_64F); //3xN, P2
 //B = H*A  (P2 = h(P1))


 for(int i=0; i< 4; ++i)
 {
  A.at<double>(0,i) = P1[i].x;
  A.at<double>(1,i) = P1[i].y;
  A.at<double>(2,i) = 1;
  

  B.at<double>(0,i) = P2[i].x;
  B.at<double>(1,i) = P2[i].y;
  B.at<double>(2,i) = 1;
 }

 cout << "a" << endl << A << endl;
 cout << "b" << endl << B << endl;
 Mat HA = H*A;
 
for(int i=0; i< 4; ++i)
 {
  HA.at<double>(0,i) /= HA.at<double>(2,i);
  HA.at<double>(1,i) /= HA.at<double>(2,i);
  HA.at<double>(2,i) /= HA.at<double>(2,i);
 }

 cout << "HA" << endl << HA << endl;

 waitKey(0);
}  

void PointOrderbyConner(Point2f* inPoints, int w, int h )
{

 vector<pair<float, float>> s_point;
 for(int i=0; i< 4; ++i)
  s_point.push_back( make_pair(inPoints[i].x, inPoints[i].y) );

 //sort by y (vertical position)
 sort(s_point.begin(), s_point.end(), [](const pair<float, float>& A, const pair<float, float>& B){ return A.second < B.second; } );

 //top two points: the one with smaller x is Left-Top, the other is Right-Top
 if( s_point[0].first < s_point[1].first )
 {
  inPoints[0].x = s_point[0].first;
  inPoints[0].y = s_point[0].second;

  inPoints[1].x = s_point[1].first;
  inPoints[1].y = s_point[1].second;

 }else{
  inPoints[0].x = s_point[1].first;
  inPoints[0].y = s_point[1].second;

  inPoints[1].x = s_point[0].first;
  inPoints[1].y = s_point[0].second;
 }

 //bottom two points: the one with larger x is Right-Bottom, the other is Left-Bottom
 if( s_point[2].first > s_point[3].first )
 {
  inPoints[2].x = s_point[2].first;
  inPoints[2].y = s_point[2].second;

  inPoints[3].x = s_point[3].first;
  inPoints[3].y = s_point[3].second;

 }else{
  inPoints[2].x = s_point[3].first;
  inPoints[2].y = s_point[3].second;

  inPoints[3].x = s_point[2].first;
  inPoints[3].y = s_point[2].second;
 }

  

}


static void onMouse( int event, int x, int y, int, void* )  
{  
 
 
    if( event == CV_EVENT_LBUTTONDOWN && oksign==false)
 {
  //4 point select
  if(roiIndex>=4)
  {
   roiIndex=0;  
   for(int i=0; i< 4; ++i)
    roi4point[i].x = roi4point[i].y =0;
  }

  roi4point[roiIndex].x = x;
  roi4point[roiIndex].y = y;

  //point coordinate print
  printf("-(%..2lf,%.2lf), 2:(%.2lf,%.2lf), 3:(%.2lf,%.2lf), 4:(%.2lf,%.2lf)\n",  
   roi4point[0].x, roi4point[0].y,roi4point[1].x, roi4point[1].y,roi4point[2].x, roi4point[2].y,roi4point[3].x, roi4point[3].y );  
  
  roiIndex++;
 }

 if(event == CV_EVENT_RBUTTONDOWN)
 {
  //set point.
  if(roiIndex == 4)
  {
   oksign = true;
   printf("Warping Start!!!\n");
  }
 }

 
 
}  


///

This is the Matlab source code used for confirmation.
x1 is the 4 points clicked in OpenCV (I copied the values into Matlab), and matlabH is calculated by the homography2d function (refer to Peter Kovesi's homepage for details about this function: http://www.csse.uwa.edu.au/~pk/Research/MatlabFns/index.html).
x2 is obtained exactly as matlabH*x1.

I tried the same x1, x2 values in OpenCV.
opencvH is the value calculated by the OpenCV source code.
The values are slightly different, but only by scale: the OpenCV H and the Matlab H become the same once each matrix is divided by its own (3,3) element so that it equals 1 (a small sketch of this normalization follows the Matlab listing below).



clc;
clear all;

x1 =[259 126 1; 566 222 1; 400 473 1; 33 305 1]';
x2 =[0 0 1; 300 0 1; 300 300 1; 0 300 1]';

matlabH = homography2d(x1, x2)


matlab_X2= matlabH*x1;
matlab_X2(:,1) = matlab_X2(:,1)/matlab_X2(3,1);
matlab_X2(:,2) = matlab_X2(:,2)/matlab_X2(3,2);
matlab_X2(:,3) = matlab_X2(:,3)/matlab_X2(3,3);
matlab_X2(:,4) = matlab_X2(:,4)/matlab_X2(3,4);

matlab_X2





opencvH = [1.021877004679779, 1.290191078534245, -427.2302201073777;
  -0.6109166533338892, 1.953660547640664, -87.93381578924605;
  5.540800373074552e-006, 0.002051557898988468, 1]

opencv_x2 = opencvH * x1;
opencv_x2(:,1) = opencv_x2(:,1)/opencv_x2(3,1);
opencv_x2(:,2) = opencv_x2(:,2)/opencv_x2(3,2);
opencv_x2(:,3) = opencv_x2(:,3)/opencv_x2(3,3);
opencv_x2(:,4) = opencv_x2(:,4)/opencv_x2(3,4);
opencv_x2



...
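
For reference, this is the normalization mentioned above; a minimal sketch in OpenCV, assuming H is the cv::Mat returned by findHomography in the code above:

// divide by the (3,3) element so that the last element becomes exactly 1
cv::Mat Hn = H / H.at<double>(2, 2);
std::cout << "normalized H" << std::endl << Hn << std::endl;

The Matlab result can be scaled the same way (matlabH / matlabH(3,3)); after that the two matrices should agree up to numerical precision.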
