5/01/2014

(OpenCV Study) setMouseCallback function example source code: get the rectangle coordinates of a mouse drag on an image.

This source code is useful when you need the rectangle coordinates of a mouse drag.
The rectangle coordinates can then be used as the initial region for tracking.

Refer to the video and source code below.

// BUG FIX: the original had a space after every '<' (an HTML-escaping
// artifact from the blog), e.g. `#include < stdio.h>`, which prevents the
// preprocessor from finding the headers. Spaces removed.
#include <stdio.h>
#include <iostream>

#include <opencv2\opencv.hpp>
//#include <opencv2/core/core.hpp>
//#include <opencv2/highgui/highgui.hpp>
//#include <opencv2/video/background_segm.hpp>
//#include <opencv2\gpu\gpu.hpp>
//#include <opencv2\legacy\legacy.hpp>

#ifdef _DEBUG        
#pragma comment(lib, "opencv_core247d.lib")
//#pragma comment(lib, "opencv_imgproc247d.lib")   //MAT processing
//#pragma comment(lib, "opencv_objdetect247d.lib") //HOGDescriptor
//#pragma comment(lib, "opencv_gpu247d.lib")
//#pragma comment(lib, "opencv_features2d247d.lib")
#pragma comment(lib, "opencv_highgui247d.lib")
//#pragma comment(lib, "opencv_ml247d.lib")
//#pragma comment(lib, "opencv_stitching247d.lib");
//#pragma comment(lib, "opencv_nonfree247d.lib");
//#pragma comment(lib, "opencv_video247d.lib")
//#pragma comment(lib, "opencv_legacy247d.lib")
#else
#pragma comment(lib, "opencv_core247.lib")
//#pragma comment(lib, "opencv_imgproc247.lib")
//#pragma comment(lib, "opencv_objdetect247.lib")
//#pragma comment(lib, "opencv_gpu247.lib")
//#pragma comment(lib, "opencv_features2d247.lib")
#pragma comment(lib, "opencv_highgui247.lib")
//#pragma comment(lib, "opencv_ml247.lib")
//#pragma comment(lib, "opencv_stitching247.lib");
//#pragma comment(lib, "opencv_nonfree247.lib");
//#pragma comment(lib, "opencv_video247.lib")
//#pragma comment(lib, "opencv_legacy247.lib")
#endif 

using namespace std;
using namespace cv;

// Shared state between the mouse callback (onMouse) and the main loop.
bool selectObject = false;  // true while the left button is held down (drag in progress)
Rect selection;             // current drag rectangle, clipped to the image bounds
Point origin;               // anchor point where the left button was pressed
int trackObject = 0;        // set to -1 when a non-empty selection is completed
Mat image;                  // copy of the current frame, drawn on and displayed


// Mouse callback: tracks a left-button drag and stores the dragged
// rectangle (clipped to the image) in the global `selection`.
static void onMouse( int event, int x, int y, int, void* )
{
    if( selectObject )
    {
        // While dragging, the selection is the axis-aligned box spanned by
        // the anchor point and the current cursor position.
        const int left = MIN(x, origin.x);
        const int top  = MIN(y, origin.y);
        selection = Rect(left, top, std::abs(x - origin.x), std::abs(y - origin.y));
        // Clip to the image so the ROI constructed from it stays valid.
        selection &= Rect(0, 0, image.cols, image.rows);
    }

    if( event == CV_EVENT_LBUTTONDOWN )
    {
        // Start a new drag: anchor at the press position, empty box.
        origin = Point(x, y);
        selection = Rect(x, y, 0, 0);
        selectObject = true;
    }
    else if( event == CV_EVENT_LBUTTONUP )
    {
        selectObject = false;
        // A non-degenerate box signals that a region is ready for tracking.
        if( selection.width > 0 && selection.height > 0 )
            trackObject = -1;
    }
}


int main (void)  
{  
 

 VideoCapture cap(0);
 Mat frame;
 namedWindow( "Demo", 0 );
 setMouseCallback( "Demo", onMouse, 0 );

    for(;;)
    {
        
  cap >> frame;
        if( frame.empty() )
   break;

        frame.copyTo(image);
  

  if( selectObject && selection.width > 0 && selection.height > 0 )
        {
            Mat roi(image, selection);
            bitwise_not(roi, roi);
   printf("%d %d %d %d\n", selection.x, selection.y, selection.width, selection.height);
        }

  imshow( "Demo", image );

  if( waitKey(10) > 10 )
   break;
 }

 return 0;  
}  



4/28/2014

(OpenCV Study) example source code for using the FarnebackOpticalFlow function (dense optical flow, GPU function)

This is GPU version of this post.
http://feelmare.blogspot.kr/2014/04/opencv-study-calcopticalflowfarneback.html

In GPU mode, the function returns two flow components: one for the x direction and one for the y direction.
In GPU mode, functions such as resize and cvtColor cannot write their output into the same GpuMat variable they read from.
Also, a GpuMat's pixels cannot be accessed directly with at(x, y).


In my computer environment, GPU mode is about 10 times faster than CPU mode.

Refer to the example source code and video below.




// BUG FIX: the original had a space after every '<' (an HTML-escaping
// artifact from the blog), e.g. `#include < stdio.h>`, which prevents the
// preprocessor from finding the headers. Spaces removed.
#include <stdio.h>
#include <iostream>

#include <opencv2\opencv.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/video/background_segm.hpp>
#include <opencv2\gpu\gpu.hpp>

// Auto-link the required OpenCV 2.4.7 libraries (MSVC-specific).
// Debug builds use the 'd'-suffixed libraries, release builds the plain ones.
#ifdef _DEBUG        
#pragma comment(lib, "opencv_core247d.lib")
#pragma comment(lib, "opencv_imgproc247d.lib")   //MAT processing
#pragma comment(lib, "opencv_objdetect247d.lib") //HOGDescriptor
#pragma comment(lib, "opencv_gpu247d.lib")
//#pragma comment(lib, "opencv_features2d247d.lib")
#pragma comment(lib, "opencv_highgui247d.lib")
#pragma comment(lib, "opencv_ml247d.lib")
//#pragma comment(lib, "opencv_stitching247d.lib");
//#pragma comment(lib, "opencv_nonfree247d.lib");
#pragma comment(lib, "opencv_video247d.lib")
#else
#pragma comment(lib, "opencv_core247.lib")
#pragma comment(lib, "opencv_imgproc247.lib")
#pragma comment(lib, "opencv_objdetect247.lib")
#pragma comment(lib, "opencv_gpu247.lib")
//#pragma comment(lib, "opencv_features2d247.lib")
#pragma comment(lib, "opencv_highgui247.lib")
#pragma comment(lib, "opencv_ml247.lib")
//#pragma comment(lib, "opencv_stitching247.lib");
//#pragma comment(lib, "opencv_nonfree247.lib");
// BUG FIX: the release branch linked the DEBUG video library
// ("opencv_video247d.lib"), mixing debug/release CRTs; use the release lib.
#pragma comment(lib, "opencv_video247.lib")
#endif 

using namespace cv;
using namespace std;


void drawOptFlowMap_gpu (const Mat& flow_x, const Mat& flow_y, Mat& cflowmap, int step, const Scalar& color) {

 

 for(int y = 0; y < cflowmap.rows; y += step)
        for(int x = 0; x < cflowmap.cols; x += step)
        {
   Point2f fxy; 
   fxy.x = cvRound( flow_x.at< float >(y, x) + x );
   fxy.y = cvRound( flow_y.at< float >(y, x) + y );
   
   line(cflowmap, Point(x,y), Point(fxy.x, fxy.y), color);
   circle(cflowmap, Point(fxy.x, fxy.y), 1, color, -1);
        }
}



// Demo entry point: reads a video file, computes dense Farneback optical
// flow on the GPU between consecutive (downscaled, grayscale) frames, and
// draws the flow vectors. Prints per-frame timing of the flow call.
int main()
{
 //resize scale: frames are downscaled by this factor before processing
 int s=4;

 //tick counts used to time the optical-flow call
 unsigned long AAtime=0, BBtime=0;

 //variables (CPU-side images)
 Mat GetImg, flow_x, flow_y, next, prvs;
 
 //gpu variable
 //NOTE: separate GpuMats are required because gpu::resize / gpu::cvtColor
 //cannot write into the same GpuMat they read from.
 gpu::GpuMat prvs_gpu, next_gpu, flow_x_gpu, flow_y_gpu;
 //_o: original-size upload; _c: resized (still color) intermediate
 gpu::GpuMat prvs_gpu_o, next_gpu_o;
 gpu::GpuMat prvs_gpu_c, next_gpu_c;

 //file name
 char fileName[100] = ".\\mm2.avi"; //Gate1_175_p1.avi"; //video\\mm2.avi"; //mm2.avi"; //cctv 2.mov"; //mm2.avi"; //";//_p1.avi";
 //video file open
 VideoCapture stream1(fileName);   //0 is the id of video device.0 if you have only one camera   
 if(!(stream1.read(GetImg))) //read the first frame; bail out if the file cannot be opened/read
  return 0;



 //////////////////////////////////////////////////////////////////////////////////////////////
 //CPU version of the preprocessing below, kept for reference:
 //resize(GetImg, prvs, Size(GetImg.size().width/s, GetImg.size().height/s) );
 //cvtColor(prvs, prvs, CV_BGR2GRAY);
 //prvs_gpu.upload(prvs);
 //////////////////////////////////////////////////////////////////////////////////////////////
 //gpu upload, resize, color convert: prvs_gpu ends up as the downscaled
 //grayscale "previous" frame the flow is computed against
 prvs_gpu_o.upload(GetImg);
 gpu::resize(prvs_gpu_o, prvs_gpu_c, Size(GetImg.size().width/s, GetImg.size().height/s) );
 gpu::cvtColor(prvs_gpu_c, prvs_gpu, CV_BGR2GRAY);
 /////////////////////////////////////////////////////////////////////////////////////////////

 //dense optical flow functor (GPU implementation)
 gpu::FarnebackOpticalFlow fbOF;

 //unconditional loop   
 while (true) {   
  
  if(!(stream1.read(GetImg))) //read the next frame; stop at end of stream
   break;

  ///////////////////////////////////////////////////////////////////
  //CPU version, kept for reference:
  //resize(GetImg, next, Size(GetImg.size().width/s, GetImg.size().height/s) );
  //cvtColor(next, next, CV_BGR2GRAY);
  //next_gpu.upload(next);
  ///////////////////////////////////////////////////////////////////
  //gpu upload, resize, color convert (same pipeline as for prvs_gpu)
  next_gpu_o.upload(GetImg);
  gpu::resize(next_gpu_o, next_gpu_c, Size(GetImg.size().width/s, GetImg.size().height/s) );
  gpu::cvtColor(next_gpu_c, next_gpu, CV_BGR2GRAY);
  ///////////////////////////////////////////////////////////////////

  AAtime = getTickCount();
  //dense optical flow: fills flow_x_gpu / flow_y_gpu with the per-pixel
  //x- and y-displacements from prvs_gpu to next_gpu
  fbOF.operator()(prvs_gpu, next_gpu, flow_x_gpu, flow_y_gpu);
  //fbOF(prvs_gpu, next_gpu, flow_x_gpu, flow_y_gpu);
  BBtime = getTickCount();
  //pt: seconds spent in the flow call; fpt: equivalent frames per second
  float pt = (BBtime - AAtime)/getTickFrequency();
  float fpt = 1/pt;
  printf("%.2lf / %.2lf \n",  pt, fpt );

  //copy for vector flow drawing (downscaled color frame as background)
  Mat cflow;
  resize(GetImg, cflow, Size(GetImg.size().width/s, GetImg.size().height/s) );
  //download the flow components back to the CPU for drawing
  flow_x_gpu.download( flow_x );
  flow_y_gpu.download( flow_y );
  drawOptFlowMap_gpu(flow_x, flow_y, cflow, 10 , CV_RGB(0, 255, 0));
  imshow("OpticalFlowFarneback", cflow);

  ///////////////////////////////////////////////////////////////////
  //Display gpumat contents (must be downloaded to a Mat first)
  next_gpu.download( next );
  prvs_gpu.download( prvs );
  imshow("next", next );
  imshow("prvs", prvs );

  //prvs mat update: deep copy so next iteration's upload into next_gpu
  //cannot alias the previous frame
  prvs_gpu = next_gpu.clone();
  
  if (waitKey(5) >= 0)   
   break;
 }
}