Showing posts with label FarnebackOpticalFlow. Show all posts
Showing posts with label FarnebackOpticalFlow. Show all posts

5/28/2015

OpenCV 3.0 rc1, FarnebackOpticalFlow(dense optical flow) example source code.

This article introduces FarnebackOpticalFlow example in opencv 3.0 rc1.
The FarnebackOpticalFlow class has also moved, so the DLLs it needs to reference have changed.

This example is similar to the one here (http://study.marearts.com/2014/04/opencv-study-example-source-code-for.html).


This example shows which DLLs and header files are needed to use the FarnebackOpticalFlow class.

Required Dlls are follows :
(tbb.dll is my option)

And refer to this example.

#include < iostream>  
#include < opencv2\opencv.hpp>  
#include < opencv2\highgui.hpp>  
//#include < opencv2\imgcodecs.hpp>  
#include < opencv2\videoio.hpp> 
#include < opencv2\core\cuda.hpp>
#include < opencv2\imgproc.hpp>
#include < opencv2\cudawarping.hpp>
#include < opencv2\cudaimgproc.hpp>
//#include < opencv2\cudaarithm.hpp>
#include < opencv2\cudaoptflow.hpp>


#ifdef _DEBUG             
#pragma comment(lib, "opencv_core300d.lib")     
#pragma comment(lib, "opencv_highgui300d.lib")  
//#pragma comment(lib, "opencv_imgcodecs300d.lib")  //imread
#pragma comment(lib, "opencv_videoio300d.lib") //video capture
#pragma comment(lib, "opencv_imgproc300d.lib") //line, circle
#pragma comment(lib, "opencv_cudawarping300d.lib") //cuda::resize
#pragma comment(lib, "opencv_cudaimgproc300.lib") //cuda::cvtcolor
#pragma comment(lib, "opencv_cudaarithm300d.lib") //cuda::farnebackOpticalFlow
#pragma comment(lib, "opencv_cudaoptflow300d.lib") 
#else     
#pragma comment(lib, "opencv_core300.lib")     
#pragma comment(lib, "opencv_highgui300.lib")  
//#pragma comment(lib, "opencv_imgcodecs300.lib")  //imread
#pragma comment(lib, "opencv_videoio300.lib") //video capture
#pragma comment(lib, "opencv_imgproc300.lib") // //line, circle
#pragma comment(lib, "opencv_cudawarping300.lib") //cuda::resize
#pragma comment(lib, "opencv_cudaimgproc300.lib") //cuda::cvtcolor
#pragma comment(lib, "opencv_cudaarithm300.lib") //cuda::farnebackOpticalFlow
#pragma comment(lib, "opencv_cudaoptflow300.lib") 

#endif      

using namespace std;
using namespace cv;

void drawOptFlowMap_gpu(const Mat& flow_xy, Mat& cflowmap, int step, const Scalar& color);

// Demonstrates cuda::FarnebackOpticalFlow (dense optical flow) with OpenCV 3.0:
// reads frames from a video file, computes GPU dense flow between consecutive
// frames, and draws the flow vectors over each frame.
// Returns 0 in all cases (including when the video cannot be opened).
int main()
{

 // Resize divisor: every frame is processed at (width/s, height/s).
 int s = 1;

 // Tick counts used to time the optical-flow call.
 unsigned long AAtime = 0, BBtime = 0;

 //variables  
 Mat GetImg, flow_x, flow_y, next, prvs, flow_xy;

 //gpu variables: *_o holds the raw upload, *_c the resized color frame,
 //prvs_gpu/next_gpu the grayscale inputs fed to the flow algorithm.
 cuda::GpuMat prvs_gpu, next_gpu, flow_x_gpu, flow_y_gpu, flow_xy_gpu;
 cuda::GpuMat prvs_gpu_o, next_gpu_o;
 cuda::GpuMat prvs_gpu_c, next_gpu_c;

 //file name  
 char fileName[100] = "M:\\____videoSample____\\Rendering\\Wildlife.avi";
 //video file open  
 VideoCapture stream1(fileName);   //0 is the id of video device.0 if you have only one camera
 if (!(stream1.read(GetImg))) //get one frame from video
  return 0;

 //gpu upload, resize, color convert
 prvs_gpu_o.upload(GetImg);

 
 cuda::resize(prvs_gpu_o, prvs_gpu_c, Size(GetImg.size().width / s, GetImg.size().height / s));
 cuda::cvtColor(prvs_gpu_c, prvs_gpu, CV_BGR2GRAY);

 //dense optical flow: in OpenCV 3.0 the algorithm is created via a factory
 //and the result comes back as one two-channel (x, y) flow matrix.
 Ptr< cuda::FarnebackOpticalFlow > fbOF = cuda::FarnebackOpticalFlow::create();

 

 //unconditional loop: runs until the video ends or a key is pressed
 while (true) {

  if (!(stream1.read(GetImg))) //get one frame from video     
   break;

   ///////////////////////////////////////////////////////////////////  
  //gpu upload, resize, color convert  
  next_gpu_o.upload(GetImg);
  cuda::resize(next_gpu_o, next_gpu_c, Size(GetImg.size().width / s, GetImg.size().height / s));
  cuda::cvtColor(next_gpu_c, next_gpu, CV_BGR2GRAY);
  ///////////////////////////////////////////////////////////////////  

  AAtime = getTickCount();
  //dense optical flow between the previous and current grayscale frames
  fbOF->calc(prvs_gpu, next_gpu, flow_xy_gpu);

  BBtime = getTickCount();
  //elapsed seconds for the flow call, and the equivalent frame rate
  float pt = (BBtime - AAtime) / getTickFrequency();
  float fpt = 1 / pt;
  printf("%.2lf / %.2lf \n", pt, fpt);

  //copy for vector flow drawing  
  Mat cflow;
  resize(GetImg, cflow, Size(GetImg.size().width / s, GetImg.size().height / s));  
  flow_xy_gpu.download(flow_xy);
  drawOptFlowMap_gpu(flow_xy, cflow, 10, CV_RGB(0, 255, 0));
  imshow("OpticalFlowFarneback", cflow);

  ///////////////////////////////////////////////////////////////////  
  //Display gpumat  
  next_gpu.download(next);
  prvs_gpu.download(prvs);
  imshow("next", next);
  imshow("prvs", prvs);

  //prvs mat update: the current frame becomes the previous frame
  prvs_gpu = next_gpu.clone();

  if (waitKey(5) >= 0)
   break;
 }


 return 0;
}

void drawOptFlowMap_gpu(const Mat& flow_xy, Mat& cflowmap, int step, const Scalar& color)
{

 for (int y = 0; y < cflowmap.rows; y += step)
  for (int x = 0; x < cflowmap.cols; x += step)
  {
   Point2f fxy;
   fxy.x = cvRound(flow_xy.at< Vec2f >(y, x)[0] + x);
   fxy.y = cvRound(flow_xy.at< Vec2f >(y, x)[1] + y);


   cv::line(cflowmap, Point(x, y), Point(fxy.x, fxy.y), color);
   cv::circle(cflowmap, Point(fxy.x, fxy.y), 1, color, -1);

  }

}




4/27/2015

To categorize big, middle, small of camera movement using k-means

Mr. Juan asked me to categorize the camera movement rate into big, middle, and small.
He is researching to find meaningful scene in endoscopic video.

One of his approaches is to use optical flow.
refer to this page.
http://study.marearts.com/2015/01/the-method-to-check-that-camera-is.html
Dense optical flow can measure how much the video is moving.
The source code of reference page checked camera move or not by the percentage of movement.
There are 2 threshold values.
Count number of moved pixels, this is counted when pixels moved over than threshold-1 value.
And check whether the counted pixels is over than threshold-2 percent or not in all pixels.

By the way, he asked me to separate the video movement into big, middle, and small classes using the k-means clustering algorithm, without thresholds.
In past, I introduced the usage of k-mean algorithm using openCV.
See this page.
http://study.marearts.com/search/label/K-means


This work is separated by 3 steps.
Step 1 is to calculate movement rate in video using optical flow.
And save the frame number and movement rate information to a txt file.

Step 2 is clustering by 3 class degree movement using k-means.
read step 1 file and write clustering information to txt file.

Step 3 is for display.
Read the txt file from step 2 and display the moving rate on the video.

refer to these source code.
Thank you.

Step 1.
Getting movement rate of frames.


moving rate is calculated.

...
#include < stdio.h>  

#include < opencv2\opencv.hpp>  
#include < opencv2/core/core.hpp>  
#include < opencv2/highgui/highgui.hpp>  
#include < opencv2\gpu\gpu.hpp>  
#include < opencv2\nonfree\features2d.hpp >      



#ifdef _DEBUG          
#pragma comment(lib, "opencv_core249d.lib")  
#pragma comment(lib, "opencv_imgproc249d.lib")   //MAT processing  
#pragma comment(lib, "opencv_objdetect249d.lib") //HOGDescriptor  
#pragma comment(lib, "opencv_gpu249d.lib")  
#pragma comment(lib, "opencv_features2d249d.lib")  
#pragma comment(lib, "opencv_highgui249d.lib")  
#else  
#pragma comment(lib, "opencv_core249.lib")  
#pragma comment(lib, "opencv_imgproc249.lib")  
#pragma comment(lib, "opencv_objdetect249.lib")  
#pragma comment(lib, "opencv_gpu249.lib")  
#pragma comment(lib, "opencv_features2d249.lib")  
#pragma comment(lib, "opencv_highgui249.lib")  
#endif   

using namespace std;  
using namespace cv;  

#define WIDTH_DENSE (80)  
#define HEIGHT_DENSE (60)  

#define DENSE_DRAW 0 //dense optical flow arrow drawing or not  
#define GLOBAL_MOTION_TH1 1  
#define GLOBAL_MOTION_TH2 70  


float drawOptFlowMap_gpu (const Mat& flow_x, const Mat& flow_y, Mat& cflowmap, int step, float scaleX, float scaleY, int drawOnOff);  


int main()  
{  
 //stream /////////////////////////////////////////////////  
 VideoCapture stream1("M:\\____videoSample____\\medical\\HUV-03-14.wmv");

 //variables /////////////////////////////////////////////  
 Mat O_Img; //Mat  
 gpu::GpuMat O_Img_gpu; //GPU  
 gpu::GpuMat R_Img_gpu_dense; //gpu dense resize  
 gpu::GpuMat R_Img_gpu_dense_gray_pre; //gpu dense resize gray  
 gpu::GpuMat R_Img_gpu_dense_gray; //gpu dense resize gray  
 gpu::GpuMat flow_x_gpu, flow_y_gpu;  
 Mat flow_x, flow_y;  

 //algorithm *************************************  
 //dense optical flow  
 gpu::FarnebackOpticalFlow fbOF;  


 //running once //////////////////////////////////////////  
 if(!(stream1.read(O_Img))) //get one frame form video  
 {  
  printf("Open Fail !!\n");  
  return 0;   
 }  

 //for rate calucation  
 float scaleX, scaleY;  
 scaleX = O_Img.cols/WIDTH_DENSE;  
 scaleY = O_Img.rows/HEIGHT_DENSE;  

 O_Img_gpu.upload(O_Img);   
 gpu::resize(O_Img_gpu, R_Img_gpu_dense, Size(WIDTH_DENSE, HEIGHT_DENSE));  
 gpu::cvtColor(R_Img_gpu_dense, R_Img_gpu_dense_gray_pre, CV_BGR2GRAY);  


 //////////////////////////////////////////////////////////
 FILE *fp = fopen("DataOutput.txt","w");

 //unconditional loop   ///////////////////////////////////  
 int frame=0;
 int untilFrame=1000;
 while (true) {  
  frame++;
  if(frame>untilFrame)  //stop point.
   break;

  //reading  
  if( stream1.read(O_Img) == 0) //get one frame form video     
   break;  

  // ---------------------------------------------------  
  //upload cou mat to gpu mat  
  O_Img_gpu.upload(O_Img);   
  //resize  
  gpu::resize(O_Img_gpu, R_Img_gpu_dense, Size(WIDTH_DENSE, HEIGHT_DENSE));  
  //color to gray  
  gpu::cvtColor(R_Img_gpu_dense, R_Img_gpu_dense_gray, CV_BGR2GRAY);  

  //calculate dense optical flow using GPU version  
  fbOF.operator()(R_Img_gpu_dense_gray_pre, R_Img_gpu_dense_gray, flow_x_gpu, flow_y_gpu);  
  flow_x_gpu.download( flow_x );  
  flow_y_gpu.download( flow_y );  


  //calculate motion rate in whole image  
  float motionRate = drawOptFlowMap_gpu(flow_x, flow_y, O_Img, 1, scaleX, scaleY, DENSE_DRAW);  
  //update pre image  
  R_Img_gpu_dense_gray_pre = R_Img_gpu_dense_gray.clone();  



  //display "moving rate (0~100%)" and save to txt with frame
  char TestStr[100];  
  sprintf(TestStr, "%.2lf %% moving", motionRate);
  putText(O_Img, TestStr, Point(30,60), CV_FONT_NORMAL, 1, Scalar(255,255,255),2,2); //OutImg is Mat class;     
  
  //output "frame, motionRate" to txt
  fprintf(fp,"%d %.2lf\n", frame, motionRate);

  // show image ----------------------------------------  
  imshow("Origin", O_Img);     

  // wait key  
  if( cv::waitKey(100) > 30)  
   break;  
 }  

 fclose(fp);
}  



// Computes the percentage (0~100) of sampled dense-grid pixels whose flow
// magnitude exceeds GLOBAL_MOTION_TH1; optionally draws the flow vectors.
// flow_x, flow_y : HEIGHT_DENSE x WIDTH_DENSE CV_32F flow components.
// cflowmap       : full-size image the vectors are drawn onto when drawOnOff.
// step           : sample stride over the dense-flow grid.
// scaleX, scaleY : dense-grid -> full-image coordinate scale factors.
// drawOnOff      : nonzero to draw the vectors (drawing does not affect the rate).
float drawOptFlowMap_gpu (const Mat& flow_x, const Mat& flow_y, Mat& cflowmap, int step, float scaleX, float scaleY, int drawOnOff)  
{  
 double count=0;          //number of samples visited
 float countOverTh1 = 0;  //samples whose flow magnitude exceeds threshold 1

 for(int y = 0; y < HEIGHT_DENSE; y += step)  
 {  
  for(int x = 0; x < WIDTH_DENSE; x += step)  
  {  

   if(drawOnOff)  
   {  
    //arrow end point in full-image coordinates
    Point2f fxy;      
    fxy.x = cvRound( flow_x.at< float >(y, x)*scaleX + x*scaleX );     
    fxy.y = cvRound( flow_y.at< float >(y, x)*scaleY + y*scaleY );     
    line(cflowmap, Point(x*scaleX,y*scaleY), Point(fxy.x, fxy.y), CV_RGB(0, 255, 0));     
    circle(cflowmap, Point(fxy.x, fxy.y), 1, CV_RGB(0, 255, 0), -1);     
   }  

   //flow magnitude at this sample
   float xx = fabs(flow_x.at< float >(y, x) );  
   float yy = fabs(flow_y.at< float >(y, x) );  

   float xxyy = sqrt(xx*xx + yy*yy);  
   if( xxyy > GLOBAL_MOTION_TH1 )  
    countOverTh1 = countOverTh1 +1;  

   count=count+1;  
  }  
 }  
 //count >= 1 because both loops always run at least once, so this is safe
 return (countOverTh1 / count) * 100;  

}  

...


step 2. clustering movement rate to 3 classes.
The movement rates are clustered by 3 values

Frames and class ID

...
#include <  stdio.h>     
#include <  iostream>     
#include <  opencv2\opencv.hpp>     


#ifdef _DEBUG             
#pragma comment(lib, "opencv_core249d.lib")     
#pragma comment(lib, "opencv_imgproc249d.lib")   //MAT processing     
#pragma comment(lib, "opencv_highgui249d.lib")     
#else     
#pragma comment(lib, "opencv_core249.lib")     
#pragma comment(lib, "opencv_imgproc249.lib")     
#pragma comment(lib, "opencv_highgui249.lib")     
#endif    

using namespace cv;  
using namespace std;  

void main()  
{  

 //read data
 FILE* fp = fopen("DataOutput.txt","r");
 vector< float > readDataV;
 int frames;
 double movingRate;
 while(fscanf(fp,"%d %lf", &frames, &movingRate) != EOF )
 {
  readDataV.push_back( movingRate );
  //printf("%d %lf \n", frames, movingRate);
 }
 fclose(fp);
 
 //preparing variables for kmeans
 Mat samples(readDataV.size(), 1, CV_32F);  
 //copy vector to mat
 memcpy(samples.data, readDataV.data(), readDataV.size()*sizeof(float) );

 //kmean
 int clusterCount = 3;
 Mat labels;
 int attempts = 10;
 Mat centers;
 kmeans(samples, clusterCount, labels, 
  TermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS, 10, 1.0), 
  attempts, KMEANS_RANDOM_CENTERS, centers );

 //result out
 //frames, class index
 for(int i=0; i< clusterCount; ++i)
 {
  printf("%d class center %lf\n",  i, centers.at< float>(i,0) );
 }
 printf("\n");
 
 
 FILE* fp2 = fopen("ResultKmeans.txt", "w");
 for(int i=0; i< readDataV.size(); ++i)
 {
  //printf("%d %d\n", i, labels.at< int>(i,0) );
  fprintf(fp2, "%d %d\n", i+1, labels.at< int>(i,0) );
 }
 fclose(fp2);

}  
...

step 3. display movement labels on the video.

display 3 type of movement category(big, middle, small).

...
#include < time.h>
#include < opencv2\opencv.hpp>
#include < string>
#include < stdio.h>


#ifdef _DEBUG        
#pragma comment(lib, "opencv_core249d.lib")
#pragma comment(lib, "opencv_imgproc249d.lib")   //MAT processing
#pragma comment(lib, "opencv_highgui249d.lib")
#else
#pragma comment(lib, "opencv_core249.lib")
#pragma comment(lib, "opencv_imgproc249.lib")
#pragma comment(lib, "opencv_highgui249.lib")
#endif   

using namespace std;
using namespace cv;

int main()
{
 /////////////////////////////////////////////////////////////////////////
 //read file
 FILE* fp = fopen("ResultKmeans.txt","r");
 vector< int > readDataV;
 int frames;
 int labels;
 while(fscanf(fp,"%d %d", &frames, &labels) != EOF )
 {
  readDataV.push_back( labels );
  //printf("%d %d \n", frames, labels);
 }
 fclose(fp);


 //Load avi file 
 VideoCapture stream1("M:\\____videoSample____\\medical\\HUV-03-14.wmv");
 /////////////////////////////////////////////////////////////////////////

 //Mat and GpuMat
 Mat o_frame; 

 //capture
 stream1 >> o_frame;
 if( o_frame.empty() )
   return 0; 
 //////////////////////////////////////////////////////////////////////////

 int frame=0;
 int untilFrame=1000;
 while(1)
 {
  frame++;
  if(frame>untilFrame)  //stop point.
   break;

  /////////////////////////////////////////////////////////////////////////
  stream1 >> o_frame;
  if( o_frame.empty() )
   return 0;


  char TestStr[100];    
  if(readDataV[frame-1] == 0)  
   sprintf(TestStr, "Big moving");   
  else if(readDataV[frame-1] == 1)
   sprintf(TestStr, "middle moving");
  else
   sprintf(TestStr, "small moving");

  putText(o_frame, TestStr, Point(30,60), CV_FONT_NORMAL, 1, Scalar(255,255,255),2,2); //OutImg is Mat class;     


  //Display   
  imshow("origin", o_frame);  
  /////////////////////////////////////////////////////////////////////////

  if( waitKey(10) > 0)
   break;
 }

 return 0;
}


...











4/28/2014

(OpenCV Study) the example source code for using FarnebackOpticalFlow function (dense optical flow, gpu function)

This is GPU version of this post.
http://feelmare.blogspot.kr/2014/04/opencv-study-calcopticalflowfarneback.html

In the GPU mode, the function returns two flow components: the x direction and the y direction.
In the GPU mode, the resize and cvtColor functions cannot write their output to the same GpuMat variable as the input.
And a GpuMat cannot access individual pixels using at(x, y).


The GPU mode is about 10 times faster than the CPU mode in my environment.

refer to this example source code and video.




#include < stdio.h>
#include < iostream>

#include < opencv2\opencv.hpp>
#include < opencv2/core/core.hpp>
#include < opencv2/highgui/highgui.hpp>
#include < opencv2/video/background_segm.hpp>
#include < opencv2\gpu\gpu.hpp>

#ifdef _DEBUG        
#pragma comment(lib, "opencv_core247d.lib")
#pragma comment(lib, "opencv_imgproc247d.lib")   //MAT processing
#pragma comment(lib, "opencv_objdetect247d.lib") //HOGDescriptor
#pragma comment(lib, "opencv_gpu247d.lib")
//#pragma comment(lib, "opencv_features2d247d.lib")
#pragma comment(lib, "opencv_highgui247d.lib")
#pragma comment(lib, "opencv_ml247d.lib")
//#pragma comment(lib, "opencv_stitching247d.lib");
//#pragma comment(lib, "opencv_nonfree247d.lib");
#pragma comment(lib, "opencv_video247d.lib")
#else
#pragma comment(lib, "opencv_core247.lib")
#pragma comment(lib, "opencv_imgproc247.lib")
#pragma comment(lib, "opencv_objdetect247.lib")
#pragma comment(lib, "opencv_gpu247.lib")
//#pragma comment(lib, "opencv_features2d247.lib")
#pragma comment(lib, "opencv_highgui247.lib")
#pragma comment(lib, "opencv_ml247.lib")
//#pragma comment(lib, "opencv_stitching247.lib");
//#pragma comment(lib, "opencv_nonfree247.lib");
#pragma comment(lib, "opencv_video247d.lib")
#endif 

using namespace cv;
using namespace std;


void drawOptFlowMap_gpu (const Mat& flow_x, const Mat& flow_y, Mat& cflowmap, int step, const Scalar& color) {

 

 for(int y = 0; y < cflowmap.rows; y += step)
        for(int x = 0; x < cflowmap.cols; x += step)
        {
   Point2f fxy; 
   fxy.x = cvRound( flow_x.at< float >(y, x) + x );
   fxy.y = cvRound( flow_y.at< float >(y, x) + y );
   
   line(cflowmap, Point(x,y), Point(fxy.x, fxy.y), color);
   circle(cflowmap, Point(fxy.x, fxy.y), 1, color, -1);
        }
}



// GPU version of the Farneback dense-optical-flow demo (OpenCV 2.4.x gpu
// module): reads a video, computes flow between consecutive frames on the
// GPU, and draws the flow vectors on a downscaled copy of each frame.
int main()
{
 //resize scale: frames are processed at (width/s, height/s)
 int s=4;

 // Tick counts used to time the optical-flow call.
 unsigned long AAtime=0, BBtime=0;

 //variables
 Mat GetImg, flow_x, flow_y, next, prvs;
 
 //gpu variables: *_o holds the raw upload, *_c the resized color frame,
 //prvs_gpu/next_gpu the grayscale inputs fed to the flow algorithm.
 gpu::GpuMat prvs_gpu, next_gpu, flow_x_gpu, flow_y_gpu;
 gpu::GpuMat prvs_gpu_o, next_gpu_o;
 gpu::GpuMat prvs_gpu_c, next_gpu_c;

 //file name
 char fileName[100] = ".\\mm2.avi"; //Gate1_175_p1.avi"; //video\\mm2.avi"; //mm2.avi"; //cctv 2.mov"; //mm2.avi"; //";//_p1.avi";
 //video file open
 VideoCapture stream1(fileName);   //0 is the id of video device.0 if you have only one camera   
 if(!(stream1.read(GetImg))) //get one frame from video
  return 0;



 //////////////////////////////////////////////////////////////////////////////////////////////
 //CPU equivalent of the block below, kept for comparison:
 //resize(GetImg, prvs, Size(GetImg.size().width/s, GetImg.size().height/s) );
 //cvtColor(prvs, prvs, CV_BGR2GRAY);
 //prvs_gpu.upload(prvs);
 //////////////////////////////////////////////////////////////////////////////////////////////
 //gpu upload, resize, color convert
 prvs_gpu_o.upload(GetImg);
 gpu::resize(prvs_gpu_o, prvs_gpu_c, Size(GetImg.size().width/s, GetImg.size().height/s) );
 gpu::cvtColor(prvs_gpu_c, prvs_gpu, CV_BGR2GRAY);
 /////////////////////////////////////////////////////////////////////////////////////////////

 //dense optical flow: in 2.4.x the algorithm is a plain object invoked
 //via operator(), returning separate x and y flow matrices
 gpu::FarnebackOpticalFlow fbOF;

 //unconditional loop: runs until the video ends or a key is pressed
 while (true) {   
  
  if(!(stream1.read(GetImg))) //get one frame from video   
   break;

  ///////////////////////////////////////////////////////////////////
  //CPU equivalent, kept for comparison:
  //resize(GetImg, next, Size(GetImg.size().width/s, GetImg.size().height/s) );
  //cvtColor(next, next, CV_BGR2GRAY);
  //next_gpu.upload(next);
  ///////////////////////////////////////////////////////////////////
  //gpu upload, resize, color convert
  next_gpu_o.upload(GetImg);
  gpu::resize(next_gpu_o, next_gpu_c, Size(GetImg.size().width/s, GetImg.size().height/s) );
  gpu::cvtColor(next_gpu_c, next_gpu, CV_BGR2GRAY);
  ///////////////////////////////////////////////////////////////////

  AAtime = getTickCount();
  //dense optical flow between the previous and current grayscale frames
  fbOF.operator()(prvs_gpu, next_gpu, flow_x_gpu, flow_y_gpu);
  //fbOF(prvs_gpu, next_gpu, flow_x_gpu, flow_y_gpu);
  BBtime = getTickCount();
  //elapsed seconds for the flow call, and the equivalent frame rate
  float pt = (BBtime - AAtime)/getTickFrequency();
  float fpt = 1/pt;
  printf("%.2lf / %.2lf \n",  pt, fpt );

  //copy for vector flow drawing
  Mat cflow;
  resize(GetImg, cflow, Size(GetImg.size().width/s, GetImg.size().height/s) );
  flow_x_gpu.download( flow_x );
  flow_y_gpu.download( flow_y );
  drawOptFlowMap_gpu(flow_x, flow_y, cflow, 10 , CV_RGB(0, 255, 0));
  imshow("OpticalFlowFarneback", cflow);

  ///////////////////////////////////////////////////////////////////
  //Display gpumat
  next_gpu.download( next );
  prvs_gpu.download( prvs );
  imshow("next", next );
  imshow("prvs", prvs );

  //prvs mat update: the current frame becomes the previous frame
  prvs_gpu = next_gpu.clone();
  
  if (waitKey(5) >= 0)   
   break;
 }
}