Showing posts with label Tracking. Show all posts
Showing posts with label Tracking. Show all posts

12/21/2015

Opencv Condensation(particle filter) example source code





Dear Jihad anwar

Here is the source code you asked for.
I don't remember the particle filter equations well anymore,
but the source code still runs well. ^^
I hope it helps your study.

Note. Opencv 2.4.9 and 32 bit.

Thank you.


#include < iostream>
#include < vector>

#include <  stdio.h>      
#include <  opencv2\opencv.hpp>    
#include < opencv2\legacy\legacy.hpp>

#ifdef _DEBUG           
#pragma comment(lib, "opencv_core249d.lib")   
#pragma comment(lib, "opencv_imgproc249d.lib")   //MAT processing   
#pragma comment(lib, "opencv_objdetect249d.lib") //HOGDescriptor   
//#pragma comment(lib, "opencv_gpu249d.lib")   
//#pragma comment(lib, "opencv_features2d249d.lib")   
#pragma comment(lib, "opencv_highgui249d.lib")   
#pragma comment(lib, "opencv_ml249d.lib")   
//#pragma comment(lib, "opencv_stitching249d.lib");   
//#pragma comment(lib, "opencv_nonfree249d.lib");   
#pragma comment(lib, "opencv_video249d.lib")
#pragma comment(lib, "opencv_legacy249d.lib")
#else   
#pragma comment(lib, "opencv_core249.lib")   
#pragma comment(lib, "opencv_imgproc249.lib")   
#pragma comment(lib, "opencv_objdetect249.lib")   
//#pragma comment(lib, "opencv_gpu249.lib")   
//#pragma comment(lib, "opencv_features2d249.lib")   
#pragma comment(lib, "opencv_highgui249.lib")   
#pragma comment(lib, "opencv_ml249.lib")   
//#pragma comment(lib, "opencv_stitching249.lib");   
//#pragma comment(lib, "opencv_nonfree249.lib");   
#pragma comment(lib, "opencv_video249d.lib")   
#endif   



using namespace cv;
using namespace std;

// (1) functions for calculating the likelihood
// (1) Likelihood of a particle at pixel (x, y): how close the BGR color is
// to pure red (0, 0, 255), scored with a Gaussian (sigma = 50) over the
// Euclidean color distance. Returns a small positive weight; larger means
// "more red".
float calc_likelihood (IplImage * img, int x, int y)
{
 float b, g, r;
 float dist = 0.0, sigma = 50.0;

 // IplImage::imageData is a (signed) char buffer; without the unsigned char
 // cast, pixel values above 127 would read as negative and corrupt the
 // distance (that was a bug in the original code).
 b = (unsigned char) img->imageData[img->widthStep * y + x * 3];       //B
 g = (unsigned char) img->imageData[img->widthStep * y + x * 3 + 1];   //G
 r = (unsigned char) img->imageData[img->widthStep * y + x * 3 + 2];   //R
 dist = sqrt (b * b + g * g + (255.0 - r) * (255.0 - r));

 // Unnormalized Gaussian density used as the particle weight.
 return 1.0 / (sqrt (2.0 * CV_PI) * sigma) * expf (-dist * dist / (2.0 * sigma * sigma));
}

// Print the time elapsed since 'Atime' (a cv::getTickCount() timestamp),
// as seconds and the corresponding frames-per-second, prefixed with 'msg'.
void ProccTimePrint( unsigned long Atime , string msg)   
{   
 const unsigned long now = getTickCount();
 const float elapsedSec = (now - Atime) / getTickFrequency();
 const float framesPerSec = 1 / elapsedSec;
 printf("%s %.4lf(sec) / %.4lf(fps) \n", msg.c_str(),  elapsedSec, framesPerSec );   
}   



// Condensation (particle filter) demo: tracks the most-red region of the
// camera image with 1000 particles over a (x, y, dx, dy) state space.
int main ()
{
 unsigned long Atime;

 int i;
 double w = 0.0, h = 0.0;
 CvCapture *capture = 0;
 IplImage *frame = 0;

 // State is 4-dimensional: position (x, y) plus velocity (dx, dy).
 int n_stat = 4;
 int n_particle = 1000;
 vector<  float > vx(n_particle);
 vector<  float > vy(n_particle);

 CvConDensation *cond = 0;
 CvMat *lowerBound = 0;
 CvMat *upperBound = 0;

 int xx, yy;

 // (2) Create a capture structure for the default camera (index 0).
 capture = cvCreateCameraCapture(0);
 if (capture == 0) {
  printf ("Camera open failed\n");
  return -1;
 }

 // (3) Capture one frame and obtain the capture size.
 frame = cvQueryFrame (capture);
 if (frame == 0) {
  printf ("First frame capture failed\n");
  cvReleaseCapture (&capture);
  return -1;
 }
 w = frame->width;
 h = frame->height;

 cvNamedWindow ("Condensation", CV_WINDOW_AUTOSIZE);

 // (4) Create the Condensation structure.
 cond = cvCreateConDensation (n_stat, 0, n_particle);

 // (5) Specify the minimum and maximum value each state dimension can take.
 // Rows 0-1 bound the position (the image size); rows 2-3 bound the velocity.
 lowerBound = cvCreateMat (4, 1, CV_32FC1);
 upperBound = cvCreateMat (4, 1, CV_32FC1);

 cvmSet (lowerBound, 0, 0, 0.0);
 cvmSet (lowerBound, 1, 0, 0.0);
 cvmSet (lowerBound, 2, 0, -10.0);
 cvmSet (lowerBound, 3, 0, -10.0);
 cvmSet (upperBound, 0, 0, w);
 cvmSet (upperBound, 1, 0, h);
 cvmSet (upperBound, 2, 0, 10.0);
 cvmSet (upperBound, 3, 0, 10.0);

 // (6) Initialize the Condensation structure (uniform initial sample set).
 cvConDensInitSampleSet (cond, lowerBound, upperBound);

 // (7) Specify the dynamics of the state vector: a constant-velocity model,
 //     x' = x + dx, y' = y + dy, dx' = dx, dy' = dy (row-major 4x4 matrix).
 cond->DynamMatr[0] = 1.0;
 cond->DynamMatr[1] = 0.0;
 cond->DynamMatr[2] = 1.0;
 cond->DynamMatr[3] = 0.0;
 cond->DynamMatr[4] = 0.0;
 cond->DynamMatr[5] = 1.0;
 cond->DynamMatr[6] = 0.0;
 cond->DynamMatr[7] = 1.0;
 cond->DynamMatr[8] = 0.0;
 cond->DynamMatr[9] = 0.0;
 cond->DynamMatr[10] = 1.0;
 cond->DynamMatr[11] = 0.0;
 cond->DynamMatr[12] = 0.0;
 cond->DynamMatr[13] = 0.0;
 cond->DynamMatr[14] = 0.0;
 cond->DynamMatr[15] = 1.0;

 while (1) {
  // (8) Grab the next frame; stop if the camera stops delivering frames.
  frame = cvQueryFrame (capture);
  if (frame == 0)
   break;
  Atime = getTickCount(); // start time

  // (9) Calculate the likelihood for each particle.
  for (i = 0; i <  n_particle; i++) {
   xx = (int) (cond->flSamples[i][0]);
   yy = (int) (cond->flSamples[i][1]);

   vx[i] = cond->flSamples[i][0];
   vy[i] = cond->flSamples[i][1];

   if (xx <  0 || xx >= w || yy <  0 || yy >= h) {
    // Particle has left the image: give it zero weight.
    cond->flConfidence[i] = 0.0;
   }
   else {
    cond->flConfidence[i] = calc_likelihood (frame, xx, yy);
    // Draw each in-image particle as a small blue dot.
    cvCircle (frame, cvPoint (xx, yy), 1, CV_RGB (0, 0, 255), -1);
   }
  }

  // (10) Estimate the state of the next model (resample and propagate).
  cvConDensUpdateByTime (cond);
  printf("correction \n");

  ProccTimePrint( Atime , "time :"); // print processing time

  // Draw the estimated state (weighted mean of the particles) in white.
  cv::Point statePt(cond->State[0], cond->State[1]);
  cvCircle (frame, statePt, 5, CV_RGB (255, 255, 255), 5);
  printf("-----------\n");

  cvShowImage ("Condensation", frame);

  // Exit on a key press (any key whose code is greater than 10).
  if (cvWaitKey (10) > 10 )
   break;
 }

 cvDestroyWindow ("Condensation");

 cvReleaseCapture (&capture);
 cvReleaseConDensation (&cond);
 cvReleaseMat (&lowerBound);
 cvReleaseMat (&upperBound);

 return 0;
}




1/25/2015

MIL, Boosting tracker test in opencv 3.0

I have tested MIL, Boosting tracking algorithm in opencv 3.0

Please refer to official reference here -> http://docs.opencv.org/trunk/modules/tracking/doc/tracking.html

Firstly, to use the tracker algorithms, you have to set the OPENCV_EXTRA_MODULES_PATH option to the path of the tracking module when you build OpenCV 3.0.



We can download tracking module in Github. -> https://github.com/Itseez/opencv_contrib
"opencv_contrib" is developed separately from the main OpenCV distribution.
In the CMake settings, I checked only the tracking option, because the other modules may still be unstable,
and I only needed the tracking module.


The example source code is referenced in here -> https://github.com/lenlen/opencv/blob/tracking_api/samples/cpp/tracker.cpp

In source code, you can select Mil and Boosting algorithm option in the function of creation.

As a result, MIL is faster than Boosting.
But both algorithms are still very slow, and their performance is not good yet,
so they cannot be used in industrial applications.


MIL result video

Boosting result video


The example source code is here





#include <stdio.h>
#include <opencv2/video/tracker.hpp>   // Tracker API (opencv_contrib tracking module)
#include <opencv2/opencv.hpp>


#ifdef _DEBUG
#pragma comment(lib, "opencv_core300d.lib")
#pragma comment(lib, "opencv_highgui300d.lib")
#pragma comment(lib, "opencv_imgcodecs300d.lib")
#pragma comment(lib, "opencv_videoio300d.lib")
#pragma comment(lib, "opencv_imgproc300d.lib")
#pragma comment(lib, "opencv_cuda300d.lib")
#pragma comment(lib, "opencv_cudawarping300d.lib")
#pragma comment(lib, "opencv_tracking300d.lib")
#define _DEBUG_RRRR
#else
#pragma comment(lib, "opencv_core300.lib")
#pragma comment(lib, "opencv_highgui300.lib")
#pragma comment(lib, "opencv_imgcodecs300.lib")
#pragma comment(lib, "opencv_videoio300.lib")
#pragma comment(lib, "opencv_imgproc300.lib")
#pragma comment(lib, "opencv_cuda300.lib")
#pragma comment(lib, "opencv_cudawarping300.lib")
// Fixed: release builds previously linked the DEBUG lib ("opencv_tracking300d.lib").
#pragma comment(lib, "opencv_tracking300.lib")
#endif


using namespace cv;
using namespace std;

static Mat image;                  // latest frame shown in the "Tracking" window
static bool selectObject = false;  // true once the user has finished the selection
static bool startSelection = false;// true while the left button is held down
static Rect2d boundingBox;         // selected region, later updated by the tracker
static bool paused;

// Mouse callback: lets the user drag a rectangle over the video to choose the
// object to track. Sets 'boundingBox' and 'selectObject' when the drag ends.
static void onMouse( int event, int x, int y, int, void* ) 
{ 
 if( !selectObject ) 
 { 
  switch ( event ) 
  { 
  case EVENT_LBUTTONDOWN: 
   // Remember the press point as the provisional origin of the box.
   startSelection = true; 
   boundingBox.x = x; 
   boundingBox.y = y; 
   break; 
  case EVENT_LBUTTONUP: 
   // Set width/height, then normalize the origin to the top-left corner so
   // that a drag toward the upper-left also yields a correct rectangle
   // (the original kept the press point as the corner, which was wrong
   // for such drags).
   boundingBox.width = std::abs( x - boundingBox.x ); 
   boundingBox.height = std::abs( y - boundingBox.y ); 
   if ( x < boundingBox.x ) boundingBox.x = x; 
   if ( y < boundingBox.y ) boundingBox.y = y; 
   paused = false; 
   selectObject = true; 

   printf("Object Rect Size(left, right, width, height) %.1f %.1f %.1f %.1f\n", boundingBox.x, boundingBox.y, boundingBox.width, boundingBox.height);
   break; 
  case EVENT_MOUSEMOVE: 
   if( startSelection && !selectObject ) 
   { 
    // Draw the in-progress bounding box on a copy of the current frame.
    Mat currentFrame; 
    image.copyTo( currentFrame ); 
    rectangle( currentFrame, Point( boundingBox.x, boundingBox.y ), Point( x, y ), Scalar( 255, 0, 0 ), 2, 1 ); 
    // Fixed: was "Tracking API", which did not match the "Tracking" window
    // created in main(), so the preview appeared in a separate window.
    imshow( "Tracking", currentFrame ); 
   } 
   break; 
  } 
 } 
} 




// MIL / Boosting tracker demo (OpenCV 3.0 contrib tracking module):
// the user drags a box over the first frames, then the tracker follows it.
int main()
{
 unsigned long AAtime=0, BBtime=0;

 VideoCapture cap;
 cap.open("C:\\videoSample\\tracking\\sample2.avi");

 if( !cap.isOpened() ) 
 { 
  printf("Video File Open Fail! \n");
  return -1; 
 } 


 Mat frame; 
 paused = false; 
 namedWindow( "Tracking", 0 ); 
 setMouseCallback( "Tracking", onMouse, 0 );


 // Instantiate the specific tracker:
 //   MIL      : TrackerMIL      ("MIL")
 //   BOOSTING : TrackerBoosting ("BOOSTING")
 Ptr< Tracker> tracker = Tracker::create("BOOSTING" ); //"MIL");//
 if( tracker == NULL ) 
 { 
  printf("Error in the instantiation of the tracker..\n");
  return -1; 
 } 

 // Get the first frame.
 cap >> frame; 
 if( frame.empty() ) 
 { 
  printf("Empty first frame\n"); 
  return -1; 
 } 
 frame.copyTo( image ); 
 imshow( "Tracking", image ); 

 bool initialized = false;
 while(1)
 { 
  if( !paused ) 
  { 
   cap >> frame; 
   if( frame.empty() )   // end of the video stream
    break; 
   frame.copyTo( image ); 


   if( !initialized && selectObject ) 
   { 
    // Initialize the tracker with the user-selected bounding box.
    AAtime = getTickCount();
    if( !tracker->init( frame, boundingBox ) ) 
    { 
     printf("Could not initialize tracker\n"); 
     return -1; 
    } 
    BBtime = getTickCount();
    // Fixed: the original swapped these two — it printed elapsed seconds
    // labeled as fps and their reciprocal labeled as seconds.
    double sec = (BBtime - AAtime)/getTickFrequency();
    double fps = 1/sec;
    printf("Tracking Initial time = %.2lffps, %.2lfsec \n",  fps, sec );

    initialized = true; 
   } 
   else if( initialized ) 
   { 
    AAtime = getTickCount();

    // Update the tracker; draw the new bounding box on success.
    if( tracker->update( frame, boundingBox ) ) 
    { 
     rectangle( image, boundingBox, Scalar( 255, 0, 0 ), 2, 1 ); 
    } 

    BBtime = getTickCount();
    double sec = (BBtime - AAtime)/getTickFrequency();
    double fps = 1/sec;
    printf("Tracking update time = %.2lffps, %.2lfsec \n",  fps, sec );

   } 
   imshow( "Tracking", image ); 
  } 


  // 'q' quits, 'p' toggles pause.
  char c = (char) waitKey( 2 ); 
  if( c == 'q' ) 
   break; 
  if( c == 'p' ) 
   paused = !paused; 


 } 

 return 0;
}