Face detection and Tracking using MeanShift/CamShift

Standard

To track anything, such as a face, you must first detect your target.

the steps :-

  1. capture image from web camera.
  2. detect face using haar cascade classifier.
  3. set up a region of interest for tracking at the initial face position returned from step 2, then calculate its histogram and normalize it.
  4. grab an image from the web camera, backproject the face histogram onto the grabbed image, and apply meanshift/camshift to get the current location of the face.
  5. repeat step 4. For more explanation see
    http://docs.opencv.org/trunk/doc/py_tutorials/py_video/py_meanshift/py_meanshift.html

    -CamShiftdemo.cpp in opencv\sources\samples\cpp folder

  and finally the source code.

#include <windows.h>

#include <iomanip>

#include <opencv2/opencv.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/video/tracking.hpp>

#include "Config.h"

using namespace cv;
using namespace std;

// Globals ----------------------------------------------------------------------

// Set to true (e.g. on ESC in the main loop) to leave the capture loop.
bool doneYet = false;

// Requested capture frame size.
// NOTE(review): 480x640 is portrait orientation -- most webcams deliver
// landscape frames (640x480); confirm these two values are not swapped.
const int FRAME_WIDTH = 480;
const int FRAME_HEIGHT = 640;

/** Global variables */
// Absolute path to the frontal-face Haar cascade shipped with OpenCV;
// adjust this path for the local machine.
String face_cascade_name = "G:\\Documents\\Downloads\\Programs\\opencv\\sources\\data\\haarcascades\\haarcascade_frontalface_alt.xml";
CascadeClassifier face_cascade;
// Main ——————————————————————————————-
int main()
{

//– 1. Load the cascades
if (!face_cascade.load(face_cascade_name)){ printf(“–(!)Error loading\n”); return -1; };

//set up matrices for storage
Mat frame, gray, one;

//set up VideoCapture object to acquire the webcam feed from location 0 (default webcam location)
CvCapture* capture = cvCreateCameraCapture(0);

//set the capture frame size
// Try to set the camera resolution to 320 x 240.
cvSetCaptureProperty(capture, CV_CAP_PROP_FRAME_WIDTH, FRAME_WIDTH);
cvSetCaptureProperty(capture, CV_CAP_PROP_FRAME_HEIGHT, FRAME_HEIGHT);
vector<Rect> faces;
Rect trackWindow;
int trackObject = 0;
Mat region_of_interest, HSV_region_of_interest,mask,hist,hsv,hue,dst,dst1;
int hsize = 180;
float hranges[] = { 0, 180 };
const float* phranges = hranges;
//show the image on screen
namedWindow(“Face Tracking Using CamShift”, 1);
while (capture != NULL&&!doneYet)
{
//store image to matrix
frame = cvQueryFrame(capture);
if (frame.empty()) continue;
cvtColor(frame, hsv, COLOR_BGR2HSV);
int ch[] = { 0, 0 };
dst.create(hsv.size(), hsv.depth());
mixChannels(&hsv, 1, &dst, 1, ch, 1);

if (!trackObject){
//make a gray copy of the webcam image
cvtColor(frame, gray, COLOR_BGR2GRAY);
equalizeHist(gray, gray);
//– Detect faces
face_cascade.detectMultiScale(gray, faces, 1.1, 2, 0 | CV_HAAR_SCALE_IMAGE, Size(30, 30));
//find camera orientation if the chessboard corners have been found
if (!faces.empty())
{
Point center(faces[0].x + faces[0].width*0.5, faces[0].y + faces[0].height*0.5);
ellipse(frame, center, Size(faces[0].width*0.5, faces[0].height*0.5), 0, 0, 360, Scalar(255, 0, 255), 4, 8, 0);
//set up region of interest
trackWindow = faces[0];

HSV_region_of_interest = dst(faces[0]);
//cvtColor(region_of_interest, HSV_region_of_interest, COLOR_BGR2HSV);

inRange(HSV_region_of_interest,Scalar(0,60,32) ,Scalar(180,255,255) , mask);

calcHist(&HSV_region_of_interest,1,0, mask, hist,1, &hsize,&phranges);

normalize(hist, hist, 0, 255, CV_MINMAX);

trackObject = 1;
rectangle(frame, trackWindow, Scalar(0, 0, 255), 2);
putText(frame, “Face detected”, Point(2, 25), CV_FONT_HERSHEY_SIMPLEX, 1, Scalar(0, 255, 255));
imshow(“Face Tracking Using CamShift”, frame);
waitKey(1000);

}
}
else
{
putText(frame, “Face Tracking”, Point(2, 25), CV_FONT_HERSHEY_SIMPLEX, 1, Scalar(0, 0, 255));
//cvtColor(frame, hsv, COLOR_BGR2HSV);
int ch[] = { 0, 0 };
hue.create(hsv.size(), hsv.depth());
mixChannels(&hsv, 1, &hue, 1, ch, 1);

calcBackProject(&hue, 1,0, hist, dst1, &phranges);

RotatedRect trackBox = CamShift(dst1, trackWindow,
TermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1));

//////////////////////mean shift
//  meanShift(dst1, trackWindow,
//    TermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1));
//  /////////////////////////////

if (trackWindow.area() <= 1)
{
int cols = dst1.cols, rows = dst1.rows, r = (MIN(cols, rows) + 5) / 6;
trackWindow = Rect(trackWindow.x – r, trackWindow.y – r,
trackWindow.x + r, trackWindow.y + r) &
Rect(0, 0, cols, rows);
}
//trackWindow = trackBox.boundingRect();
//rectangle(frame, trackWindow, Scalar(0, 255, 0), 2);

ellipse(frame, trackBox, Scalar(0, 0, 255), 3, CV_AA);
imshow(“Face Tracking Using CamShift”, frame);
}

waitKey(1);
}

return 0;
}