% Create a cascade detector object.
faceDetector = vision.CascadeObjectDetector();
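% By default the cascade detector uses a frontal-face classification model.
% Other pretrained models can be selected by name when constructing the
% object; a minimal sketch, assuming an upper-body model suited the footage
% better (not used further in this example):
% upperBodyDetector = vision.CascadeObjectDetector('UpperBody');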
% Read a video frame and run the detector.
videoFileReader = vision.VideoFileReader('visionface.avi');
videoFrame = step(videoFileReader);
bbox = step(faceDetector, videoFrame);
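% The detector returns one bounding box per detection, so bbox can have
% zero or several rows. A minimal guard, assuming the example should simply
% stop when no face is found in the first frame:
if isempty(bbox)
    error('No face detected in the first frame.');
end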
% Draw the returned bounding box around the detected face.
videoOut = insertObjectAnnotation(videoFrame,'rectangle',bbox,'Face');
figure, imshow(videoOut), title('Detected face');
% Get the skin tone information by extracting the Hue from the video frame
% converted to the HSV color space.
[hueChannel,~,~] = rgb2hsv(videoFrame);
% Display the Hue channel data and draw the bounding box around the face.
figure, imshow(hueChannel), title('Hue channel data');
rectangle('Position',bbox(1,:),'LineWidth',2,'EdgeColor',[1 1 0])

% Detect the nose within the face region. The nose provides a more accurate
% measure of the skin tone because it does not contain any background
% pixels.
noseDetector = vision.CascadeObjectDetector('Nose', 'UseROI', true);
noseBBox = step(noseDetector, videoFrame, bbox(1,:));
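% If the nose detector finds nothing inside the face region, noseBBox is
% empty and the tracker cannot be initialized from it. A minimal fallback,
% assuming the full face box is an acceptable substitute in that case:
if isempty(noseBBox)
    noseBBox = bbox(1,:);
end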
% Create a tracker object.
tracker = vision.HistogramBasedTracker;
% Initialize the tracker histogram using the Hue channel pixels from the
% nose.
initializeObject(tracker, hueChannel, noseBBox(1,:));
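% If tracking later drifts (for example after a scene cut), the search
% window can be re-seeded from a fresh detection without recomputing the
% histogram. A minimal sketch, assuming the tracker's initializeSearchWindow
% method is available and newBBox is a hypothetical new face detection:
% initializeSearchWindow(tracker, newBBox);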
% Create a video player object for displaying video frames.
videoInfo = info(videoFileReader);
videoPlayer = vision.VideoPlayer('Position',[300 300 videoInfo.VideoSize+30]);
% Track the face over successive video frames until the video is finished.
while ~isDone(videoFileReader)
    % Extract the next video frame.
    videoFrame = step(videoFileReader);
    % Convert the frame to HSV and keep only the Hue channel.
    [hueChannel,~,~] = rgb2hsv(videoFrame);
    % Track the face using the Hue channel data.
    bbox = step(tracker, hueChannel);
    % Insert a bounding box around the tracked face.
    videoOut = insertObjectAnnotation(videoFrame,'rectangle',bbox,'Face');
    % Display the annotated video frame.
    step(videoPlayer, videoOut);
end

% Release resources.
release(videoFileReader);
release(videoPlayer);
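% vision.VideoFileReader is discouraged in newer MATLAB releases in favor of
% VideoReader. A minimal sketch of the same tracking loop built on
% VideoReader, assuming the tracker has already been initialized above and
% 'visionface.avi' is still on the path:
videoReader = VideoReader('visionface.avi');
altPlayer = vision.VideoPlayer('Position',[300 300 videoInfo.VideoSize+30]);
while hasFrame(videoReader)
    % Convert to single in [0 1] to match the data type that
    % vision.VideoFileReader returned in the loop above.
    videoFrame = im2single(readFrame(videoReader));
    [hueChannel,~,~] = rgb2hsv(videoFrame);
    bbox = step(tracker, hueChannel);
    videoOut = insertObjectAnnotation(videoFrame,'rectangle',bbox,'Face');
    step(altPlayer, videoOut);
end
release(altPlayer);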