Emgu.CV.CvInvoke.cvCamShift C# (CSharp) Метод

cvCamShift() публичный статический Метод

public static cvCamShift ( IntPtr probImage, Rectangle window, MCvTermCriteria criteria, MCvConnectedComp &comp, MCvBox2D &box ) : int
probImage IntPtr
window Rectangle
criteria MCvTermCriteria
comp MCvConnectedComp
box MCvBox2D
Результат int
        /// <summary>
        /// Native binding for OpenCV's cvCamShift: runs the CAMSHIFT object-tracking
        /// algorithm on a back-projection/probability image, starting from the given
        /// search window, and reports the converged region.
        /// NOTE(review): the [DllImport] attribute is presumably declared on this
        /// member but is not visible in this excerpt — confirm against the full file.
        /// </summary>
        /// <param name="probImage">Pointer to the back-projection (probability) image (native IplImage)</param>
        /// <param name="window">Initial search window</param>
        /// <param name="criteria">Termination criteria (max iterations / epsilon) for the underlying mean-shift loop</param>
        /// <param name="comp">Receives the resulting connected component (converged window and its area)</param>
        /// <param name="box">Receives the oriented bounding box of the tracked object</param>
        /// <returns>Result code from the native cvCamShift call</returns>
        public static extern int cvCamShift(
         IntPtr probImage,
         Rectangle window,
         MCvTermCriteria criteria,
         out MCvConnectedComp comp,
         out MCvBox2D box);

Usage Example

Пример #1
0
        /// <summary>
        /// Use camshift to track the feature: build a per-pixel match-probability map
        /// from SURF feature matches, multiply it by the caller's prior mask, run
        /// camshift to localize the object, then compute a homography from the
        /// matches that fall inside the converged region.
        /// </summary>
        /// <param name="observedFeatures">The feature found from the observed image</param>
        /// <param name="initRegion">The predicted location of the model in the observed image. If not known, use MCvBox2D.Empty as default</param>
        /// <param name="priorMask">The mask that should be the same size as the observed image. Contains a priori value of the probability a match can be found. If you are not sure, pass an image fills with 1.0s</param>
        /// <returns>If a match is found, the homography projection matrix is returned. Otherwise null is returned</returns>
        public HomographyMatrix CamShiftTrack(SURFFeature[] observedFeatures, MCvBox2D initRegion, Image <Gray, Single> priorMask)
        {
            // Probability map the size of the observed image; presumably a freshly
            // constructed Image<Gray, Single> is zero-filled — TODO confirm.
            using (Image <Gray, Single> matchMask = new Image <Gray, Single>(priorMask.Size))
            {
                #region get the list of matched point on the observed image
                Single[, ,] matchMaskData = matchMask.Data;

                //Compute the matched features (2 nearest neighbors, 20-tree search),
                //then keep only matches that pass the 0.8 uniqueness-ratio vote.
                MatchedSURFFeature[] matchedFeature = _matcher.MatchFeature(observedFeatures, 2, 20);
                matchedFeature = VoteForUniqueness(matchedFeature, 0.8);

                // Stamp each surviving match into the mask at its pixel location.
                // Weight is the inverse of the best match distance, so closer
                // (better) matches contribute higher probability. Note the [Y, X]
                // index order of Emgu's Data array.
                foreach (MatchedSURFFeature f in matchedFeature)
                {
                    PointF p = f.ObservedFeature.Point.pt;
                    matchMaskData[(int)p.Y, (int)p.X, 0] = 1.0f / (float)f.SimilarFeatures[0].Distance;
                }
                #endregion

                // Pick the camshift starting window: the whole image ROI when no
                // prior region was given, otherwise the prior box's axis-aligned
                // bounding rectangle clipped to the image ROI.
                Rectangle startRegion;
                if (initRegion.Equals(MCvBox2D.Empty))
                {
                    startRegion = matchMask.ROI;
                }
                else
                {
                    startRegion = PointCollection.BoundingRectangle(initRegion.GetVertices());
                    if (startRegion.IntersectsWith(matchMask.ROI))
                    {
                        startRegion.Intersect(matchMask.ROI);
                    }
                }

                // Element-wise multiply the match map by the caller's prior mask;
                // the result is written back into matchMask (in-place destination).
                CvInvoke.cvMul(matchMask.Ptr, priorMask.Ptr, matchMask.Ptr, 1.0);

                MCvConnectedComp comp;
                MCvBox2D         currentRegion;
                //Updates the current location: camshift converges on the densest
                //probability region, up to 10 iterations or epsilon 1e-8.
                CvInvoke.cvCamShift(matchMask.Ptr, startRegion, new MCvTermCriteria(10, 1.0e-8), out comp, out currentRegion);

                #region find the SURF features that belongs to the current Region
                MatchedSURFFeature[] featuesInCurrentRegion;
                using (MemStorage stor = new MemStorage())
                {
                    // Build a 4-vertex polygon contour from the camshift result box.
                    Contour <System.Drawing.PointF> contour = new Contour <PointF>(stor);
                    contour.PushMulti(currentRegion.GetVertices(), Emgu.CV.CvEnum.BACK_OR_FRONT.BACK);

                    CvInvoke.cvBoundingRect(contour.Ptr, 1); //this is required before calling the InContour function

                    // Keep matches whose observed keypoint lies inside or on the
                    // contour (InContour >= 0; negative presumably means outside,
                    // per OpenCV's point-polygon test — confirm).
                    featuesInCurrentRegion = Array.FindAll(matchedFeature,
                                                           delegate(MatchedSURFFeature f)
                                                           { return(contour.InContour(f.ObservedFeature.Point.pt) >= 0); });
                }
                #endregion

                // Re-vote on scale/rotation consistency (scale bin factor 1.5,
                // 20 rotation bins) among the in-region matches, then derive the
                // homography from the survivors; may return null if no match.
                return(GetHomographyMatrixFromMatchedFeatures(VoteForSizeAndOrientation(featuesInCurrentRegion, 1.5, 20)));
            }
        }
CvInvoke