SURF feature detector in CSharp

<font color=green>'''This project is part of the Emgu.CV.Example solution'''</font>
The example loads a model image (''box.png'') and a scene image (''box_in_scene.png''), extracts SURF features from both, matches them with a <code>SURFTracker</code>, and draws the matched feature pairs together with the projected outline of the model in the scene.

== System Requirement ==
{| style="text-align:center" border="1px" cellpadding="10" cellspacing="0"
!Component || Requirement || Detail
|-
|Emgu CV || [[Version_History#Emgu.CV-2.0.0.0_Alpha|Version 2.0.0.0 Alpha]] ||
|-
|Operating System || Cross Platform ||
|}

== Source Code ==
<source lang="csharp">
using System;
using System.Collections.Generic;
using System.Windows.Forms;
using System.Drawing;
using Emgu.CV;
using Emgu.CV.UI;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;

namespace SURFFeatureExample
{
   static class Program
   {
      /// <summary>
      /// The main entry point for the application.
      /// </summary>
      [STAThread]
      static void Main()
      {
         Application.EnableVisualStyles();
         Application.SetCompatibleTextRenderingDefault(false);
         Run();
      }

      static void Run()
      {
         MCvSURFParams surfParam = new MCvSURFParams(500, false);

         Image<Gray, Byte> modelImage = new Image<Gray, byte>("box.png");
         //extract features from the object image
         SURFFeature[] modelFeatures = modelImage.ExtractSURF(ref surfParam);

         Image<Gray, Byte> observedImage = new Image<Gray, byte>("box_in_scene.png");
         //extract features from the observed image
         SURFFeature[] imageFeatures = observedImage.ExtractSURF(ref surfParam);

         //Create a SURF Tracker using k-d Tree
         SURFTracker tracker = new SURFTracker(modelFeatures);
         //Comment out above and uncomment below if you wish to use spill-tree instead
         //SURFTracker tracker = new SURFTracker(modelFeatures, 50, .7, .1);

         //match each observed feature to the model features, then filter the
         //matches by uniqueness, size and orientation before estimating the homography
         SURFTracker.MatchedSURFFeature[] matchedFeatures = tracker.MatchFeature(imageFeatures, 2, 20);
         matchedFeatures = SURFTracker.VoteForUniqueness(matchedFeatures, 0.8);
         matchedFeatures = SURFTracker.VoteForSizeAndOrientation(matchedFeatures, 1.5, 20);
         HomographyMatrix homography = SURFTracker.GetHomographyMatrixFromMatchedFeatures(matchedFeatures);

         //Merge the object image and the observed image into one image for display
         Image<Gray, Byte> res = modelImage.ConcateVertical(observedImage);

         #region draw lines between the matched features
         foreach (SURFTracker.MatchedSURFFeature matchedFeature in matchedFeatures)
         {
            PointF p = matchedFeature.ObservedFeature.Point.pt;
            p.Y += modelImage.Height;
            res.Draw(new LineSegment2DF(matchedFeature.ModelFeatures[0].Point.pt, p), new Gray(0), 1);
         }
         #endregion

         #region draw the projected region on the image
         if (homography != null)
         {  //draw a rectangle along the projected model
            Rectangle rect = modelImage.ROI;
            PointF[] pts = new PointF[] {
               new PointF(rect.Left, rect.Bottom),
               new PointF(rect.Right, rect.Bottom),
               new PointF(rect.Right, rect.Top),
               new PointF(rect.Left, rect.Top)};
            homography.ProjectPoints(pts);

            //shift the projected corners down by the model image height so
            //they land on the observed (lower) half of the merged image
            for (int i = 0; i < pts.Length; i++)
               pts[i].Y += modelImage.Height;

            res.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Gray(255.0), 5);
         }
         #endregion

         ImageViewer.Show(res);
      }
   }
}
</source>
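
The matching above is governed by a few thresholds. In <code>MCvSURFParams(500, false)</code> the 500 is the hessian threshold that decides how strong an interest point must be before it is kept, <code>VoteForUniqueness(matchedFeatures, 0.8)</code> rejects matches whose best and second-best neighbour distances are too similar, and <code>VoteForSizeAndOrientation</code> drops matches whose scale and rotation disagree with the consensus. As a minimal sketch (the stricter values 800 and 0.6 below are illustrative assumptions, not recommendations), the same calls can be rerun to trade match count for reliability:

<source lang="csharp">
//A sketch of the matching pipeline with stricter, assumed thresholds.
//Uses the same using directives as the full listing above.
Image<Gray, Byte> modelImage = new Image<Gray, byte>("box.png");
Image<Gray, Byte> observedImage = new Image<Gray, byte>("box_in_scene.png");

MCvSURFParams strictParam = new MCvSURFParams(800, false);  //800: assumed stricter hessian threshold
SURFFeature[] modelFeatures = modelImage.ExtractSURF(ref strictParam);
SURFFeature[] imageFeatures = observedImage.ExtractSURF(ref strictParam);

SURFTracker tracker = new SURFTracker(modelFeatures);
SURFTracker.MatchedSURFFeature[] matches = tracker.MatchFeature(imageFeatures, 2, 20);
matches = SURFTracker.VoteForUniqueness(matches, 0.6);      //0.6: assumed tighter uniqueness cutoff
matches = SURFTracker.VoteForSizeAndOrientation(matches, 1.5, 20);
HomographyMatrix homography = SURFTracker.GetHomographyMatrixFromMatchedFeatures(matches);
</source>

Fewer but more reliable matches tend to give a more stable homography, at the cost of sometimes failing to locate the model at all; as the drawing code above already anticipates, <code>GetHomographyMatrixFromMatchedFeatures</code> can then yield <code>null</code>.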


== Result ==
[[image:SURFExample.png]]
