SURF feature detector in CSharp

This project is part of the Emgu.CV.Example solution

== System Requirement ==

{|
!Component || Requirement || Detail
|-
|Emgu CV || [[Version_History#Emgu.CV-2.4.0|Version 2.4.0]] ||
|-
|Operating System || Cross Platform ||
|}

== Source Code ==

<source lang="csharp">
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
using System.Runtime.InteropServices;
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Features2D;
using Emgu.CV.Structure;
using Emgu.CV.Util;
using Emgu.CV.GPU;

namespace SURFFeatureExample
{
  public static class DrawMatches
  {
     /// <summary>
     /// Draw the model image and observed image, the matched features and homography projection.
     /// </summary>
     /// <param name="modelImageFileName">The model image</param>
     /// <param name="observedImageFileName">The observed image</param>
     /// <param name="matchTime">The output total time for computing the homography matrix.</param>
     /// <returns>The model image and observed image, the matched features and homography projection.</returns>
     public static Image<Bgr, Byte> Draw(String modelImageFileName, String observedImageFileName, out long matchTime)
     {
        Image<Gray, Byte> modelImage = new Image<Gray, byte>(modelImageFileName);
        Image<Gray, Byte> observedImage = new Image<Gray, byte>(observedImageFileName);
        Stopwatch watch;
        HomographyMatrix homography = null;
        SURFDetector surfCPU = new SURFDetector(500, false);
        VectorOfKeyPoint modelKeyPoints;
        VectorOfKeyPoint observedKeyPoints;
        Matrix<int> indices;
        Matrix<byte> mask;
        int k = 2;
        double uniquenessThreshold = 0.8;
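         // k = 2 retrieves the two nearest matches for each descriptor so the
         // uniqueness test can compare the best distance against the second best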
        if (GpuInvoke.HasCuda)
        {
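            // CUDA path: keypoint detection, descriptor extraction and k-NN
            // matching all run on the GPU; results are downloaded to the CPU
            // afterwards for the geometric checks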
           GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
           using (GpuImage<Gray, Byte> gpuModelImage = new GpuImage<Gray, byte>(modelImage))
           //extract features from the object image
           using (GpuMat<float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
           using (GpuMat<float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
           using (GpuBruteForceMatcher<float> matcher = new GpuBruteForceMatcher<float>(DistanceType.L2))
           {
              modelKeyPoints = new VectorOfKeyPoint();
              surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
              watch = Stopwatch.StartNew();
              // extract features from the observed image
              using (GpuImage<Gray, Byte> gpuObservedImage = new GpuImage<Gray, byte>(observedImage))
              using (GpuMat<float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
              using (GpuMat<float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
              using (GpuMat<int> gpuMatchIndices = new GpuMat<int>(gpuObservedDescriptors.Size.Height, k, 1, true))
              using (GpuMat<float> gpuMatchDist = new GpuMat<float>(gpuObservedDescriptors.Size.Height, k, 1, true))
              using (GpuMat<Byte> gpuMask = new GpuMat<byte>(gpuMatchIndices.Size.Height, 1, 1))
              using (Stream stream = new Stream())
              {
                 matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                 indices = new Matrix<int>(gpuMatchIndices.Size);
                 mask = new Matrix<byte>(gpuMask.Size);
                  //GPU implementation of VoteForUniqueness
                 using (GpuMat<float> col0 = gpuMatchDist.Col(0))
                 using (GpuMat<float> col1 = gpuMatchDist.Col(1))
                 {
                    GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                    GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                 }
                 observedKeyPoints = new VectorOfKeyPoint();
                 surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);
                  //wait for the stream to complete its tasks
                  //other CPU-intensive work could be done here while waiting for the stream to complete
                 stream.WaitForCompletion();
                 gpuMask.Download(mask);
                 gpuMatchIndices.Download(indices);
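                  // a homography requires at least 4 point correspondences,
                  // so skip estimation when fewer matches survived the filtering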
                 if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                 {
                    int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                       homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                 }
                 watch.Stop();
              }
           }
         }
         else
        {
           //extract features from the object image
           modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
           Matrix<float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);
           watch = Stopwatch.StartNew();
           // extract features from the observed image
           observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
           Matrix<float> observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
           BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
           matcher.Add(modelDescriptors);
           indices = new Matrix<int>(observedDescriptors.Rows, k);
           using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
           {
              matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
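               // start with every match accepted (255); VoteForUniqueness zeroes
               // the entries whose best/second-best distance ratio is too high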
              mask = new Matrix<byte>(dist.Rows, 1);
              mask.SetValue(255);
              Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
           }
           int nonZeroCount = CvInvoke.cvCountNonZero(mask);
           if (nonZeroCount >= 4)
           {
              nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
              if (nonZeroCount >= 4)
                 homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
           }
           watch.Stop();
        }
        //Draw the matched keypoints
        Image<Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
           indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
        #region draw the projected region on the image
        if (homography != null)
        {  //draw a rectangle along the projected model
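            // map the four corners of the model image through the homography to
            // outline where the model was located in the observed image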
           Rectangle rect = modelImage.ROI;
           PointF[] pts = new PointF[] { 
              new PointF(rect.Left, rect.Bottom),
              new PointF(rect.Right, rect.Bottom),
              new PointF(rect.Right, rect.Top),
              new PointF(rect.Left, rect.Top)};
           homography.ProjectPoints(pts);
           result.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
        }
        #endregion
        matchTime = watch.ElapsedMilliseconds;
        return result;
     }
  }

}
</source>
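
For reference, the sketch below shows one way this class could be called from a console entry point. It is a minimal example, assuming the sample images "box.png" and "box_in_scene.png" are in the working directory and that Emgu.CV.UI is referenced for the ImageViewer control.

<source lang="csharp">
using System;
using Emgu.CV;
using Emgu.CV.Structure;
using Emgu.CV.UI;

namespace SURFFeatureExample
{
   static class Program
   {
      [STAThread]
      static void Main()
      {
         long matchTime;
         //match "box.png" against "box_in_scene.png" and time the homography computation
         Image<Bgr, Byte> result = DrawMatches.Draw("box.png", "box_in_scene.png", out matchTime);
         //display the matched features and the projected model region
         ImageViewer.Show(result, String.Format("Matched in {0} milliseconds", matchTime));
      }
   }
}
</source>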

== Result ==

[[image:SURFExample.png]]