SURF feature detector in CSharp

<font color=green>'''This project is part of the Emgu.CV.Example solution'''</font>
----

== System Requirement ==

{| class="wikitable"
! Component !! Requirement !! Detail
|-
| Emgu CV || Version 2.0.0.0 Alpha ||
|-
| Operating System || Cross Platform ||
|}

== Source Code ==

<source lang="csharp">
using System;
using System.Collections.Generic;
using System.Windows.Forms;
using System.Drawing;
using Emgu.CV;
using Emgu.CV.UI;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;

namespace SURFFeatureExample
{
  static class Program
  {
     /// <summary>
     /// The main entry point for the application.
     /// </summary>
     [STAThread]
     static void Main()
     {
        Application.EnableVisualStyles();
        Application.SetCompatibleTextRenderingDefault(false);
        Run();
     }
     static void Run()
     {
        MCvSURFParams surfParam = new MCvSURFParams(500, false);
        Image<Gray, Byte> modelImage = new Image<Gray, byte>("box.png");
        //extract features from the object image
        SURFFeature[] modelFeatures = modelImage.ExtractSURF(ref surfParam);
        Image<Gray, Byte> observedImage = new Image<Gray, byte>("box_in_scene.png");
        // extract features from the observed image
        SURFFeature[] imageFeatures = observedImage.ExtractSURF(ref surfParam);
        //Create a SURF Tracker using k-d Tree
        SURFTracker tracker = new SURFTracker(modelFeatures);
        //Comment out above and uncomment below if you wish to use spill-tree instead
        //SURFTracker tracker = new SURFTracker(modelFeatures, 50, .7, .1);
         //match the features of the observed image to the model features
         SURFTracker.MatchedSURFFeature[] matchedFeatures = tracker.MatchFeature(imageFeatures, 2, 20);
         //filter out ambiguous matches
         matchedFeatures = SURFTracker.VoteForUniqueness(matchedFeatures, 0.8);
         //reject matches whose scale and rotation disagree with the majority
         matchedFeatures = SURFTracker.VoteForSizeAndOrientation(matchedFeatures, 1.5, 20);
         //estimate the homography that maps the model onto the observed image
         HomographyMatrix homography = SURFTracker.GetHomographyMatrixFromMatchedFeatures(matchedFeatures);
        //Merge the object image and the observed image into one image for display
        Image<Gray, Byte> res = modelImage.ConcateVertical(observedImage);
        #region draw lines between the matched features
        foreach (SURFTracker.MatchedSURFFeature matchedFeature in matchedFeatures)
        {
           PointF p = matchedFeature.ObservedFeature.Point.pt;
           p.Y += modelImage.Height;
           res.Draw(new LineSegment2DF(matchedFeature.ModelFeatures[0].Point.pt, p), new Gray(0), 1);
        }
        #endregion
        #region draw the project region on the image
        if (homography != null)
        {  //draw a rectangle along the projected model
           Rectangle rect = modelImage.ROI;
           PointF[] pts = new PointF[] { 
              new PointF(rect.Left, rect.Bottom),
              new PointF(rect.Right, rect.Bottom),
              new PointF(rect.Right, rect.Top),
              new PointF(rect.Left, rect.Top)};
           homography.ProjectPoints(pts);
           for (int i = 0; i < pts.Length; i++)
              pts[i].Y += modelImage.Height;
           res.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Gray(255.0), 5);
        }
        #endregion
        ImageViewer.Show(res);
     }
  }

}
</source>
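
If you want to experiment with the detector and matching settings, the snippet below is a minimal sketch (not part of the Emgu.CV.Example project) that assumes the same Emgu CV 2.0 API as the listing above: it raises the Hessian threshold passed to MCvSURFParams so that fewer but more stable keypoints are detected, and it builds the SURFTracker with the spill-tree constructor mentioned in the listing's comments. It stops after estimating the homography; the drawing code would be the same as above.

<source lang="csharp">
//Sketch of an alternative configuration, reusing only calls from the listing above.
//A larger first argument to MCvSURFParams (the Hessian threshold) yields fewer keypoints.
MCvSURFParams strictParam = new MCvSURFParams(1000, false);

Image<Gray, Byte> modelImage = new Image<Gray, byte>("box.png");
Image<Gray, Byte> observedImage = new Image<Gray, byte>("box_in_scene.png");

SURFFeature[] modelFeatures = modelImage.ExtractSURF(ref strictParam);
SURFFeature[] imageFeatures = observedImage.ExtractSURF(ref strictParam);

//spill-tree tracker, using the constants from the commented-out line in the listing
SURFTracker tracker = new SURFTracker(modelFeatures, 50, .7, .1);

//same matching and filtering pipeline as the listing above
SURFTracker.MatchedSURFFeature[] matched = tracker.MatchFeature(imageFeatures, 2, 20);
matched = SURFTracker.VoteForUniqueness(matched, 0.8);
matched = SURFTracker.VoteForSizeAndOrientation(matched, 1.5, 20);
HomographyMatrix homography = SURFTracker.GetHomographyMatrixFromMatchedFeatures(matched);
</source>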

== Result ==
[[image:SURFExample.png]]