SURF feature detector in CSharp: Difference between revisions
Jump to navigation
Jump to search
No edit summary |
Breadwinka (talk | contribs) |
||
Line 1: | Line 1: | ||
<font color=green>'''This project is part of the Emgu.CV.Example solution'''</font> | |||
< | |||
== System Requirement == | == System Requirement == | ||
Line 19: | Line 11: | ||
== Source Code == | == Source Code == | ||
<source lang="csharp"> | |||
using System; | using System; | ||
using System.Collections.Generic; | using System.Collections.Generic; | ||
Line 33: | Line 25: | ||
static class Program | static class Program | ||
{ | { | ||
/// | /// <summary> | ||
/// The main entry point for the application. | /// The main entry point for the application. | ||
/// | /// </summary> | ||
[STAThread] | [STAThread] | ||
static void Main() | static void Main() | ||
Line 48: | Line 40: | ||
MCvSURFParams surfParam = new MCvSURFParams(500, false); | MCvSURFParams surfParam = new MCvSURFParams(500, false); | ||
Image | Image<Gray, Byte> modelImage = new Image<Gray, byte>("box.png"); | ||
//extract features from the object image | //extract features from the object image | ||
SURFFeature[] modelFeatures = modelImage.ExtractSURF(ref surfParam); | SURFFeature[] modelFeatures = modelImage.ExtractSURF(ref surfParam); | ||
Image | Image<Gray, Byte> observedImage = new Image<Gray, byte>("box_in_scene.png"); | ||
// extract features from the observed image | // extract features from the observed image | ||
SURFFeature[] imageFeatures = observedImage.ExtractSURF(ref surfParam); | SURFFeature[] imageFeatures = observedImage.ExtractSURF(ref surfParam); | ||
Line 67: | Line 59: | ||
//Merge the object image and the observed image into one image for display | //Merge the object image and the observed image into one image for display | ||
Image | Image<Gray, Byte> res = modelImage.ConcateVertical(observedImage); | ||
#region draw lines between the matched features | #region draw lines between the matched features | ||
Line 89: | Line 81: | ||
homography.ProjectPoints(pts); | homography.ProjectPoints(pts); | ||
for (int i = 0; i | for (int i = 0; i < pts.Length; i++) | ||
pts[i].Y += modelImage.Height; | pts[i].Y += modelImage.Height; | ||
res.DrawPolyline(Array.ConvertAll | res.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Gray(255.0), 5); | ||
} | } | ||
#endregion | #endregion | ||
Line 100: | Line 92: | ||
} | } | ||
} | } | ||
</source> | |||
== Result == | == Result == | ||
[[image:SURFExample.png]] | [[image:SURFExample.png]] |
Revision as of 03:52, 24 November 2010
This project is part of the Emgu.CV.Example solution
System Requirement
| Component | Requirement | Detail |
|---|---|---|
| Emgu CV | Version 2.0.0.0 Alpha | |
| Operating System | Cross Platform | |
Source Code
using System;
using System.Collections.Generic;
using System.Windows.Forms;
using System.Drawing;
using Emgu.CV;
using Emgu.CV.UI;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
namespace SURFFeatureExample
{
   static class Program
   {
      /// <summary>
      /// Application entry point: configures the WinForms rendering
      /// defaults, then hands control to the SURF matching demo.
      /// </summary>
      [STAThread]
      static void Main()
      {
         Application.EnableVisualStyles();
         Application.SetCompatibleTextRenderingDefault(false);
         Run();
      }

      /// <summary>
      /// Detects SURF features in a model image and an observed scene,
      /// matches them, draws the correspondences plus the projected model
      /// outline onto a stacked composite, and shows the result.
      /// </summary>
      static void Run()
      {
         // SURF detector parameters: 500 is presumably the Hessian threshold,
         // false the extended-descriptor flag — confirm against MCvSURFParams docs.
         MCvSURFParams detectionParams = new MCvSURFParams(500, false);

         var referenceImage = new Image<Gray, byte>("box.png");
         // Detect SURF keypoints and descriptors on the model image.
         SURFFeature[] referenceFeatures = referenceImage.ExtractSURF(ref detectionParams);

         var sceneImage = new Image<Gray, byte>("box_in_scene.png");
         // Detect SURF keypoints and descriptors on the observed scene.
         SURFFeature[] sceneFeatures = sceneImage.ExtractSURF(ref detectionParams);

         // Build a matcher over the model descriptors using a k-d tree.
         SURFTracker matcher = new SURFTracker(referenceFeatures);
         // Spill-tree alternative — swap with the line above to use it:
         //SURFTracker matcher = new SURFTracker(referenceFeatures, 50, .7, .1);

         // Match scene features against the model, then filter the matches.
         SURFTracker.MatchedSURFFeature[] matches = matcher.MatchFeature(sceneFeatures, 2, 20);
         matches = SURFTracker.VoteForUniqueness(matches, 0.8);
         matches = SURFTracker.VoteForSizeAndOrientation(matches, 1.5, 20);
         HomographyMatrix homography = SURFTracker.GetHomographyMatrixFromMatchedFeatures(matches);

         // Stack the model above the scene so match lines can span both halves.
         Image<Gray, byte> composite = referenceImage.ConcateVertical(sceneImage);

         #region draw lines between the matched features
         foreach (SURFTracker.MatchedSURFFeature match in matches)
         {
            PointF scenePoint = match.ObservedFeature.Point.pt;
            // Shift the scene endpoint down into the lower half of the composite.
            scenePoint.Y += referenceImage.Height;
            composite.Draw(new LineSegment2DF(match.ModelFeatures[0].Point.pt, scenePoint), new Gray(0), 1);
         }
         #endregion

         #region draw the project region on the image
         if (homography != null)
         {
            // Project the model image's corner rectangle into scene coordinates.
            Rectangle modelRegion = referenceImage.ROI;
            PointF[] corners =
            {
               new PointF(modelRegion.Left, modelRegion.Bottom),
               new PointF(modelRegion.Right, modelRegion.Bottom),
               new PointF(modelRegion.Right, modelRegion.Top),
               new PointF(modelRegion.Left, modelRegion.Top)
            };
            homography.ProjectPoints(corners);

            // Offset into the lower (scene) half before drawing the outline.
            for (int idx = 0; idx < corners.Length; idx++)
               corners[idx].Y += referenceImage.Height;

            composite.DrawPolyline(Array.ConvertAll<PointF, Point>(corners, Point.Round), true, new Gray(255.0), 5);
         }
         #endregion

         ImageViewer.Show(composite);
      }
   }
}