SURF feature detector in CSharp

This project is part of the Emgu.CV.Example solution. It uses SURF features to locate a model image inside an observed scene: keypoints are matched with a k-nearest-neighbour search, filtered by a uniqueness (ratio) test and a size/orientation vote, and the surviving matches are used to estimate a homography that projects the model region onto the scene.
== System Requirement ==

{| border="1px" cellpadding="10" cellspacing="0"
!Component || Requirement || Detail
|-
|Emgu CV || [[Version_History#Emgu.CV-2.4.0|Version 2.4.0]] + ||
|-
|Operating System || Cross Platform ||
|}
== Source Code ==

=== Emgu CV 3.x ===

<source lang="csharp">
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
using System.Runtime.InteropServices;
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Features2D;
using Emgu.CV.Structure;
using Emgu.CV.Util;
#if !__IOS__
using Emgu.CV.Cuda;
#endif
using Emgu.CV.XFeatures2D;

namespace SURFFeatureExample
{
   public static class DrawMatches
   {
      public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
      {
         int k = 2;
         double uniquenessThreshold = 0.8;
         double hessianThresh = 300;

         Stopwatch watch;
         homography = null;

         modelKeyPoints = new VectorOfKeyPoint();
         observedKeyPoints = new VectorOfKeyPoint();

#if !__IOS__
         if (CudaInvoke.HasCuda)
         {
            CudaSURF surfCuda = new CudaSURF((float) hessianThresh);
            using (GpuMat gpuModelImage = new GpuMat(modelImage))
            //extract features from the object image
            using (GpuMat gpuModelKeyPoints = surfCuda.DetectKeyPointsRaw(gpuModelImage, null))
            using (GpuMat gpuModelDescriptors = surfCuda.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
            using (CudaBFMatcher matcher = new CudaBFMatcher(DistanceType.L2))
            {
               surfCuda.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);

               watch = Stopwatch.StartNew();

               // extract features from the observed image
               using (GpuMat gpuObservedImage = new GpuMat(observedImage))
               using (GpuMat gpuObservedKeyPoints = surfCuda.DetectKeyPointsRaw(gpuObservedImage, null))
               using (GpuMat gpuObservedDescriptors = surfCuda.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
               //using (GpuMat tmp = new GpuMat())
               //using (Stream stream = new Stream())
               {
                  matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);

                  surfCuda.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                  mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                  mask.SetTo(new MCvScalar(255));
                  Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                  int nonZeroCount = CvInvoke.CountNonZero(mask);
                  if (nonZeroCount >= 4)
                  {
                     nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                        matches, mask, 1.5, 20);
                     if (nonZeroCount >= 4)
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                           observedKeyPoints, matches, mask, 2);
                  }
               }
               watch.Stop();
            }
         }
         else
#endif
         {
            using (UMat uModelImage = modelImage.ToUMat(AccessType.Read))
            using (UMat uObservedImage = observedImage.ToUMat(AccessType.Read))
            {
               SURF surfCPU = new SURF(hessianThresh);
               //extract features from the object image
               UMat modelDescriptors = new UMat();
               surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

               watch = Stopwatch.StartNew();

               // extract features from the observed image
               UMat observedDescriptors = new UMat();
               surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
               BFMatcher matcher = new BFMatcher(DistanceType.L2);
               matcher.Add(modelDescriptors);

               matcher.KnnMatch(observedDescriptors, matches, k, null);
               mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
               mask.SetTo(new MCvScalar(255));
               Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

               int nonZeroCount = CvInvoke.CountNonZero(mask);
               if (nonZeroCount >= 4)
               {
                  nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                     matches, mask, 1.5, 20);
                  if (nonZeroCount >= 4)
                     homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                        observedKeyPoints, matches, mask, 2);
               }

               watch.Stop();
            }
         }
         matchTime = watch.ElapsedMilliseconds;
      }

      /// <summary>
      /// Draw the model image and observed image, the matched features and homography projection.
      /// </summary>
      /// <param name="modelImage">The model image</param>
      /// <param name="observedImage">The observed image</param>
      /// <param name="matchTime">The output total time for computing the homography matrix.</param>
      /// <returns>The model image and observed image, the matched features and homography projection.</returns>
      public static Mat Draw(Mat modelImage, Mat observedImage, out long matchTime)
      {
         Mat homography;
         VectorOfKeyPoint modelKeyPoints;
         VectorOfKeyPoint observedKeyPoints;
         using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
         {
            Mat mask;
            FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
               out mask, out homography);

            //Draw the matched keypoints
            Mat result = new Mat();
            Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
               matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

            #region draw the projected region on the image
            if (homography != null)
            {
               //draw a rectangle along the projected model
               Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
               PointF[] pts = new PointF[]
               {
                  new PointF(rect.Left, rect.Bottom),
                  new PointF(rect.Right, rect.Bottom),
                  new PointF(rect.Right, rect.Top),
                  new PointF(rect.Left, rect.Top)
               };
               pts = CvInvoke.PerspectiveTransform(pts, homography);

               Point[] points = Array.ConvertAll<PointF, Point>(pts, Point.Round);
               using (VectorOfPoint vp = new VectorOfPoint(points))
               {
                  CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
               }
            }
            #endregion

            return result;
         }
      }
   }
}
</source>
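The class above only builds the visualization; a minimal driver sketch is shown below. The file names box.png and box_in_scene.png are placeholders for a model image and a scene that contains it, and the load-flag enum is named ImreadModes in recent Emgu CV releases (some earlier 3.x builds call it LoadImageType):

<source lang="csharp">
using System;
using Emgu.CV;
using Emgu.CV.CvEnum;

namespace SURFFeatureExample
{
   public static class Program
   {
      public static void Main()
      {
         // Load the model (object) image and the observed (scene) image as grayscale.
         // "box.png" and "box_in_scene.png" are placeholder file names.
         using (Mat modelImage = CvInvoke.Imread("box.png", ImreadModes.Grayscale))
         using (Mat observedImage = CvInvoke.Imread("box_in_scene.png", ImreadModes.Grayscale))
         {
            long matchTime;
            using (Mat result = DrawMatches.Draw(modelImage, observedImage, out matchTime))
            {
               Console.WriteLine("Matched in {0} ms", matchTime);
               // Save the visualization; any image viewer can be used instead.
               CvInvoke.Imwrite("result.png", result);
            }
         }
      }
   }
}
</source>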
=== Emgu CV 2.x ===

<source lang="csharp">
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
using System.Runtime.InteropServices;
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Features2D;
using Emgu.CV.Structure;
using Emgu.CV.Util;
using Emgu.CV.GPU;

namespace SURFFeatureExample
{
   public static class DrawMatches
   {
      /// <summary>
      /// Draw the model image and observed image, the matched features and homography projection.
      /// </summary>
      /// <param name="modelImage">The model image</param>
      /// <param name="observedImage">The observed image</param>
      /// <param name="matchTime">The output total time for computing the homography matrix.</param>
      /// <returns>The model image and observed image, the matched features and homography projection.</returns>
      public static Image<Bgr, Byte> Draw(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage, out long matchTime)
      {
         Stopwatch watch;
         HomographyMatrix homography = null;

         SURFDetector surfCPU = new SURFDetector(500, false);
         VectorOfKeyPoint modelKeyPoints;
         VectorOfKeyPoint observedKeyPoints;
         Matrix<int> indices;

         Matrix<byte> mask;
         int k = 2;
         double uniquenessThreshold = 0.8;
         if (GpuInvoke.HasCuda)
         {
            GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
            using (GpuImage<Gray, Byte> gpuModelImage = new GpuImage<Gray, byte>(modelImage))
            //extract features from the object image
            using (GpuMat<float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
            using (GpuMat<float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
            using (GpuBruteForceMatcher<float> matcher = new GpuBruteForceMatcher<float>(DistanceType.L2))
            {
               modelKeyPoints = new VectorOfKeyPoint();
               surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);

               watch = Stopwatch.StartNew();

               // extract features from the observed image
               using (GpuImage<Gray, Byte> gpuObservedImage = new GpuImage<Gray, byte>(observedImage))
               using (GpuMat<float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
               using (GpuMat<float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
               using (GpuMat<int> gpuMatchIndices = new GpuMat<int>(gpuObservedDescriptors.Size.Height, k, 1, true))
               using (GpuMat<float> gpuMatchDist = new GpuMat<float>(gpuObservedDescriptors.Size.Height, k, 1, true))
               using (GpuMat<Byte> gpuMask = new GpuMat<byte>(gpuMatchIndices.Size.Height, 1, 1))
               using (Stream stream = new Stream())
               {
                  matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                  indices = new Matrix<int>(gpuMatchIndices.Size);
                  mask = new Matrix<byte>(gpuMask.Size);

                  //gpu implementation of VoteForUniqueness
                  using (GpuMat<float> col0 = gpuMatchDist.Col(0))
                  using (GpuMat<float> col1 = gpuMatchDist.Col(1))
                  {
                     GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                     GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                  }

                  observedKeyPoints = new VectorOfKeyPoint();
                  surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                  //wait for the stream to complete its tasks
                  //We can perform some other CPU intensive work here while we are waiting for the stream to complete.
                  stream.WaitForCompletion();

                  gpuMask.Download(mask);
                  gpuMatchIndices.Download(indices);

                  if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                  {
                     int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                     if (nonZeroCount >= 4)
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                  }

                  watch.Stop();
               }
            }
         }
         else
         {
            //extract features from the object image
            modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
            Matrix<float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew();

            // extract features from the observed image
            observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
            Matrix<float> observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
            BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
            matcher.Add(modelDescriptors);

            indices = new Matrix<int>(observedDescriptors.Rows, k);
            using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
            {
               matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
               mask = new Matrix<byte>(dist.Rows, 1);
               mask.SetValue(255);
               Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);
            if (nonZeroCount >= 4)
            {
               nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
               if (nonZeroCount >= 4)
                  homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
            }

            watch.Stop();
         }

         //Draw the matched keypoints
         Image<Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
            indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

         #region draw the projected region on the image
         if (homography != null)
         {
            //draw a rectangle along the projected model
            Rectangle rect = modelImage.ROI;
            PointF[] pts = new PointF[] {
               new PointF(rect.Left, rect.Bottom),
               new PointF(rect.Right, rect.Bottom),
               new PointF(rect.Right, rect.Top),
               new PointF(rect.Left, rect.Top)};
            homography.ProjectPoints(pts);

            result.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
         }
         #endregion

         matchTime = watch.ElapsedMilliseconds;

         return result;
      }
   }
}
</source>
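The 2.x API works on Image&lt;Gray, Byte&gt; rather than Mat, so an equivalent driver sketch (same placeholder file names as above) might look like this:

<source lang="csharp">
using System;
using Emgu.CV;
using Emgu.CV.Structure;

namespace SURFFeatureExample
{
   public static class Program
   {
      public static void Main()
      {
         // "box.png" and "box_in_scene.png" are placeholder file names.
         using (Image<Gray, Byte> modelImage = new Image<Gray, Byte>("box.png"))
         using (Image<Gray, Byte> observedImage = new Image<Gray, Byte>("box_in_scene.png"))
         {
            long matchTime;
            using (Image<Bgr, Byte> result = DrawMatches.Draw(modelImage, observedImage, out matchTime))
            {
               Console.WriteLine("Matched in {0} ms", matchTime);
               result.Save("result.png");
            }
         }
      }
   }
}
</source>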
== Performance Comparison ==

{| style="text-align:center" border="1px" cellpadding="10" cellspacing="0"
!CPU || GPU || Emgu CV Package || Execution Time (milliseconds)
|-
| || NVidia GeForce GTX560M || libemgucv-windows-x64-2.4.0.1714 || 87
|-
| Core i7-2630QM@2.0GHz || || libemgucv-windows-x64-2.4.0.1714 || 192
|-
| LG G Flex 2 (Android) || || libemgucv-android-3.1.0.2298 || 432
|}
== Result ==

*Windows
[[image:SURFExample.png]]

*Android (Nexus S)
[[File:MonoAndroidSURFFeatureResultNexusS.jpg | 500px]]