K Nearest Neighbors in CSharp
'''This example requires [[Version_History#Emgu.CV-1.5.0.0|Emgu CV 1.5.0.0]] or later release'''
== What is a K Nearest Neighbors Classifier ==
According to [http://en.wikipedia.org/wiki/K-nearest_neighbor_algorithm Wikipedia],
:In pattern recognition, the k-nearest neighbors algorithm (k-NN) is a method for classifying objects based on closest training examples in the feature space. k-NN is a type of instance-based learning, or lazy learning, where the function is only approximated locally and all computation is deferred until classification. It can also be used for regression.
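For intuition, here is a small self-contained C# sketch of the idea, independent of the Emgu CV code in the sections below: the query point takes the label that holds the majority among its k closest training points. All class and method names in this sketch are illustrative only.

<source lang="csharp">
using System;
using System.Linq;

// Plain k-NN classification: rank training points by distance to the query,
// keep the k nearest, and return the most common label among them.
class KnnSketch
{
    static int Classify(float[][] trainData, int[] trainLabels, float[] query, int k)
    {
        return trainData
            .Select((p, i) => new
            {
                Label = trainLabels[i],
                // squared Euclidean distance is enough for ranking neighbors
                Dist = p.Zip(query, (a, b) => (a - b) * (a - b)).Sum()
            })
            .OrderBy(x => x.Dist)              // sort by distance to the query
            .Take(k)                           // keep the k nearest neighbors
            .GroupBy(x => x.Label)             // majority vote over their labels
            .OrderByDescending(g => g.Count())
            .First().Key;
    }

    static void Main()
    {
        // two tiny clusters: label 1 around (1, 1), label 2 around (5, 5)
        float[][] train =
        {
            new float[] { 1, 1 }, new float[] { 1, 2 }, new float[] { 2, 1 },
            new float[] { 5, 5 }, new float[] { 5, 6 }, new float[] { 6, 5 }
        };
        int[] labels = { 1, 1, 1, 2, 2, 2 };

        Console.WriteLine(Classify(train, labels, new float[] { 1.5f, 1.5f }, 3)); // prints 1
        Console.WriteLine(Classify(train, labels, new float[] { 5.5f, 5.5f }, 3)); // prints 2
    }
}
</source>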
== Source Code ==
=== Emgu CV 3.x ===
<source lang="csharp">
using System;
using System.Drawing;
using Emgu.CV;
using Emgu.CV.Structure;
using Emgu.CV.ML;
using Emgu.CV.ML.Structure;
...
int K = 10;
int trainSampleCount = 100;
#region Generate the training data and classes
Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);
Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);
Matrix<float> sample = new Matrix<float>(1, 2);
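// first half of the rows: points drawn from a Gaussian centered at (200, 200), labeled class 1;
// second half: points drawn from a Gaussian centered at (300, 300), labeled class 2 (std. dev. 50 in both cases)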
Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
Matrix<float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));
Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
trainClasses1.SetValue(1);
Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
trainClasses2.SetValue(2);
#endregion
Matrix<float> results, neighborResponses;
results = new Matrix<float>(sample.Rows, 1);
neighborResponses = new Matrix<float>(sample.Rows, K);
//dist = new Matrix<float>(sample.Rows, K);
using (KNearest knn = new KNearest())
{
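// DefaultK is the number of neighbors used by Predict; IsClassifier = true
// selects majority-vote classification rather than regression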
knn.DefaultK = K;
knn.IsClassifier = true;
knn.Train(trainData, MlEnum.DataLayoutType.RowSample, trainClasses);
for (int i = 0; i < img.Height; i++)
{
for (int j = 0; j < img.Width; j++)
{
sample.Data[0, 0] = j;
sample.Data[0, 1] = i;
// estimate the response for the current pixel; note that Predict returns
// only the predicted class label and does not fill neighborResponses, so
// the accuracy count below stays at 0 unless the neighbors' labels are
// retrieved with FindNearest instead
float response = knn.Predict(sample); //knn.FindNearest(sample, K, results, null, neighborResponses, null);
int accuracy = 0;
// compute the number of neighbors representing the majority
for (int k = 0; k < K; k++)
{
if (neighborResponses.Data[0, k] == response)
accuracy++;
}
// highlight the pixel depending on the accuracy (or confidence)
img[i, j] =
response == 1 ?
(accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 40, 0)) :
(accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(40, 90, 0));
}
}
}
// display the original training samples
for (int i = 0; i < (trainSampleCount >> 1); i++)
{
PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
}
Emgu.CV.UI.ImageViewer.Show(img);
</source>
=== Emgu CV 2.x ===
<source lang="csharp">
using System;
using System.Drawing;
using Emgu.CV;
using Emgu.CV.Structure;
using Emgu.CV.ML;
using Emgu.CV.ML.Structure;
...
int K = 10;
int trainSampleCount = 100;
#region Generate the training data and classes
Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);
Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);
Matrix<float> sample = new Matrix<float>(1, 2);
Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
Matrix<float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));
Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
trainClasses1.SetValue(1);
Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
trainClasses2.SetValue(2);
#endregion
Matrix<float> results, neighborResponses;
results = new Matrix<float>(sample.Rows, 1);
neighborResponses = new Matrix<float>(sample.Rows, K);
//dist = new Matrix<float>(sample.Rows, K);
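// the 2.x constructor trains immediately: training samples, class labels,
// no sample index mask, classification mode (isRegression = false), max K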
using (KNearest knn = new KNearest(trainData, trainClasses, null, false, K))
{
for (int i = 0; i < img.Height; i++)
{
for (int j = 0; j < img.Width; j++)
{
sample.Data[0, 0] = j;
sample.Data[0, 1] = i;
//Matrix<float> nearestNeighbors = new Matrix<float>(K* sample.Rows, sample.Cols);
// estimate the response and get the neighbors' labels
float response = knn.FindNearest(sample, K, results, null, neighborResponses, null);
int accuracy = 0;
// compute the number of neighbors representing the majority
for (int k = 0; k < K; k++)
{
if (neighborResponses.Data[0, k] == response)
accuracy++;
}
// highlight the pixel depending on the accuracy (or confidence)
img[i, j] =
response == 1 ?
(accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 60, 0)) :
(accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(60, 90, 0));
}
}
}
// display the original training samples
for (int i = 0; i < (trainSampleCount >> 1); i++)
{
PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
}
Emgu.CV.UI.ImageViewer.Show(img);
</source>

== Result ==
[[image:KNearest.png]]