Using the K-Nearest Neighbors Classifier in OpenCV

Below is the K-nearest neighbors classifier example given in the OpenCV manual, implemented with the CvMat type. From it you can see how to use one of OpenCV's built-in classifiers, how to access matrix data, how to draw on an image, and how to use OpenCV's random number generation functions. In the second example further down, these parts of the code are annotated with comments.

#include "ml.h"
#include "highgui.h"
int main( int argc, char** argv )
{
const int K = 10;
int i, j, k, accuracy;
float response;
int train_sample_count = 100;
CvRNG rng_state = cvRNG( - 1);
CvMat* trainData = cvCreateMat( train_sample_count, 2, CV_32FC1 );
CvMat* trainClasses = cvCreateMat( train_sample_count, 1, CV_32FC1 );
IplImage* img = cvCreateImage( cvSize( 500, 500 ), 8, 3 );
float _sample[ 2];
CvMat sample = cvMat( 1, 2, CV_32FC1, _sample );
cvZero( img );
CvMat trainData1, trainData2, trainClasses1, trainClasses2;
// form the training samples
cvGetRows( trainData, &trainData1, 0, train_sample_count/2 );
cvRandArr( &rng_state, &trainData1, CV_RAND_NORMAL, cvScalar( 200, 200), cvScalar( 50, 50) );
cvGetRows( trainData, &trainData2, train_sample_count/2, train_sample_count );
cvRandArr( &rng_state, &trainData2, CV_RAND_NORMAL, cvScalar( 300, 300), cvScalar( 50, 50) );
cvGetRows( trainClasses, &trainClasses1, 0, train_sample_count/2 );
cvSet( &trainClasses1, cvScalar( 1) );
cvGetRows( trainClasses, &trainClasses2, train_sample_count/2, train_sample_count );
cvSet( &trainClasses2, cvScalar( 2) );
// learn classifier
CvKNearest knn( trainData, trainClasses, 0, false, K );
CvMat* nearests = cvCreateMat( 1, K, CV_32FC1);
for( i = 0; i < img- >height; i++ )
{
for( j = 0; j < img- >width; j ++ )
{
sample.data.fl[ 0] = ( float)j;
sample.data.fl[ 1] = ( float)i;
// estimate the response and get the neighbors’ labels
response = knn.find_nearest( &sample,K, 0, 0,nearests, 0);
// compute the number of neighbors representing the majority
for( k = 0, accuracy = 0; k < K; k++ )
{
if( nearests- >data.fl[k] == response)
accuracy++;
}
// highlight the pixel depending on the accuracy (or confidence)
cvSet2D( img, i, j, response == 1 ?
(accuracy > 5 ? CV_RGB( 180, 0, 0) : CV_RGB( 180, 120, 0)) :
(accuracy > 5 ? CV_RGB( 0, 180, 0) : CV_RGB( 120, 120, 0)) );
}
}
// display the original training samples
for( i = 0; i < train_sample_count/2; i++ )
{
CvPoint pt;
pt.x = cvRound(trainData1.data.fl[i*2]);
pt.y = cvRound(trainData1.data.fl[i*2+1]);
cvCircle( img, pt, 2, CV_RGB( 255, 0, 0), CV_FILLED );
pt.x = cvRound(trainData2.data.fl[i*2]);
pt.y = cvRound(trainData2.data.fl[i*2+1]);
cvCircle( img, pt, 2, CV_RGB( 0, 255, 0), CV_FILLED );
}
cvNamedWindow( " classifier result" , 1 );
cvShowImage( " classifier result" , img );
cvWaitKey( 0);
cvReleaseMat( &trainClasses );
cvReleaseMat( &trainData );
return 0;
}
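A side note on the matrix accesses above: the example reads and writes CvMat elements directly through the data.fl pointer, which assumes a single-channel CV_32FC1 matrix with contiguous rows. The C API also offers the cvmGet/cvmSet accessors for single-channel floating-point matrices. The following minimal sketch (using a hypothetical 3x3 matrix that is not part of the example above) shows both forms:

#include "cxcore.h"
#include <stdio.h>

int main( void )
{
    /* hypothetical 3x3 single-channel float matrix, for illustration only */
    CvMat* m = cvCreateMat( 3, 3, CV_32FC1 );
    cvZero( m );

    /* direct pointer access, as in the example above
       (rows of a matrix created by cvCreateMat are stored contiguously) */
    m->data.fl[ 1*m->cols + 2 ] = 7.f;

    /* accessor functions for single-channel floating-point matrices */
    cvmSet( m, 2, 0, 3.5 );
    printf( "%f %f\n", cvmGet( m, 1, 2 ), cvmGet( m, 2, 0 ) );  /* prints 7.000000 3.500000 */

    cvReleaseMat( &m );
    return 0;
}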


Below is my own modified version of the K-nearest neighbors example, based on the Mat data type.

#include <opencv2/opencv.hpp>

using namespace cv;

int main( int argc, char** argv )
{
    const int K = 10;
    int i, j, k, accuracy;
    float response;
    int train_sample_count = 100;
    RNG rng_state = RNG(-1);
    // how to seed and use the random number generator
    Mat trainData = Mat::zeros( train_sample_count, 2, CV_32FC1 );
    Mat trainClasses = Mat::zeros( train_sample_count, 1, CV_32FC1 );
    Mat img = Mat::zeros( 500, 500, CV_8UC3 );
    float _sample[2];
    Mat sample( 1, 2, CV_32FC1, _sample );
    Mat trainData1, trainData2, trainClasses1, trainClasses2;

    // form the training samples
    trainData1 = trainData.rowRange( 0, train_sample_count/2 );
    // how to extract a sub-matrix (row range) from an existing matrix
    rng_state.fill( trainData1, CV_RAND_NORMAL, Mat(1,1,CV_64F,cvScalar(200,200)), Mat(1,1,CV_64F,cvScalar(50,50)) );
    // how to fill an existing matrix with random values
    trainData2 = trainData.rowRange( train_sample_count/2, train_sample_count );
    rng_state.fill( trainData2, CV_RAND_NORMAL, Mat(1,1,CV_64F,cvScalar(300,300)), Mat(1,1,CV_64F,cvScalar(50,50)) );
    trainClasses1 = trainClasses.rowRange( 0, train_sample_count/2 );
    trainClasses1.setTo( Scalar(1) );
    // how to set every element of a matrix to the same value
    trainClasses2 = trainClasses.rowRange( train_sample_count/2, train_sample_count );
    trainClasses2.setTo( Scalar(2) );

    // learn classifier
    CvKNearest knn( trainData, trainClasses, Mat(), false, K );
    // how to construct and train the classifier object
    Mat nearests( 1, K, CV_32FC1 );

    for( i = 0; i < img.rows; i++ )
    {
        for( j = 0; j < img.cols; j++ )
        {
            sample.at<float>(0,0) = (float)j;
            sample.at<float>(0,1) = (float)i;

            // estimate the response and get the neighbors' labels
            response = knn.find_nearest( sample, K, 0, 0, &nearests, 0 );
            // classify new data with the trained classifier

            // compute the number of neighbors representing the majority
            for( k = 0, accuracy = 0; k < K; k++ )
            {
                if( nearests.at<float>(0,k) == response )
                    accuracy++;
            }

            // highlight the pixel depending on the accuracy (or confidence)
            // how to access a multi-channel matrix
            img.at<Vec3b>(i,j)[2] = response == 1 ? (accuracy > 5 ? 180 : 180) : (accuracy > 5 ? 0 : 120);
            img.at<Vec3b>(i,j)[1] = response == 1 ? (accuracy > 5 ? 0 : 120) : (accuracy > 5 ? 180 : 120);
            img.at<Vec3b>(i,j)[0] = response == 1 ? (accuracy > 5 ? 0 : 0) : (accuracy > 5 ? 0 : 0);
        }
    }

    // display the original training samples
    for( i = 0; i < train_sample_count/2; i++ )
    {
        Point pt;
        pt.x = cvRound( trainData1.at<float>(i,0) );
        pt.y = cvRound( trainData1.at<float>(i,1) );
        circle( img, pt, 2, CV_RGB(255,0,0), CV_FILLED );
        // how to draw on a Mat image
        pt.x = cvRound( trainData2.at<float>(i,0) );
        pt.y = cvRound( trainData2.at<float>(i,1) );
        circle( img, pt, 2, CV_RGB(0,255,0), CV_FILLED );
    }

    imshow( "classifier result", img );
    waitKey(0);

    return 0;
}
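Note that CvKNearest belongs to the old OpenCV 2.x ml API and was removed in OpenCV 3.x. For newer versions, a minimal sketch of the equivalent training and prediction steps with cv::ml::KNearest might look like the following; this is an assumed port for illustration, not part of the original example, and it uses stand-in training data rather than the matrices built above:

#include <opencv2/opencv.hpp>
#include <iostream>

using namespace cv;

int main()
{
    const int K = 10;

    // same layout as above: one sample per row, float features and float class labels
    Mat trainData( 100, 2, CV_32FC1 ), trainClasses( 100, 1, CV_32FC1 );
    randn( trainData, Scalar(250), Scalar(60) );           // stand-in training data for this sketch
    trainClasses.rowRange( 0, 50 ).setTo( Scalar(1) );
    trainClasses.rowRange( 50, 100 ).setTo( Scalar(2) );

    // create and train the classifier (OpenCV 3.x/4.x ml module)
    Ptr<ml::KNearest> knn = ml::KNearest::create();
    knn->setDefaultK( K );
    knn->setIsClassifier( true );
    knn->train( trainData, ml::ROW_SAMPLE, trainClasses );

    // classify one new sample and retrieve the K neighbor labels
    Mat sample = (Mat_<float>(1,2) << 220.f, 240.f);
    Mat results, neighborResponses, dists;
    float response = knn->findNearest( sample, K, results, neighborResponses, dists );

    std::cout << "predicted class: " << response << std::endl;
    return 0;
}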

Below is the classification result produced by the classifier:


