#ifndef GRT_MEAN_SHIFT_HEADER
#define GRT_MEAN_SHIFT_HEADER

#include "../../CoreModules/MLBase.h"

// ... (the MeanShift class declaration and the search(...) signature are elided in this excerpt) ...

    const unsigned int numDimensions = (unsigned int)meanStart.size();
    const unsigned int numPoints = (unsigned int)points.size();
    const Float gamma = -1.0 / (2 * SQR(sigma) ); //negative, so the Gaussian kernel decays with distance
    unsigned int iteration = 0;
    VectorFloat numer( numDimensions, 0 );
    VectorFloat denom( numDimensions, 0 );
    VectorFloat kernelDist( numDimensions, 0 );
    Float pointsWithinSearchRadius = 0;

    mean = meanStart; //mean is a class member, updated in place by the search
    VectorFloat lastMean = mean;

    //Run the search until the mean stops moving or the iteration limit is reached
    while( true ){

        //Reset the accumulators for this iteration
        pointsWithinSearchRadius = 0;
        std::fill(numer.begin(),numer.end(),0);
        std::fill(denom.begin(),denom.end(),0);
        std::fill(kernelDist.begin(),kernelDist.end(),0);
        //Accumulate the kernel-weighted sums over the points within the search radius
        for(unsigned int i=0; i<numPoints; i++){

            //Compute the distance from the current mean to this point
            Float distToMean = euclideanDist( mean, points[i] );

            if( distToMean < searchRadius ){

                for(unsigned int j=0; j<numDimensions; j++){
                    kernelDist[j] = gaussKernel( points[i][j], mean[j], gamma );
                    numer[j] += kernelDist[j] * points[i][j];
                    denom[j] += kernelDist[j];
                }

                pointsWithinSearchRadius++;
            }
        }
        //Update the mean
        Float change = 0;
        for(unsigned int j=0; j<numDimensions; j++){

            mean[j] = numer[j] / denom[j];

            change += grt_sqr( mean[j] - lastMean[j] );

            lastMean[j] = mean[j];
        }
        change = grt_sqrt( change );
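        //Note: numer[j] / denom[j] is the kernel-weighted average of the points inside
        //the search radius, i.e. the standard mean-shift update, and 'change' is the
        //Euclidean distance the mean moved this iteration.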
        trainingLog << "iteration: " << iteration;
        trainingLog << " mean: ";
        for(unsigned int j=0; j<numDimensions; j++){
            trainingLog << mean[j] << " ";
        }
        trainingLog << " change: " << change << std::endl;
        //Stop if the mean has converged
        if( change < minChange ){
            trainingLog << "min change limit reached - stopping search" << std::endl;
            break;
        }

        //Stop if the iteration limit has been reached
        if( ++iteration >= maxNumEpochs ){
            trainingLog << "max number of iterations reached - stopping search." << std::endl;
            break;
        }

    }
    //Record how many iterations the search needed
    numTrainingIterationsToConverge = iteration;

    return true;
}
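//The helpers below implement the kernel and the distance used by search():
//gaussKernel evaluates exp( gamma * (x - mu)^2 ), i.e. the Gaussian kernel
//exp( -(x - mu)^2 / (2*sigma^2) ) given the negative gamma computed above,
//and euclideanDist returns the straight-line distance between two vectors.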
Float gaussKernel( const Float &x, const Float &mu, const Float gamma ){
    return exp( gamma * grt_sqr(x-mu) );
}
Float gaussKernel( const VectorFloat &x, const VectorFloat &mu, const Float gamma ){
    Float y = 0;
    const UINT N = (UINT)x.size();
    for(UINT i=0; i<N; i++){
        y += grt_sqr(x[i]-mu[i]);
    }
    return exp( gamma * y );
}
Float euclideanDist( const VectorFloat &x, const VectorFloat &y ){
    Float z = 0;
    const UINT N = (UINT)x.size();
    for(UINT i=0; i<N; i++){
        z += grt_sqr(x[i]-y[i]);
    }
    return grt_sqrt( z );
}
#endif //GRT_MEAN_SHIFT_HEADER
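For context, here is a minimal sketch of how the search above might be driven from user code. The full signature of search() is not shown in this excerpt, so the class name MeanShift (inferred from the header guard), the argument order (start mean, points, search radius, sigma), the getMean() accessor, and the GRT/GRT.h include path are assumptions rather than details taken verbatim from the header.

#include <GRT/GRT.h> //assumed umbrella header; MeanShift.h may need to be included directly
#include <iostream>
#include <vector>

int main(){
    //Build a small 2D point set with one dense cluster near (10,10)
    std::vector< GRT::VectorFloat > points;
    for(unsigned int i=0; i<20; i++){
        GRT::VectorFloat p;
        p.push_back( 10.0 + (i%5) * 0.1 );
        p.push_back( 10.0 + (i/5) * 0.1 );
        points.push_back( p );
    }

    //Start the search a little away from the cluster centre
    GRT::VectorFloat meanStart;
    meanStart.push_back( 8.0 );
    meanStart.push_back( 8.0 );

    GRT::MeanShift ms;
    //Assumed argument order: start mean, points, search radius, sigma
    if( ms.search( meanStart, points, 5.0, 2.0 ) ){
        GRT::VectorFloat mean = ms.getMean(); //assumed accessor for the converged mean
        std::cout << "converged mean: " << mean[0] << " " << mean[1] << std::endl;
    }
    return 0;
}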
MLBase(const std::string &id="", const BaseType type=BASE_TYPE_NOT_SET)

This is the main base class that all GRT machine learning algorithms should inherit from...
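To illustrate that relationship, here is a minimal sketch of a custom module deriving from MLBase, using only the constructor shown above. The class name, the include path, and the assumption that MLBase can be subclassed this directly (without overriding any training or prediction virtuals) are hypothetical rather than taken from the GRT documentation.

#include <GRT/CoreModules/MLBase.h> //assumed install path for the header included at the top of MeanShift.h

//Hypothetical module: forwards an id string to the MLBase constructor shown above.
//A real GRT module would also override the relevant virtual train/predict methods.
class MyMinimalModule : public GRT::MLBase{
public:
    MyMinimalModule() : MLBase( "MyMinimalModule" ){}
};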