-
Notifications
You must be signed in to change notification settings - Fork 4
/
Copy pathFerNNClassifier.cpp
217 lines (202 loc) · 8.31 KB
/
FerNNClassifier.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
#include "FerNNClassifier.h"
using namespace cv;
using namespace std;
void FerNNClassifier::read(const FileNode& file){
  // Load all classifier parameters from the given file-storage node.
  valid        = (float)file["valid"];         // fraction of pEx used for conservative similarity
  ncc_thesame  = (float)file["ncc_thesame"];   // NCC level above which two patches count as the same
  nstructs     = (int)file["num_trees"];       // number of ferns (base classifiers)
  structSize   = (int)file["num_features"];    // number of pixel comparisons per fern
  thr_fern     = (float)file["thr_fern"];      // fern-ensemble acceptance threshold
  thr_nn       = (float)file["thr_nn"];        // nearest-neighbor acceptance threshold
  thr_nn_valid = (float)file["thr_nn_valid"];  // stricter NN threshold for validated detections
}
void FerNNClassifier::prepare(const vector<Size>& scales){
acum = 0;
//Initialize test locations for features
int totalFeatures = nstructs*structSize;
features = vector<vector<Feature> >(scales.size(),vector<Feature> (totalFeatures));
RNG& rng = theRNG();
float x1f,x2f,y1f,y2f;
int x1, x2, y1, y2;
for (int i=0;i<totalFeatures;i++){
x1f = (float)rng;
y1f = (float)rng;
x2f = (float)rng;
y2f = (float)rng;
for (int s=0;s<scales.size();s++){
x1 = (int)(x1f * scales[s].width);
y1 = (int)(y1f * scales[s].height);
x2 = (int)(x2f * scales[s].width);
y2 = (int)(y2f * scales[s].height);
features[s][i] = Feature(x1, y1, x2, y2);
}
}
//Thresholds
thrN = 0.5f*nstructs;
//Initialize Posteriors
for (int i = 0; i<nstructs; i++) {
posteriors.push_back(vector<float>(pow(2.0,structSize), 0));
pCounter.push_back(vector<int>(pow(2.0,structSize), 0));
nCounter.push_back(vector<int>(pow(2.0,structSize), 0));
}
}
void FerNNClassifier::getFeatures(const cv::Mat& image,const int& scale_idx, vector<int>& fern){
  // Compute the binary leaf code of each fern for this patch.
  // fern must already be sized to nstructs by the caller.
  int leaf;
  for (int t=0;t<nstructs;t++){
      leaf=0;
      for (int f=0; f<structSize; f++){
          // BUGFIX: the feature index is t*structSize+f — each tree owns
          // structSize consecutive comparisons out of the nstructs*structSize
          // pool built in prepare(). The previous t*nstructs+f made trees
          // share/skip comparisons whenever nstructs != structSize.
          leaf = (leaf << 1) + features[scale_idx][t*structSize+f](image);
      }
      fern[t]=leaf;
  }
}
float FerNNClassifier::measure_forest(vector<int> fern) {
  // Sum the posterior probability each fern assigns to its leaf code.
  // (By-value parameter kept to match the header declaration.)
  float votes = 0;
  for (int tree = 0; tree < nstructs; tree++)
      votes += posteriors[tree][fern[tree]];
  return votes;
}
void FerNNClassifier::update(const vector<int>& fern, int C, int N) {
  // Record N observations of class C (1 = positive, 0 = negative) in every
  // fern's leaf counters and refresh the posterior P(positive | leaf).
  for (int i = 0; i < nstructs; i++) {
      const int idx = fern[i];
      if (C == 1)
          pCounter[i][idx] += N;
      else
          nCounter[i][idx] += N;
      const int pos = pCounter[i][idx];
      if (pos == 0)
          posteriors[i][idx] = 0;
      else
          posteriors[i][idx] = ((float)pos) / (pos + nCounter[i][idx]);
  }
}
void FerNNClassifier::trainF(const vector<std::pair<vector<int>,int> >& ferns,int resample){
// Conf = function(2,X,Y,Margin,Bootstrap,Idx)
// 0 1 2 3 4 5
// double *X = mxGetPr(prhs[1]); -> ferns[i].first
// int numX = mxGetN(prhs[1]); -> ferns.size()
// double *Y = mxGetPr(prhs[2]); ->ferns[i].second
// double thrP = *mxGetPr(prhs[3]) * nTREES; ->threshold*nstructs
// int bootstrap = (int) *mxGetPr(prhs[4]); ->resample
thrP = thr_fern*nstructs; // int step = numX / 10;
//for (int j = 0; j < resample; j++) { // for (int j = 0; j < bootstrap; j++) {
for (int i = 0; i < ferns.size(); i++){ // for (int i = 0; i < step; i++) {
// for (int k = 0; k < 10; k++) {
// int I = k*step + i;//box index
// double *x = X+nTREES*I; //tree index
if(ferns[i].second==1){ // if (Y[I] == 1) {
if(measure_forest(ferns[i].first)<=thrP) // if (measure_forest(x) <= thrP)
update(ferns[i].first,1,1); // update(x,1,1);
}else{ // }else{
if (measure_forest(ferns[i].first) >= thrN) // if (measure_forest(x) >= thrN)
update(ferns[i].first,0,1); // update(x,0,1);
}
}
//}
}
void FerNNClassifier::trainNN(const vector<cv::Mat>& nn_examples){
  // Train the nearest-neighbor classifier. The caller places the single
  // positive patch at nn_examples[0]; all remaining patches are negatives.
  if (nn_examples.empty())   // BUGFIX: y[0]=1 below would write out of bounds
      return;
  float conf,dummy;
  vector<int> y(nn_examples.size(),0);
  y[0]=1;                    // first example is the positive one
  vector<int> isin;
  for (size_t i=0;i<nn_examples.size();i++){            // For each example
      NNConf(nn_examples[i],isin,conf,dummy);           // relative similarity to current model
      if (y[i]==1 && conf<=thr_nn){                     // positive but classified ambiguously: add to model
          if (isin[1]<0){                               // model holds no positive example yet
              pEx = vector<Mat>(1,nn_examples[i]);
              continue;
          }
          //pEx.insert(pEx.begin()+isin[1],nn_examples[i]); // (MATLAB inserted next to best match)
          pEx.push_back(nn_examples[i]);
      }
      if(y[i]==0 && conf>0.5)                           // negative but classified positively: add as negative
          nEx.push_back(nn_examples[i]);
  }
  acum++;
  printf("%d. Trained NN examples: %d positive %d negative\n",acum,(int)pEx.size(),(int)nEx.size());
}
void FerNNClassifier::NNConf(const Mat& example, vector<int>& isin,float& rsconf,float& csconf){
  /* Nearest-neighbor confidence of a patch against the online model.
   * Inputs:
   *  - example: normalized patch to classify
   * Outputs:
   *  - rsconf: Relative Similarity (against all positive examples)
   *  - csconf: Conservative Similarity (against the first valid*|pEx| positives only)
   *  - isin:   {in positive set, index of best positive match, in negative set}
   */
  isin=vector<int>(3,-1);
  if (pEx.empty()){ // no positive examples in the model yet: everything is negative
      rsconf = 0;
      csconf = 0;
      return;
  }
  if (nEx.empty()){ // no negative examples in the model yet: everything is positive
      rsconf = 1;
      csconf = 1;
      return;
  }
  Mat ncc(1,1,CV_32F);
  float nccP, maxP=0;
  float csmaxP=0;   // BUGFIX: was uninitialized; it is read below when validatedPart==0 (valid==0)
  bool anyP=false;
  int maxPidx=0;    // BUGFIX: was uninitialized; it is read below when every nccP is 0
  int validatedPart = (int)ceil(pEx.size()*valid);
  float nccN, maxN=0;
  bool anyN=false;
  for (size_t i=0;i<pEx.size();i++){
      matchTemplate(pEx[i],example,ncc,CV_TM_CCORR_NORMED); // NCC against each positive example
      nccP=(((float*)ncc.data)[0]+1)*0.5f;                  // map NCC from [-1,1] to [0,1]
      if (nccP>ncc_thesame)
          anyP=true;
      if(nccP > maxP){
          maxP=nccP;
          maxPidx = (int)i;
          if(i<(size_t)validatedPart)
              csmaxP=maxP;  // best match restricted to the "validated" front of pEx
      }
  }
  for (size_t i=0;i<nEx.size();i++){
      matchTemplate(nEx[i],example,ncc,CV_TM_CCORR_NORMED); // NCC against each negative example
      nccN=(((float*)ncc.data)[0]+1)*0.5f;
      if (nccN>ncc_thesame)
          anyN=true;
      if(nccN > maxN)
          maxN=nccN;
  }
  //set isin
  if (anyP) isin[0]=1;  // query is highly correlated with some positive patch
  isin[1]=maxPidx;      // index of the maximally correlated positive patch
  if (anyN) isin[2]=1;  // query is highly correlated with some negative patch
  //Measure Relative Similarity
  float dN=1-maxN;
  float dP=1-maxP;
  rsconf = dN/(dN+dP);
  //Measure Conservative Similarity
  dP = 1 - csmaxP;
  csconf = dN / (dN + dP);
}
void FerNNClassifier::evaluateTh(const vector<pair<vector<int>,int> >& nXT,const vector<cv::Mat>& nExT){
float fconf;
for (int i=0;i<nXT.size();i++){
fconf = (float) measure_forest(nXT[i].first)/nstructs;
if (fconf>thr_fern)
thr_fern=fconf;
}
vector <int> isin;
float conf,dummy;
for (int i=0;i<nExT.size();i++){
NNConf(nExT[i],isin,conf,dummy);
if (conf>thr_nn)
thr_nn=conf;
}
if (thr_nn>thr_nn_valid)
thr_nn_valid = thr_nn;
}
void FerNNClassifier::show(){
Mat examples((int)pEx.size()*pEx[0].rows,pEx[0].cols,CV_8U);
double minval;
Mat ex(pEx[0].rows,pEx[0].cols,pEx[0].type());
for (int i=0;i<pEx.size();i++){
minMaxLoc(pEx[i],&minval);
pEx[i].copyTo(ex);
ex = ex-minval;
Mat tmp = examples.rowRange(Range(i*pEx[i].rows,(i+1)*pEx[i].rows));
ex.convertTo(tmp,CV_8U);
}
imshow("Examples",examples);
}