23 using namespace Eigen;
24 using namespace shogun;
// NOTE(review): this span is a fragmented extract — statements are wrapped
// mid-token, lines are missing (gaps in the embedded original line numbers),
// and the enclosing function signature is not visible. It appears to be the
// body of an LDA (linear discriminant analysis) binary-classifier training
// routine, presumably shogun's CLDA::train_machine — confirm against the
// full file before relying on these notes. Code is left byte-identical.
68 SG_ERROR(
"Specified features are not of type CDotFeatures\n")
// Dense feature matrix: columns are examples, rows are feature dimensions.
77 ->get_feature_matrix();
78 int32_t num_feat=feature_matrix.
num_rows;
79 int32_t num_vec=feature_matrix.
num_cols;
// Exactly one label per training vector is required.
80 REQUIRE(num_vec==train_labels.
vlen,
"Number of training examples(%d) should be "
81 "equal to number of labels specified(%d)!\n", num_vec, train_labels.
vlen);
// Partition example indices by binary label (-1 / +1).
90 for(i=0; i<train_labels.
vlen; i++)
92 if (train_labels.
vector[i]==-1)
93 classidx_neg[num_neg++]=i;
95 else if(train_labels.
vector[i]==+1)
96 classidx_pos[num_pos++]=i;
// View the shogun matrix as an Eigen matrix without copying.
101 MatrixXd fmatrix=Map<MatrixXd>(feature_matrix.
matrix, num_feat, num_vec);
102 VectorXd mean_neg(num_feat);
103 mean_neg=VectorXd::Zero(num_feat);
104 VectorXd mean_pos(num_feat);
105 mean_pos=VectorXd::Zero(num_feat);
// Accumulate the negative-class sum...
108 for(i=0; i<num_neg; i++)
109 mean_neg+=fmatrix.col(classidx_neg[i]);
// ...then center the negative-class columns around the class mean.
// NOTE(review): the division of the accumulated sum by num_neg is not
// visible in this extract — presumably it happens on an elided line
// (between 109 and 113); verify in the full file.
113 for(i=0; i<num_neg; i++)
114 fmatrix.col(classidx_neg[i])-=mean_neg;
// Same accumulate-then-center for the positive class.
117 for(i=0; i<num_pos; i++)
118 mean_pos+=fmatrix.col(classidx_pos[i]);
122 for(i=0; i<num_pos; i++)
123 fmatrix.col(classidx_pos[i])-=mean_pos;
// View over the preallocated scatter-matrix buffer (num_feat x num_feat).
126 Map<MatrixXd> scatter(scatter_matrix.
matrix, num_feat, num_feat);
// Pooled covariance of the centered data: X*X^T / (N-1).
131 MatrixXd cov_mat(num_feat, num_feat);
132 cov_mat=fmatrix*fmatrix.transpose();
133 scatter=cov_mat/(num_vec-1);
// Shrinkage regularization: add gamma * trace / num_feat to the diagonal
// so the Cholesky factorization below stays well-conditioned.
137 scatter.diagonal()+=VectorXd::Constant(num_feat, trace*
m_gamma/num_feat);
// Solve scatter * w = (mean_pos - mean_neg) via Cholesky (LLT) — the
// classic LDA discriminant direction, written directly into w.vector.
146 Map<VectorXd> x(
w.
vector, num_feat);
147 LLT<MatrixXd> decomposition(scatter);
148 x=decomposition.solve(mean_pos-mean_neg);
// Bias places the decision threshold midway between the projected means.
151 VectorXd w_neg=decomposition.solve(mean_neg);
152 VectorXd w_pos=decomposition.solve(mean_pos);
155 bias=0.5*(w_neg.dot(mean_neg)-w_pos.dot(mean_pos));
// ---- Alternative branch. NOTE(review): presumably taken when the scatter
// matrix would be rank-deficient (num_feat > num_vec); the governing
// conditional is on an elided line — TODO confirm. ----
164 MatrixXd fmatrix1=Map<MatrixXd>(feature_matrix.
matrix, num_feat, num_vec);
// Gather the (already centered) per-class column blocks.
167 MatrixXd cen_pos(num_feat,num_pos);
168 MatrixXd cen_neg(num_feat,num_neg);
170 for(i=0; i<num_pos;i++)
171 cen_pos.col(i)=fmatrix.col(classidx_pos[i]);
173 for(i=0; i<num_neg;i++)
174 cen_neg.col(i)=fmatrix.col(classidx_neg[i]);
// Per-class sample covariances (unbiased, divide by n_k - 1).
177 cen_pos=cen_pos*cen_pos.transpose()/(
float64_t(num_pos-1));
180 cen_neg=cen_neg*cen_neg.transpose()/(
float64_t(num_neg-1));
// Within-class scatter Sw, regularized on the diagonal as above.
183 MatrixXd Sw= num_pos*cen_pos+num_neg*cen_neg;
187 Sw.diagonal()+=VectorXd::Constant(num_feat, trace*
m_gamma/num_feat);
// Overall mean, and the rank<=2 factor of the between-class scatter:
// Sb = sum_k n_k (mu_k - mu)(mu_k - mu)^T stored as two scaled columns.
190 VectorXd mean_total=(num_pos*mean_pos+num_neg*mean_neg)/(
float64_t)num_vec;
193 MatrixXd Sb(num_feat,2);
194 Sb.col(0)=sqrt(num_pos)*(mean_pos-mean_total);
195 Sb.col(1)=sqrt(num_neg)*(mean_neg-mean_total);
// Thin-SVD basis Q of the data spans the subspace in which Sw is
// invertible; project both scatter matrices into it.
197 JacobiSVD<MatrixXd> svd(fmatrix1, ComputeThinU);
200 MatrixXd Q=svd.matrixU();
202 Sb=Q.transpose()*(Sb*(Sb.transpose()))*Q;
205 Sw=Q.transpose()*Sw*Q;
// Solve the generalized eigenproblem Sb v = lambda Sw v by whitening with
// the (transposed upper) Cholesky factor of Sw, then taking the leading
// left singular vector of the whitened Sb.
210 HouseholderQR<MatrixXd> decomposition(Sw.llt().matrixU().transpose());
216 JacobiSVD<MatrixXd> svd2(decomposition.solve((decomposition.solve(Sb))
217 .transpose()).transpose(), ComputeThinU);
// Map the discriminant direction back to the original feature space.
221 Map<VectorXd> x(
w.
vector, num_feat);
222 x=Q*(svd2.matrixU().col(0));
// NOTE(review): this branch sets bias = +w . mean_total, while the first
// branch uses the midpoint form with a minus sign — a sign/convention
// difference that is presumably reconciled on an elided line; verify.
224 bias=(x.transpose()*mean_total);
virtual const char * get_name() const =0
virtual bool train_machine(CFeatures *data=NULL)
virtual ELabelType get_label_type() const =0
The class Labels models labels, i.e. class assignments of objects.
virtual void set_features(CDotFeatures *feat)
Features that support dot products among other operations.
CLDA(float64_t gamma=0, ELDAMethod method=AUTO_LDA)
Class LinearMachine is a generic interface for all kinds of linear machines like classifiers.
The class Features is the base class of all feature objects.
Binary Labels for binary classification.
bool has_property(EFeatureProperty p) const
virtual void set_labels(CLabels *lab)