SHOGUN  4.0.0
GUIClassifier.cpp
Go to the documentation of this file.
1 /*
2  * This program is free software; you can redistribute it and/or modify
3  * it under the terms of the GNU General Public License as published by
4  * the Free Software Foundation; either version 3 of the License, or
5  * (at your option) any later version.
6  *
7  * Written (W) 1999-2009 Soeren Sonnenburg
8  * Written (W) 1999-2008 Gunnar Raetsch
9  * Copyright (C) 1999-2009 Fraunhofer Institute FIRST and Max-Planck-Society
10  */
11 #include <shogun/ui/GUIClassifier.h>
12 #include <shogun/ui/SGInterface.h>
13 
14 #include <shogun/lib/config.h>
15 #include <shogun/io/SGIO.h>
16 
19 #include <shogun/labels/Labels.h>
20 
22 
23 #include <shogun/multiclass/KNN.h>
27 
28 #include <shogun/classifier/LDA.h>
29 #include <shogun/classifier/LPM.h>
32 
34 
35 
36 
46 
49 
55 
60 
62 
63 using namespace shogun;
64 
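// CGUIClassifier holds the currently allocated classifier/regressor of the
// (legacy) static interfaces and caches all training parameters. The
// constructor resets the handle and sets the defaults later used by the
// set_*() methods and the train_*() methods below.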
65 CGUIClassifier::CGUIClassifier(CSGInterface* ui_)
66 : CSGObject(), ui(ui_)
67 {
68  constraint_generator=NULL;
69  classifier=NULL;
70  max_train_time=0;
71 
72  // Perceptron parameters
73  perceptron_learnrate=0.1;
74  perceptron_maxiter=1000;
75 
76  // SVM parameters
77  svm_qpsize=41;
78  svm_bufsize=3000;
79  svm_max_qpsize=1000;
80  mkl_norm=1;
81  ent_lambda=0;
82  mkl_block_norm=1;
83  svm_C1=1;
84  svm_C2=1;
85  C_mkl=0;
86  mkl_use_interleaved=true;
87  svm_weight_epsilon=1e-5;
88  svm_epsilon=1e-5;
89  svm_tube_epsilon=1e-2;
90  svm_nu=0.5;
91  svm_use_shrinking = true ;
92 
93  svm_use_bias = true;
94  svm_use_batch_computation = true;
95  svm_use_linadd = true ;
96  svm_do_auc_maximization = false ;
97 
98  // KRR parameters
99  krr_tau=1;
100 
101  solver_type=ST_AUTO;
102 }
103 
104 CGUIClassifier::~CGUIClassifier()
105 {
106  SG_UNREF(classifier);
107  SG_UNREF(constraint_generator);
108 }
109 
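// new_classifier(): factory dispatch on the classifier's string name. It
// releases any previously allocated machine, creates the requested one and
// pre-configures linear SVMs (C, epsilon, bias, buffer size) from the cached
// parameters. d/from_d are only used by WDSVMOCAS (weighted degree order).
//
// Typical call sequence from driver code (hypothetical sketch, not part of
// this file; "interface" stands for the surrounding CSGInterface object):
//   CGUIClassifier* c=new CGUIClassifier(interface);
//   c->new_classifier((char*) "LIBSVM");   // allocate a LibSVM C-SVC
//   c->set_svm_C(1.0, 1.0);
//   c->set_svm_epsilon(1e-3);
//   c->train_svm();                        // uses ui_kernel/ui_labels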
110 bool CGUIClassifier::new_classifier(char* name, int32_t d, int32_t from_d)
111 {
112  if (strcmp(name,"LIBSVM_ONECLASS")==0)
113  {
114  SG_UNREF(classifier);
115  classifier = new CLibSVMOneClass();
116  SG_INFO("created SVMlibsvm object for oneclass\n")
117  }
118  else if (strcmp(name,"LIBSVM_MULTICLASS")==0)
119  {
120  SG_UNREF(classifier);
121  classifier= new CMulticlassLibSVM();
122  SG_INFO("created SVMlibsvm object for multiclass\n")
123  }
124  else if (strcmp(name,"LIBSVM_NUMULTICLASS")==0)
125  {
126  SG_UNREF(classifier);
127  classifier= new CMulticlassLibSVM(LIBSVM_NU_SVC);
128  SG_INFO("created SVMlibsvm object for multiclass\n")
129  }
130 
131  else if (strcmp(name,"SCATTERSVM_NO_BIAS_LIBSVM")==0)
132  {
133  SG_UNREF(classifier);
134  classifier= new CScatterSVM(NO_BIAS_LIBSVM);
135  SG_INFO("created ScatterSVM NO BIAS LIBSVM object\n")
136  }
137  else if (strcmp(name,"SCATTERSVM_TESTRULE1")==0)
138  {
139  SG_UNREF(classifier);
140  classifier= new CScatterSVM(TEST_RULE1);
141  SG_INFO("created ScatterSVM TESTRULE1 object\n")
142  }
143  else if (strcmp(name,"SCATTERSVM_TESTRULE2")==0)
144  {
145  SG_UNREF(classifier);
146  classifier= new CScatterSVM(TEST_RULE2);
147  SG_INFO("created ScatterSVM TESTRULE2 object\n")
148  }
149  else if (strcmp(name,"LIBSVM_NU")==0)
150  {
151  SG_UNREF(classifier);
152  classifier= new CLibSVM(LIBSVM_NU_SVC);
153  SG_INFO("created SVMlibsvm object\n")
154  }
155  else if (strcmp(name,"LIBSVM")==0)
156  {
157  SG_UNREF(classifier);
158  classifier= new CLibSVM();
159  SG_INFO("created SVMlibsvm object\n")
160  }
161  else if (strcmp(name,"LARANK")==0)
162  {
163  SG_UNREF(classifier);
164  classifier= new CLaRank();
165  SG_INFO("created LaRank object\n")
166  }
167 
168  else if (strcmp(name,"GPBTSVM")==0)
169  {
170  SG_UNREF(classifier);
171  classifier= new CGPBTSVM();
172  SG_INFO("created GPBT-SVM object\n")
173  }
174  else if (strcmp(name,"MPDSVM")==0)
175  {
176  SG_UNREF(classifier);
177  classifier= new CMPDSVM();
178  SG_INFO("created MPD-SVM object\n")
179  }
180  else if (strcmp(name,"GNPPSVM")==0)
181  {
182  SG_UNREF(classifier);
183  classifier= new CGNPPSVM();
184  SG_INFO("created GNPP-SVM object\n")
185  }
186  else if (strcmp(name,"GMNPSVM")==0)
187  {
188  SG_UNREF(classifier);
189  classifier= new CGMNPSVM();
190  SG_INFO("created GMNP-SVM object\n")
191  }
192  else if (strcmp(name,"LIBSVR")==0)
193  {
194  SG_UNREF(classifier);
195  classifier= new CLibSVR();
196  SG_INFO("created SVRlibsvm object\n")
197  }
198 #ifdef HAVE_LAPACK
199  else if (strcmp(name, "KERNELRIDGEREGRESSION")==0)
200  {
201  SG_UNREF(classifier);
202  classifier=new CKernelRidgeRegression(krr_tau, ui->ui_kernel->get_kernel(),
203  ui->ui_labels->get_train_labels());
204  SG_INFO("created KernelRidgeRegression object %p\n", classifier)
205  }
206 #endif //HAVE_LAPACK
207  else if (strcmp(name,"PERCEPTRON")==0)
208  {
209  SG_UNREF(classifier);
210  classifier= new CPerceptron();
211  SG_INFO("created Perceptron object\n")
212  }
213 #ifdef HAVE_LAPACK
214  else if (strncmp(name,"LIBLINEAR",9)==0)
215  {
216  LIBLINEAR_SOLVER_TYPE st=L2R_LR;
217 
218  if (strcmp(name,"LIBLINEAR_L2R_LR")==0)
219  {
220  st=L2R_LR;
221  SG_INFO("created LibLinear l2 regularized logistic regression object\n")
222  }
223  else if (strcmp(name,"LIBLINEAR_L2R_L2LOSS_SVC_DUAL")==0)
224  {
225  st=L2R_L2LOSS_SVC_DUAL;
226  SG_INFO("created LibLinear l2 regularized l2 loss SVM dual object\n")
227  }
228  else if (strcmp(name,"LIBLINEAR_L2R_L2LOSS_SVC")==0)
229  {
230  st=L2R_L2LOSS_SVC;
231  SG_INFO("created LibLinear l2 regularized l2 loss SVM primal object\n")
232  }
233  else if (strcmp(name,"LIBLINEAR_L1R_L2LOSS_SVC")==0)
234  {
235  st=L1R_L2LOSS_SVC;
236  SG_INFO("created LibLinear l1 regularized l2 loss SVM primal object\n")
237  }
238  else if (strcmp(name,"LIBLINEAR_L2R_L1LOSS_SVC_DUAL")==0)
239  {
240  st=L2R_L1LOSS_SVC_DUAL;
241  SG_INFO("created LibLinear l2 regularized l1 loss dual SVM object\n")
242  }
243  else
244  SG_ERROR("unknown liblinear type\n")
245 
246  SG_UNREF(classifier);
247  classifier= new CLibLinear(st);
248  ((CLibLinear*) classifier)->set_C(svm_C1, svm_C2);
249  ((CLibLinear*) classifier)->set_epsilon(svm_epsilon);
250  ((CLibLinear*) classifier)->set_bias_enabled(svm_use_bias);
251  }
252 #endif //HAVE_LAPACK
253 #ifdef HAVE_EIGEN3
254  else if (strcmp(name,"LDA")==0)
255  {
256  SG_UNREF(classifier);
257  classifier= new CLDA();
258  SG_INFO("created LDA object\n")
259  }
260 #endif //HAVE_EIGEN3
261 #ifdef USE_CPLEX
262  else if (strcmp(name,"LPM")==0)
263  {
264  SG_UNREF(classifier);
265  classifier= new CLPM();
266  ((CLPM*) classifier)->set_C(svm_C1, svm_C2);
267  ((CLPM*) classifier)->set_epsilon(svm_epsilon);
268  ((CLPM*) classifier)->set_bias_enabled(svm_use_bias);
269  ((CLPM*) classifier)->set_max_train_time(max_train_time);
270  SG_INFO("created LPM object\n")
271  }
272  else if (strcmp(name,"LPBOOST")==0)
273  {
274  SG_UNREF(classifier);
275  classifier= new CLPBoost();
276  ((CLPBoost*) classifier)->set_C(svm_C1, svm_C2);
277  ((CLPBoost*) classifier)->set_epsilon(svm_epsilon);
278  ((CLPBoost*) classifier)->set_bias_enabled(svm_use_bias);
279  ((CLPBoost*) classifier)->set_max_train_time(max_train_time);
280  SG_INFO("created LPBoost object\n")
281  }
282 #endif //USE_CPLEX
283  else if (strncmp(name,"KNN", strlen("KNN"))==0)
284  {
285  SG_UNREF(classifier);
286  classifier= new CKNN();
287  SG_INFO("created KNN object\n")
288  }
289  else if (strncmp(name,"KMEANS", strlen("KMEANS"))==0)
290  {
291  SG_UNREF(classifier);
292  classifier= new CKMeans();
293  SG_INFO("created KMeans object\n")
294  }
295  else if (strncmp(name,"HIERARCHICAL", strlen("HIERARCHICAL"))==0)
296  {
297  SG_UNREF(classifier);
298  classifier= new CHierarchical();
299  SG_INFO("created Hierarchical clustering object\n")
300  }
301  else if (strcmp(name,"SVMLIN")==0)
302  {
303  SG_UNREF(classifier);
304  classifier= new CSVMLin();
305  ((CSVMLin*) classifier)->set_C(svm_C1, svm_C2);
306  ((CSVMLin*) classifier)->set_epsilon(svm_epsilon);
307  ((CSVMLin*) classifier)->set_bias_enabled(svm_use_bias);
308  SG_INFO("created SVMLin object\n")
309  }
310  else if (strncmp(name,"WDSVMOCAS", strlen("WDSVMOCAS"))==0)
311  {
312  SG_UNREF(classifier);
313  classifier= new CWDSVMOcas(SVM_OCAS);
314 
315  ((CWDSVMOcas*) classifier)->set_bias_enabled(svm_use_bias);
316  ((CWDSVMOcas*) classifier)->set_degree(d, from_d);
317  ((CWDSVMOcas*) classifier)->set_C(svm_C1, svm_C2);
318  ((CWDSVMOcas*) classifier)->set_epsilon(svm_epsilon);
319  ((CWDSVMOcas*) classifier)->set_bufsize(svm_bufsize);
320  SG_INFO("created Weighted Degree Kernel SVM Ocas(OCAS) object of order %d (from order:%d)\n", d, from_d)
321  }
322  else if (strcmp(name,"SVMOCAS")==0)
323  {
324  SG_UNREF(classifier);
325  classifier= new CSVMOcas(SVM_OCAS);
326 
327  ((CSVMOcas*) classifier)->set_C(svm_C1, svm_C2);
328  ((CSVMOcas*) classifier)->set_epsilon(svm_epsilon);
329  ((CSVMOcas*) classifier)->set_bufsize(svm_bufsize);
330  ((CSVMOcas*) classifier)->set_bias_enabled(svm_use_bias);
331  SG_INFO("created SVM Ocas(OCAS) object\n")
332  }
333  else if (strcmp(name,"SVMSGD")==0)
334  {
335  SG_UNREF(classifier);
336  classifier= new CSVMSGD(svm_C1);
337  ((CSVMSGD*) classifier)->set_bias_enabled(svm_use_bias);
338  SG_INFO("created SVM SGD object\n")
339  }
340  else if (strcmp(name,"SVMBMRM")==0 || (strcmp(name,"SVMPERF")==0))
341  {
342  SG_UNREF(classifier);
343  classifier= new CSVMOcas(SVM_BMRM);
344 
345  ((CSVMOcas*) classifier)->set_C(svm_C1, svm_C2);
346  ((CSVMOcas*) classifier)->set_epsilon(svm_epsilon);
347  ((CSVMOcas*) classifier)->set_bufsize(svm_bufsize);
348  ((CSVMOcas*) classifier)->set_bias_enabled(svm_use_bias);
349  SG_INFO("created SVM Ocas(BMRM/PERF) object\n")
350  }
351  else if (strcmp(name,"MKL_CLASSIFICATION")==0)
352  {
353  SG_UNREF(classifier);
354  classifier= new CMKLClassification();
355  }
356  else if (strcmp(name,"MKL_ONECLASS")==0)
357  {
358  SG_UNREF(classifier);
359  classifier= new CMKLOneClass();
360  }
361  else if (strcmp(name,"MKL_MULTICLASS")==0)
362  {
363  SG_UNREF(classifier);
364  classifier= new CMKLMulticlass();
365  }
366  else if (strcmp(name,"MKL_REGRESSION")==0)
367  {
368  SG_UNREF(classifier);
369  classifier= new CMKLRegression();
370  }
371  else
372  {
373  SG_ERROR("Unknown classifier %s.\n", name)
374  return false;
375  }
377 
378  return (classifier!=NULL);
379 }
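// train_mkl_multiclass(): trains a multiclass MKL machine on the kernel and
// labels registered with the GUI. It verifies that the kernel is initialized
// on the train features and that label/vector counts match, then forwards the
// cached norm, epsilon, nu, C and qpsize settings before calling train().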
380 
381 bool CGUIClassifier::train_mkl_multiclass()
382 {
383  CMKLMulticlass* mkl= (CMKLMulticlass*) classifier;
384  if (!mkl)
385  SG_ERROR("No MKL available.\n")
386 
387  CLabels* trainlabels=ui->ui_labels->get_train_labels();
388  if (!trainlabels)
389  SG_ERROR("No trainlabels available.\n")
390 
391  CKernel* kernel=ui->ui_kernel->get_kernel();
392  if (!kernel)
393  SG_ERROR("No kernel available.\n")
394 
395  bool success=ui->ui_kernel->init_kernel("TRAIN");
396 
397  if (!success || !ui->ui_kernel->is_initialized() || !kernel->has_features())
398  SG_ERROR("Kernel not initialized / no train features available.\n")
399 
400  int32_t num_vec=kernel->get_num_vec_lhs();
401  if (trainlabels->get_num_labels() != num_vec)
402  SG_ERROR("Number of train labels (%d) and training vectors (%d) differs!\n", trainlabels->get_num_labels(), num_vec)
403 
404  SG_INFO("Starting MC-MKL training on %ld vectors using C1=%lf C2=%lf epsilon=%lf\n", num_vec, svm_C1, svm_C2, svm_epsilon)
405 
407  mkl->set_mkl_norm(mkl_norm);
408  //mkl->set_max_num_mkliters(-1);
411  mkl->set_epsilon(svm_epsilon);
414  mkl->set_nu(svm_nu);
415  mkl->set_C(svm_C1);
416  mkl->set_qpsize(svm_qpsize);
420 
421  ((CKernelMulticlassMachine*) mkl)->set_labels(trainlabels);
422  ((CKernelMulticlassMachine*) mkl)->set_kernel(kernel);
423 
424  return mkl->train();
425 }
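// train_mkl(): trains a two-class or one-class MKL machine. Besides the usual
// SVM parameters it forwards the MKL-specific settings (mkl_norm, C_mkl) and,
// when AUC maximization is enabled, wraps the kernel in a CAUCKernel with
// labels derived via setup_auc_maximization().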
426 
427 bool CGUIClassifier::train_mkl()
428 {
429  CMKL* mkl= (CMKL*) classifier;
430  if (!mkl)
431  SG_ERROR("No SVM available.\n")
432 
433  bool oneclass=(mkl->get_classifier_type()==CT_LIBSVMONECLASS);
434  CLabels* trainlabels=NULL;
435  if(!oneclass)
436  trainlabels=ui->ui_labels->get_train_labels();
437  else
438  SG_INFO("Training one class mkl.\n")
439  if (!trainlabels && !oneclass)
440  SG_ERROR("No trainlabels available.\n")
441 
442  CKernel* kernel=ui->ui_kernel->get_kernel();
443  if (!kernel)
444  SG_ERROR("No kernel available.\n")
445 
446  bool success=ui->ui_kernel->init_kernel("TRAIN");
447  if (!success || !ui->ui_kernel->is_initialized() || !kernel->has_features())
448  SG_ERROR("Kernel not initialized.\n")
449 
450  int32_t num_vec=kernel->get_num_vec_lhs();
451  if (!oneclass && trainlabels->get_num_labels() != num_vec)
452  SG_ERROR("Number of train labels (%d) and training vectors (%d) differs!\n", trainlabels->get_num_labels(), num_vec)
453 
454  SG_INFO("Starting SVM training on %ld vectors using C1=%lf C2=%lf epsilon=%lf\n", num_vec, svm_C1, svm_C2, svm_epsilon)
455 
460  mkl->set_epsilon(svm_epsilon);
463  mkl->set_nu(svm_nu);
464  mkl->set_C(svm_C1, svm_C2);
465  mkl->set_qpsize(svm_qpsize);
470  mkl->set_mkl_norm(mkl_norm);
473  mkl->set_C_mkl(C_mkl);
475 
476  if (svm_do_auc_maximization)
477  {
478  CAUCKernel* auc_kernel = new CAUCKernel(10, kernel);
479  CLabels* auc_labels= auc_kernel->setup_auc_maximization(trainlabels);
480  ((CKernelMachine*) mkl)->set_labels(auc_labels);
481  ((CKernelMachine*) mkl)->set_kernel(auc_kernel);
482  SG_UNREF(auc_labels);
483  }
484  else
485  {
486  if(!oneclass)
487  ((CKernelMachine*) mkl)->set_labels(trainlabels);
488  ((CKernelMachine*) mkl)->set_kernel(kernel);
489  }
490 
491  bool result=mkl->train();
492 
493  return result;
494 }
495 
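// train_svm(): trains the currently allocated kernel machine. Multiclass
// types (LaRank, GMNPSVM, LibSVM multiclass) are configured through the
// multiclass SVM interface, everything else through CSVM; AUC maximization
// again swaps in a CAUCKernel with derived labels.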
496 bool CGUIClassifier::train_svm()
497 {
498  EMachineType type = classifier->get_classifier_type();
499 
500  if (!classifier)
501  SG_ERROR("No SVM available.\n")
502 
503  bool oneclass=(type==CT_LIBSVMONECLASS);
504  CLabels* trainlabels=NULL;
505  if(!oneclass)
506  trainlabels=ui->ui_labels->get_train_labels();
507  else
508  SG_INFO("Training one class svm.\n")
509  if (!trainlabels && !oneclass)
510  SG_ERROR("No trainlabels available.\n")
511 
512  CKernel* kernel=ui->ui_kernel->get_kernel();
513  if (!kernel)
514  SG_ERROR("No kernel available.\n")
515 
516  bool success=ui->ui_kernel->init_kernel("TRAIN");
517 
518  if (!success || !ui->ui_kernel->is_initialized() || !kernel->has_features())
519  SG_ERROR("Kernel not initialized / no train features available.\n")
520 
521  int32_t num_vec=kernel->get_num_vec_lhs();
522  if (!oneclass && trainlabels->get_num_labels() != num_vec)
523  SG_ERROR("Number of train labels (%d) and training vectors (%d) differs!\n", trainlabels->get_num_labels(), num_vec)
524 
525  SG_INFO("Starting SVM training on %ld vectors using C1=%lf C2=%lf epsilon=%lf\n", num_vec, svm_C1, svm_C2, svm_epsilon)
526 
527  if (type==CT_LARANK || type==CT_GMNPSVM || type==CT_LIBSVMMULTICLASS)
528  {
529  CMulticlassSVM* svm = (CMulticlassSVM*)classifier;
532  svm->set_epsilon(svm_epsilon);
535  svm->set_nu(svm_nu);
536  svm->set_C(svm_C1);
537  svm->set_qpsize(svm_qpsize);
541  }
542  else
543  {
544  CSVM* svm = (CSVM*)classifier;
547  svm->set_epsilon(svm_epsilon);
550  svm->set_nu(svm_nu);
551  svm->set_C(svm_C1, svm_C2);
552  svm->set_qpsize(svm_qpsize);
556  }
557 
558  if (type==CT_MKLMULTICLASS)
559  {
560  ((CMKLMulticlass *)classifier)->set_mkl_epsilon(svm_weight_epsilon);
561  }
562 
563  if (svm_do_auc_maximization)
564  {
565  CAUCKernel* auc_kernel = new CAUCKernel(10, kernel);
566  CLabels* auc_labels = auc_kernel->setup_auc_maximization(trainlabels);
567  ((CKernelMachine*)classifier)->set_labels(auc_labels);
568  ((CKernelMachine*)classifier)->set_kernel(auc_kernel);
569  SG_UNREF(auc_labels);
570  }
571  else
572  {
573  if (type==CT_LARANK || type==CT_GMNPSVM || type==CT_LIBSVMMULTICLASS)
574  {
575  ((CKernelMulticlassMachine*)classifier)->set_labels(trainlabels);
576  ((CKernelMulticlassMachine*)classifier)->set_kernel(kernel);
577  }
578  else
579  {
580  if(!oneclass)
581  ((CKernelMachine*)classifier)->set_labels(trainlabels);
582 
583  ((CKernelMachine*)classifier)->set_kernel(kernel);
584  }
585  }
586 
587  bool result = classifier->train();
588 
589  return result;
590 }
591 
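// train_clustering(): k is the number of clusters (KMeans) or the number of
// merges (hierarchical clustering); max_iter only applies to KMeans. The
// distance object registered with the GUI is initialized on the train
// features and attached to the distance machine.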
592 bool CGUIClassifier::train_clustering(int32_t k, int32_t max_iter)
593 {
594  bool result=false;
595  CDistance* distance=ui->ui_distance->get_distance();
596 
597  if (!distance)
598  SG_ERROR("No distance available\n")
599 
600  if (!ui->ui_distance->init_distance("TRAIN"))
601  SG_ERROR("Initializing distance with train features failed.\n")
602 
603  ((CDistanceMachine*) classifier)->set_distance(distance);
604 
605  EMachineType type=classifier->get_classifier_type();
606  switch (type)
607  {
608  case CT_KMEANS:
609  {
610  ((CKMeans*) classifier)->set_k(k);
611  ((CKMeans*) classifier)->set_max_iter(max_iter);
612  result=((CKMeans*) classifier)->train();
613  break;
614  }
615  case CT_HIERARCHICAL:
616  {
617  ((CHierarchical*) classifier)->set_merges(k);
618  result=((CHierarchical*) classifier)->train();
619  break;
620  }
621  default:
622  SG_ERROR("Unknown clustering type %d\n", type)
623  }
624 
625  return result;
626 }
627 
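// train_knn(): attaches the train labels and the GUI's distance object to the
// KNN machine and trains it with the given k.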
628 bool CGUIClassifier::train_knn(int32_t k)
629 {
630  CLabels* trainlabels=ui->ui_labels->get_train_labels();
631  CDistance* distance=ui->ui_distance->get_distance();
632 
633  bool result=false;
634 
635  if (trainlabels)
636  {
637  if (distance)
638  {
639  if (!ui->ui_distance->init_distance("TRAIN"))
640  SG_ERROR("Initializing distance with train features failed.\n")
641  ((CKNN*) classifier)->set_labels(trainlabels);
642  ((CKNN*) classifier)->set_distance(distance);
643  ((CKNN*) classifier)->set_k(k);
644  result=((CKNN*) classifier)->train();
645  }
646  else
647  SG_ERROR("No distance available.\n")
648  }
649  else
650  SG_ERROR("No labels available\n")
651 
652  return result;
653 }
654 
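// Kernel ridge regression training (only available with LAPACK): reuses the
// GUI's kernel and train labels; the regularization comes from the cached
// krr_tau value (see set_krr_tau()).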
655 bool CGUIClassifier::train_krr()
656 {
657 #ifdef HAVE_LAPACK
658  CKernelRidgeRegression* krr = (CKernelRidgeRegression*) classifier;
659  if (!krr)
660  SG_ERROR("No SVM available.\n")
661 
662  CLabels* trainlabels=NULL;
663  trainlabels=ui->ui_labels->get_train_labels();
664  if (!trainlabels)
665  SG_ERROR("No trainlabels available.\n")
666 
667  CKernel* kernel=ui->ui_kernel->get_kernel();
668  if (!kernel)
669  SG_ERROR("No kernel available.\n")
670 
671  bool success=ui->ui_kernel->init_kernel("TRAIN");
672 
673  if (!success || !ui->ui_kernel->is_initialized() || !kernel->has_features())
674  SG_ERROR("Kernel not initialized / no train features available.\n")
675 
676  int32_t num_vec=kernel->get_num_vec_lhs();
677  if (trainlabels->get_num_labels() != num_vec)
678  SG_ERROR("Number of train labels (%d) and training vectors (%d) differs!\n", trainlabels->get_num_labels(), num_vec)
679 
680 
681  // Set training labels and kernel
682  krr->set_labels(trainlabels);
683  krr->set_kernel(kernel);
684 
685  bool result=krr->train();
686  return result;
687 #else
688  return false;
689 #endif
690 }
691 
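// train_linear(): trains any CLinearMachine-based classifier on DotFeatures;
// gamma is only used as the regularization parameter of LDA, and the
// perceptron learn rate / maximum iterations are taken from the cached
// parameters.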
692 bool CGUIClassifier::train_linear(float64_t gamma)
693 {
694  EMachineType ctype=classifier->get_classifier_type();
695 
696  CFeatures* trainfeatures=ui->ui_features->get_train_features();
697  CLabels* trainlabels=ui->ui_labels->get_train_labels();
698  bool result=false;
699 
700  if (!trainfeatures)
701  SG_ERROR("No trainfeatures available.\n")
702 
703  if (!trainfeatures->has_property(FP_DOT))
704  SG_ERROR("Trainfeatures not based on DotFeatures.\n")
705 
706  if (!trainlabels)
707  SG_ERROR("No labels available\n")
708 
709  if (ctype==CT_PERCEPTRON)
710  {
711  ((CPerceptron*) classifier)->set_learn_rate(perceptron_learnrate);
712  ((CPerceptron*) classifier)->set_max_iter(perceptron_maxiter);
713  }
714 
715 #ifdef HAVE_EIGEN3
716  if (ctype==CT_LDA)
717  {
718  if (trainfeatures->get_feature_type()!=F_DREAL ||
719  trainfeatures->get_feature_class()!=C_DENSE)
720  SG_ERROR("LDA requires train features of class SIMPLE type REAL.\n")
721  ((CLDA*) classifier)->set_gamma(gamma);
722  }
723 #endif //HAVE_EIGEN3
724 
725  if (ctype==CT_SVMOCAS)
726  ((CSVMOcas*) classifier)->set_C(svm_C1, svm_C2);
727 #ifdef HAVE_LAPACK
728  else if (ctype==CT_LIBLINEAR)
729  ((CLibLinear*) classifier)->set_C(svm_C1, svm_C2);
730 #endif
731  else if (ctype==CT_SVMLIN)
732  ((CSVMLin*) classifier)->set_C(svm_C1, svm_C2);
733  else if (ctype==CT_SVMSGD)
734  ((CSVMSGD*) classifier)->set_C(svm_C1, svm_C2);
735  else if (ctype==CT_LPM || ctype==CT_LPBOOST)
736  {
737  if (trainfeatures->get_feature_class()!=C_SPARSE ||
738  trainfeatures->get_feature_type()!=F_DREAL)
739  SG_ERROR("LPM and LPBOOST require trainfeatures of class SPARSE type REAL.\n")
740  }
741 
742  ((CLinearMachine*) classifier)->set_labels(trainlabels);
743  ((CLinearMachine*) classifier)->set_features((CDenseFeatures<float64_t>*) trainfeatures);
744  result=((CLinearMachine*) classifier)->train();
745 
746  return result;
747 }
748 
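// WD-SVMOcas training: requires string features of class STRING and type
// BYTE, which are attached together with the train labels before training.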
749 bool CGUIClassifier::train_wdocas()
750 {
751  CFeatures* trainfeatures=ui->ui_features->get_train_features();
752  CLabels* trainlabels=ui->ui_labels->get_train_labels();
753 
754  bool result=false;
755 
756  if (!trainfeatures)
757  SG_ERROR("No trainfeatures available.\n")
758 
759  if (trainfeatures->get_feature_class()!=C_STRING ||
760  trainfeatures->get_feature_type()!=F_BYTE )
761  SG_ERROR("Trainfeatures are not of class STRING type BYTE.\n")
762 
763  if (!trainlabels)
764  SG_ERROR("No labels available.\n")
765 
766  ((CWDSVMOcas*) classifier)->set_labels(trainlabels);
767  ((CWDSVMOcas*) classifier)->set_features((CStringFeatures<uint8_t>*) trainfeatures);
768  result=((CWDSVMOcas*) classifier)->train();
769 
770  return result;
771 }
772 
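// load()/save(): (de)serialize the classifier via CSerializableAsciiFile.
// load() first allocates an empty object of the given type through
// new_classifier() and then reads its parameters from the file.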
773 bool CGUIClassifier::load(char* filename, char* type)
774 {
775  bool result=false;
776 
777  if (new_classifier(type))
778  {
779  FILE* model_file=fopen(filename, "r");
780  REQUIRE(model_file != NULL, "SVM/Classifier loading failed on file %s.\n", filename);
781 
782  CSerializableAsciiFile* ascii_file = new CSerializableAsciiFile(model_file,'r');
783 
784  if (ascii_file)
785  {
786  if (classifier && classifier->load_serializable(ascii_file))
787  {
788  SG_DEBUG("file successfully read.\n")
789  result=true;
790  }
791  else
792  SG_ERROR("SVM/Classifier creation/loading failed on file %s.\n", filename)
793 
794  delete ascii_file;
795  }
796  else
797  SG_ERROR("Opening file %s failed.\n", filename)
798 
799  return result;
800  }
801  else
802  SG_ERROR("Type %s of SVM/Classifier unknown.\n", type)
803 
804  return false;
805 }
806 
807 bool CGUIClassifier::save(char* param)
808 {
809  bool result=false;
810  param=SGIO::skip_spaces(param);
811 
812  if (classifier)
813  {
814  FILE* file=fopen(param, "w");
815  CSerializableAsciiFile* ascii_file = new CSerializableAsciiFile(file,'w');
816 
817  if ((!ascii_file) || (!classifier->save_serializable(ascii_file)))
818  printf("writing to file %s failed!\n", param);
819  else
820  {
821  printf("successfully written classifier into \"%s\" !\n", param);
822  result=true;
823  }
824 
825  if (ascii_file)
826  delete ascii_file;
827  }
828  else
829  SG_ERROR("create classifier first\n")
830 
831  return result;
832 }
833 
834 bool CGUIClassifier::set_perceptron_parameters(
835  float64_t learnrate, int32_t maxiter)
836 {
837  if (learnrate<=0)
838  perceptron_learnrate=0.1;
839  else
840  perceptron_learnrate=learnrate;
841 
842  if (maxiter<=0)
843  perceptron_maxiter=1000;
844  else
845  perceptron_maxiter=maxiter;
846  SG_INFO("Setting perceptron parameters (learnrate: %f, maxiter: %d).\n", perceptron_learnrate, perceptron_maxiter)
847 
848  return true;
849 }
850 
851 bool CGUIClassifier::set_svm_epsilon(float64_t epsilon)
852 {
853  if (epsilon<0)
854  svm_epsilon=1e-4;
855  else
856  svm_epsilon=epsilon;
857  SG_INFO("Set to svm_epsilon=%f.\n", svm_epsilon)
858 
859  return true;
860 }
861 
862 bool CGUIClassifier::set_max_train_time(float64_t max)
863 {
864  if (max>0)
865  {
866  max_train_time=max;
867  SG_INFO("Set to max_train_time=%f.\n", max_train_time)
868  }
869  else
870  SG_INFO("Disabling max_train_time.\n")
871 
872  return true;
873 }
874 
875 bool CGUIClassifier::set_svr_tube_epsilon(float64_t tube_epsilon)
876 {
877  if (!classifier)
878  SG_ERROR("No regression method allocated\n")
879 
883  {
884  SG_ERROR("Underlying method not capable of SV-regression\n")
885  }
886 
887  if (tube_epsilon<0)
888  svm_tube_epsilon=1e-2;
889  else svm_tube_epsilon=tube_epsilon;
890 
891  ((CSVM*) classifier)->set_tube_epsilon(svm_tube_epsilon);
892  SG_INFO("Set to svr_tube_epsilon=%f.\n", svm_tube_epsilon)
893 
894  return true;
895 }
896 
897 bool CGUIClassifier::set_svm_nu(float64_t nu)
898 {
899  if (nu<0 || nu>1)
900  nu=0.5;
901 
902  svm_nu=nu;
903  SG_INFO("Set to nu=%f.\n", svm_nu)
904 
905  return true;
906 }
907 
908 bool CGUIClassifier::set_svm_mkl_parameters(
909  float64_t weight_epsilon, float64_t C, float64_t norm)
910 {
911  if (weight_epsilon<0)
912  weight_epsilon=1e-4;
913  if (C<0)
914  C=0;
915  if (norm<0)
916  SG_ERROR("MKL norm >= 0\n")
917 
918  svm_weight_epsilon=weight_epsilon;
919  C_mkl=C;
920  mkl_norm=norm;
921 
922  SG_INFO("Set to weight_epsilon=%f.\n", svm_weight_epsilon)
923  SG_INFO("Set to C_mkl=%f.\n", C_mkl)
924  SG_INFO("Set to mkl_norm=%f.\n", mkl_norm)
925 
926  return true;
927 }
928 
929 bool CGUIClassifier::set_elasticnet_lambda(float64_t lambda)
930 {
931  if (lambda<0 || lambda>1)
932  SG_ERROR("0 <= ent_lambda <= 1\n")
933 
934  ent_lambda = lambda;
935  return true;
936 }
937 
938 bool CGUIClassifier::set_mkl_block_norm(float64_t mkl_bnorm)
939 {
940  if (mkl_bnorm<1)
941  SG_ERROR("1 <= mkl_block_norm <= inf\n")
942 
943  mkl_block_norm=mkl_bnorm;
944  return true;
945 }
946 
947 
948 bool CGUIClassifier::set_svm_C(float64_t C1, float64_t C2)
949 {
950  if (C1<0)
951  svm_C1=1.0;
952  else
953  svm_C1=C1;
954 
955  if (C2<0)
956  svm_C2=svm_C1;
957  else
958  svm_C2=C2;
959 
960  SG_INFO("Set to C1=%f C2=%f.\n", svm_C1, svm_C2)
961 
962  return true;
963 }
964 
965 bool CGUIClassifier::set_svm_qpsize(int32_t qpsize)
966 {
967  if (qpsize<2)
968  svm_qpsize=41;
969  else
970  svm_qpsize=qpsize;
971  SG_INFO("Set qpsize to svm_qpsize=%d.\n", svm_qpsize)
972 
973  return true;
974 }
975 
976 bool CGUIClassifier::set_svm_max_qpsize(int32_t max_qpsize)
977 {
978  if (max_qpsize<50)
979  svm_max_qpsize=50;
980  else
981  svm_max_qpsize=max_qpsize;
982  SG_INFO("Set max qpsize to svm_max_qpsize=%d.\n", svm_max_qpsize)
983 
984  return true;
985 }
986 
987 bool CGUIClassifier::set_svm_bufsize(int32_t bufsize)
988 {
989  if (bufsize<0)
990  svm_bufsize=3000;
991  else
992  svm_bufsize=bufsize;
993  SG_INFO("Set bufsize to svm_bufsize=%d.\n", svm_bufsize)
994 
995  return true ;
996 }
997 
998 bool CGUIClassifier::set_svm_shrinking_enabled(bool enabled)
999 {
1000  svm_use_shrinking=enabled;
1001  if (svm_use_shrinking)
1002  SG_INFO("Enabling shrinking optimization.\n")
1003  else
1004  SG_INFO("Disabling shrinking optimization.\n")
1005 
1006  return true;
1007 }
1008 
1009 bool CGUIClassifier::set_svm_batch_computation_enabled(bool enabled)
1010 {
1011  svm_use_batch_computation=enabled;
1012  if (svm_use_batch_computation)
1013  SG_INFO("Enabling batch computation.\n")
1014  else
1015  SG_INFO("Disabling batch computation.\n")
1016 
1017  return true;
1018 }
1019 
1020 bool CGUIClassifier::set_svm_linadd_enabled(bool enabled)
1021 {
1022  svm_use_linadd=enabled;
1023  if (svm_use_linadd)
1024  SG_INFO("Enabling LINADD optimization.\n")
1025  else
1026  SG_INFO("Disabling LINADD optimization.\n")
1027 
1028  return true;
1029 }
1030 
1031 bool CGUIClassifier::set_svm_bias_enabled(bool enabled)
1032 {
1033  svm_use_bias=enabled;
1034  if (svm_use_bias)
1035  SG_INFO("Enabling svm bias.\n")
1036  else
1037  SG_INFO("Disabling svm bias.\n")
1038 
1039  return true;
1040 }
1041 
1042 bool CGUIClassifier::set_mkl_interleaved_enabled(bool enabled)
1043 {
1044  mkl_use_interleaved=enabled;
1045  if (mkl_use_interleaved)
1046  SG_INFO("Enabling mkl interleaved optimization.\n")
1047  else
1048  SG_INFO("Disabling mkl interleaved optimization.\n")
1049 
1050  return true;
1051 }
1052 
1053 bool CGUIClassifier::set_do_auc_maximization(bool do_auc)
1054 {
1055  svm_do_auc_maximization=do_auc;
1056 
1057  if (svm_do_auc_maximization)
1058  SG_INFO("Enabling AUC maximization.\n")
1059  else
1060  SG_INFO("Disabling AUC maximization.\n")
1061 
1062  return true;
1063 }
1064 
1065 
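// classify() and the classify_*() helpers apply the trained machine to the
// test features/kernel/distance registered with the GUI and return a CLabels
// object; the switch below routes each classifier type to the right backend.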
1066 CLabels* CGUIClassifier::classify()
1067 {
1069 
1070  switch (classifier->get_classifier_type())
1071  {
1072  case CT_LIGHT:
1073  case CT_LIGHTONECLASS:
1074  case CT_LIBSVM:
1075  case CT_SCATTERSVM:
1076  case CT_MPD:
1077  case CT_GPBT:
1078  case CT_CPLEXSVM:
1079  case CT_GMNPSVM:
1080  case CT_GNPPSVM:
1081  case CT_LIBSVR:
1082  case CT_LIBSVMMULTICLASS:
1083  case CT_LIBSVMONECLASS:
1084  case CT_SVRLIGHT:
1085  case CT_MKLCLASSIFICATION:
1086  case CT_MKLMULTICLASS:
1087  case CT_MKLREGRESSION:
1088  case CT_MKLONECLASS:
1090  return classify_kernelmachine();
1091  case CT_KNN:
1092  return classify_distancemachine();
1093  case CT_PERCEPTRON:
1094  case CT_LDA:
1095  return classify_linear();
1096  case CT_SVMLIN:
1097  case CT_SVMPERF:
1098  case CT_SVMOCAS:
1099  case CT_SVMSGD:
1100  case CT_LPM:
1101  case CT_LPBOOST:
1102  case CT_LIBLINEAR:
1103  return classify_linear();
1104  case CT_WDSVMOCAS:
1105  return classify_byte_linear();
1106  default:
1107  SG_ERROR("unknown classifier type\n")
1108  break;
1109  };
1110 
1111  return NULL;
1112 }
1113 
1114 CLabels* CGUIClassifier::classify_kernelmachine()
1115 {
1116  CFeatures* trainfeatures=ui->ui_features->get_train_features();
1117  CFeatures* testfeatures=ui->ui_features->get_test_features();
1118 
1119  if (!classifier)
1120  SG_ERROR("No kernelmachine available.\n")
1121 
1122  bool success=true;
1123 
1124  REQUIRE(ui->ui_kernel->get_kernel(), "No kernel set");
1125  if (ui->ui_kernel->get_kernel()->get_kernel_type()!=K_CUSTOM)
1126  {
1127  if (ui->ui_kernel->get_kernel()->get_kernel_type()==K_COMBINED
1128  && ( !trainfeatures || !testfeatures ))
1129  {
1130  SG_DEBUG("skipping initialisation of combined kernel "
1131  "as train/test features are unavailable\n")
1132  }
1133  else
1134  {
1135  if (!trainfeatures)
1136  SG_ERROR("No training features available.\n")
1137  if (!testfeatures)
1138  SG_ERROR("No test features available.\n")
1139 
1140  success=ui->ui_kernel->init_kernel("TEST");
1141  }
1142  }
1143 
1144  if (!success || !ui->ui_kernel->is_initialized())
1145  SG_ERROR("Kernel not initialized.\n")
1146 
1147  EMachineType type=classifier->get_classifier_type();
1148  if (type==CT_LARANK || type==CT_GMNPSVM || type==CT_LIBSVMMULTICLASS ||
1149  type==CT_MKLMULTICLASS)
1150  {
1151  CKernelMulticlassMachine* kmcm = (CKernelMulticlassMachine*) classifier;
1152  kmcm->set_kernel(ui->ui_kernel->get_kernel());
1153  }
1154  else
1155  {
1156  CKernelMachine* km = (CKernelMachine*) classifier;
1157  km->set_kernel(ui->ui_kernel->get_kernel());
1159  }
1160 
1161  SG_INFO("Starting kernel machine testing.\n")
1162  return classifier->apply();
1163 }
1164 
1165 bool CGUIClassifier::get_trained_classifier(
1166  float64_t* &weights, int32_t &rows, int32_t &cols, float64_t*& bias,
1167  int32_t& brows, int32_t& bcols,
1168  int32_t idx) // which SVM for Multiclass
1169 {
1171 
1172  switch (classifier->get_classifier_type())
1173  {
1174  case CT_SCATTERSVM:
1175  case CT_GNPPSVM:
1176  case CT_LIBSVMMULTICLASS:
1177  case CT_LIGHT:
1178  case CT_LIGHTONECLASS:
1179  case CT_LIBSVM:
1180  case CT_MPD:
1181  case CT_GPBT:
1182  case CT_CPLEXSVM:
1183  case CT_GMNPSVM:
1184  case CT_LIBSVR:
1185  case CT_LIBSVMONECLASS:
1186  case CT_SVRLIGHT:
1187  case CT_MKLCLASSIFICATION:
1188  case CT_MKLREGRESSION:
1189  case CT_MKLONECLASS:
1190  case CT_MKLMULTICLASS:
1192  return get_svm(weights, rows, cols, bias, brows, bcols, idx);
1193  break;
1194  case CT_PERCEPTRON:
1195  case CT_LDA:
1196  case CT_LPM:
1197  case CT_LPBOOST:
1198  case CT_SVMOCAS:
1199  case CT_SVMSGD:
1200  case CT_SVMLIN:
1201  case CT_SVMPERF:
1202  case CT_LIBLINEAR:
1203  return get_linear(weights, rows, cols, bias, brows, bcols);
1204  break;
1205  case CT_KMEANS:
1206  case CT_HIERARCHICAL:
1207  return get_clustering(weights, rows, cols, bias, brows, bcols);
1208  break;
1209  case CT_KNN:
1210  SG_ERROR("not implemented")
1211  break;
1212  default:
1213  SG_ERROR("unknown classifier type\n")
1214  break;
1215  };
1216  return false;
1217 }
1218 
1219 
1220 int32_t CGUIClassifier::get_num_svms()
1221 {
1223  return ((CMulticlassSVM*) classifier)->get_num_machines();
1224 }
1225 
1226 bool CGUIClassifier::get_svm(
1227  float64_t* &weights, int32_t& rows, int32_t& cols, float64_t*& bias,
1228  int32_t& brows, int32_t& bcols, int32_t idx)
1229 {
1230  CSVM* svm=(CSVM*) classifier;
1231 
1232  if (idx>-1) // should be MulticlassSVM
1233  svm=((CMulticlassSVM*) svm)->get_svm(idx);
1234 
1235  if (svm)
1236  {
1237  brows=1;
1238  bcols=1;
1239  bias=SG_MALLOC(float64_t, 1);
1240  *bias=svm->get_bias();
1241 
1242  rows=svm->get_num_support_vectors();
1243  cols=2;
1244  weights=SG_MALLOC(float64_t, rows*cols);
1245 
1246  for (int32_t i=0; i<rows; i++)
1247  {
1248  weights[i]=svm->get_alpha(i);
1249  weights[i+rows]=svm->get_support_vector(i);
1250  }
1251 
1252  return true;
1253  }
1254 
1255  return false;
1256 }
1257 
1258 bool CGUIClassifier::get_clustering(
1259  float64_t* &centers, int32_t& rows, int32_t& cols, float64_t*& radi,
1260  int32_t& brows, int32_t& bcols)
1261 {
1262  if (!classifier)
1263  return false;
1264 
1265  switch (classifier->get_classifier_type())
1266  {
1267  case CT_KMEANS:
1268  {
1269  CKMeans* clustering=(CKMeans*) classifier;
1270 
1271  bcols=1;
1272  SGVector<float64_t> r=clustering->get_radiuses();
1273  brows=r.vlen;
1274  radi=SG_MALLOC(float64_t, brows);
1275  memcpy(radi, r.vector, sizeof(float64_t)*brows);
1276 
1277  cols=1;
1278  SGMatrix<float64_t> c=clustering->get_cluster_centers();
1279  rows=c.num_rows;
1280  cols=c.num_cols;
1281  centers=SG_MALLOC(float64_t, rows*cols);
1282  memcpy(centers, c.matrix, sizeof(float64_t)*rows*cols);
1283  break;
1284  }
1285 
1286  case CT_HIERARCHICAL:
1287  {
1288  CHierarchical* clustering=(CHierarchical*) classifier;
1289 
1290  // radi == merge_distances, centers == pairs
1291  bcols=1;
1292  SGVector<float64_t> r=clustering->get_merge_distances();
1293  brows=r.vlen;
1294  radi=SG_MALLOC(float64_t, brows);
1295  memcpy(radi, r.vector, sizeof(float64_t)*brows);
1296 
1297  SGMatrix<int32_t> p=clustering->get_cluster_pairs();
1298  rows=p.num_rows;
1299  cols=p.num_cols;
1300  centers=SG_MALLOC(float64_t, rows*cols);
1301  for (int32_t i=0; i<rows*cols; i++)
1302  centers[i]=(float64_t) p.matrix[i];
1303 
1304  break;
1305  }
1306 
1307  default:
1308  SG_ERROR("internal error - unknown clustering type\n")
1309  }
1310 
1311  return true;
1312 }
1313 
1314 bool CGUIClassifier::get_linear(
1315  float64_t* &weights, int32_t& rows, int32_t& cols, float64_t*& bias,
1316  int32_t& brows, int32_t& bcols)
1317 {
1318  CLinearMachine* linear=(CLinearMachine*) classifier;
1319 
1320  if (!linear)
1321  return false;
1322 
1323  bias=SG_MALLOC(float64_t, 1);
1324  *bias=linear->get_bias();
1325  brows=1;
1326  bcols=1;
1327 
1328  SGVector<float64_t> w=linear->get_w();
1329  cols=1;
1330  rows=w.vlen;
1331 
1332  weights= SG_MALLOC(float64_t, w.vlen);
1333  memcpy(weights, w.vector, sizeof(float64_t)*w.vlen);
1334 
1335  return true;
1336 }
1337 
1338 CLabels* CGUIClassifier::classify_distancemachine()
1339 {
1340  CFeatures* trainfeatures=ui->ui_features->get_train_features();
1341  CFeatures* testfeatures=ui->ui_features->get_test_features();
1342 
1343  if (!classifier)
1344  {
1345  SG_ERROR("no kernelmachine available\n")
1346  return NULL;
1347  }
1348  if (!trainfeatures)
1349  {
1350  SG_ERROR("no training features available\n")
1351  return NULL;
1352  }
1353 
1354  if (!testfeatures)
1355  {
1356  SG_ERROR("no test features available\n")
1357  return NULL;
1358  }
1359 
1360  bool success=ui->ui_distance->init_distance("TEST");
1361 
1362  if (!success || !ui->ui_distance->is_initialized())
1363  {
1364  SG_ERROR("distance not initialized\n")
1365  return NULL;
1366  }
1367 
1368  ((CDistanceMachine*) classifier)->set_distance(
1369  ui->ui_distance->get_distance());
1370  SG_INFO("starting distance machine testing\n")
1371  return classifier->apply();
1372 }
1373 
1374 
1375 CLabels* CGUIClassifier::classify_linear()
1376 {
1377  CFeatures* testfeatures=ui->ui_features->get_test_features();
1378 
1379  if (!classifier)
1380  {
1381  SG_ERROR("no classifier available\n")
1382  return NULL;
1383  }
1384  if (!testfeatures)
1385  {
1386  SG_ERROR("no test features available\n")
1387  return NULL;
1388  }
1389  if (!(testfeatures->has_property(FP_DOT)))
1390  {
1391  SG_ERROR("testfeatures not based on DotFeatures\n")
1392  return NULL;
1393  }
1394 
1395  ((CLinearMachine*) classifier)->set_features((CDotFeatures*) testfeatures);
1396  SG_INFO("starting linear classifier testing\n")
1397  return classifier->apply();
1398 }
1399 
1400 CLabels* CGUIClassifier::classify_byte_linear()
1401 {
1402  CFeatures* testfeatures=ui->ui_features->get_test_features();
1403 
1404  if (!classifier)
1405  {
1406  SG_ERROR("no svm available\n")
1407  return NULL;
1408  }
1409  if (!testfeatures)
1410  {
1411  SG_ERROR("no test features available\n")
1412  return NULL;
1413  }
1414  if (testfeatures->get_feature_class() != C_STRING ||
1415  testfeatures->get_feature_type() != F_BYTE )
1416  {
1417  SG_ERROR("testfeatures not of class STRING type BYTE\n")
1418  return NULL;
1419  }
1420 
1421  ((CWDSVMOcas*) classifier)->set_features((CStringFeatures<uint8_t>*) testfeatures);
1422  SG_INFO("starting linear classifier testing\n")
1423  return classifier->apply();
1424 }
1425 
1426 bool CGUIClassifier::classify_example(int32_t idx, float64_t &result)
1427 {
1428  CFeatures* trainfeatures=ui->ui_features->get_train_features();
1429  CFeatures* testfeatures=ui->ui_features->get_test_features();
1430 
1431  if (!classifier)
1432  {
1433  SG_ERROR("no svm available\n")
1434  return false;
1435  }
1436 
1437  if (!ui->ui_kernel->is_initialized())
1438  {
1439  SG_ERROR("kernel not initialized\n")
1440  return false;
1441  }
1442 
1443  if (!ui->ui_kernel->get_kernel() ||
1444  ui->ui_kernel->get_kernel()->get_kernel_type()!=K_CUSTOM)
1445  {
1446  if (!trainfeatures)
1447  {
1448  SG_ERROR("no training features available\n")
1449  return false;
1450  }
1451 
1452  if (!testfeatures)
1453  {
1454  SG_ERROR("no test features available\n")
1455  return false;
1456  }
1457  }
1458 
1459  ((CKernelMachine*) classifier)->set_kernel(
1460  ui->ui_kernel->get_kernel());
1461 
1462  result=((CKernelMachine*)classifier)->apply_one(idx);
1463  return true ;
1464 }
1465 
1466 
1467 bool CGUIClassifier::set_krr_tau(float64_t tau)
1468 {
1469 #ifdef HAVE_LAPACK
1470  krr_tau=tau;
1471  ((CKernelRidgeRegression*) classifier)->set_tau(krr_tau);
1472  SG_INFO("Set to krr_tau=%f.\n", krr_tau)
1473 
1474  return true;
1475 #else
1476  return false;
1477 #endif
1478 }
1479 
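// set_solver(): selects the internal optimizer used by MKL/linear methods
// (NEWTON, DIRECT, BLOCK_NORM, ELASTICNET, AUTO); CPLEX and GLPK are only
// accepted when the corresponding support is compiled in.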
1480 bool CGUIClassifier::set_solver(char* solver)
1481 {
1482  ESolverType s=ST_AUTO;
1483 
1484  if (strncmp(solver,"NEWTON", 6)==0)
1485  {
1486  SG_INFO("Using NEWTON solver.\n")
1487  s=ST_NEWTON;
1488  }
1489  else if (strncmp(solver,"DIRECT", 6)==0)
1490  {
1491  SG_INFO("Using DIRECT solver\n")
1492  s=ST_DIRECT;
1493  }
1494  else if (strncmp(solver,"BLOCK_NORM", 10)==0)
1495  {
1496  SG_INFO("Using BLOCK_NORM solver\n")
1497  s=ST_BLOCK_NORM;
1498  }
1499  else if (strncmp(solver,"ELASTICNET", 10)==0)
1500  {
1501  SG_INFO("Using ELASTICNET solver\n")
1502  s=ST_ELASTICNET;
1503  }
1504  else if (strncmp(solver,"AUTO", 4)==0)
1505  {
1506  SG_INFO("Automagically determining solver.\n")
1507  s=ST_AUTO;
1508  }
1509 #ifdef USE_CPLEX
1510  else if (strncmp(solver, "CPLEX", 5)==0)
1511  {
1512  SG_INFO("USING CPLEX METHOD selected\n")
1513  s=ST_CPLEX;
1514  }
1515 #endif
1516 #ifdef USE_GLPK
1517  else if (strncmp(solver,"GLPK", 4)==0)
1518  {
1519  SG_INFO("Using GLPK solver\n")
1520  s=ST_GLPK;
1521  }
1522 #endif
1523  else
1524  SG_ERROR("Unknown solver type, %s (not compiled in?)\n", solver)
1525 
1526 
1527  solver_type=s;
1528  return true;
1529 }
1530 
1531 bool CGUIClassifier::set_constraint_generator(char* name)
1532 {
1533  if (strcmp(name,"LIBSVM_ONECLASS")==0)
1534  {
1535  SG_UNREF(constraint_generator);
1536  constraint_generator = new CLibSVMOneClass();
1537  SG_INFO("created SVMlibsvm object for oneclass\n")
1538  }
1539  else if (strcmp(name,"LIBSVM_NU")==0)
1540  {
1541  SG_UNREF(constraint_generator);
1542  constraint_generator= new CLibSVM(LIBSVM_NU_SVC);
1543  SG_INFO("created SVMlibsvm object\n")
1544  }
1545  else if (strcmp(name,"LIBSVM")==0)
1546  {
1547  SG_UNREF(constraint_generator);
1548  constraint_generator= new CLibSVM();
1549  SG_INFO("created SVMlibsvm object\n")
1550  }
1551 
1552  else if (strcmp(name,"GPBTSVM")==0)
1553  {
1554  SG_UNREF(constraint_generator);
1555  constraint_generator= new CGPBTSVM();
1556  SG_INFO("created GPBT-SVM object\n")
1557  }
1558  else if (strcmp(name,"MPDSVM")==0)
1559  {
1560  SG_UNREF(constraint_generator);
1561  constraint_generator= new CMPDSVM();
1562  SG_INFO("created MPD-SVM object\n")
1563  }
1564  else if (strcmp(name,"GNPPSVM")==0)
1565  {
1566  SG_UNREF(constraint_generator);
1567  constraint_generator= new CGNPPSVM();
1568  SG_INFO("created GNPP-SVM object\n")
1569  }
1570  else if (strcmp(name,"LIBSVR")==0)
1571  {
1572  SG_UNREF(constraint_generator);
1573  constraint_generator= new CLibSVR();
1574  SG_INFO("created SVRlibsvm object\n")
1575  }
1576  else
1577  {
1578  SG_ERROR("Unknown SV-classifier %s.\n", name)
1579  return false;
1580  }
1582 
1583  return (constraint_generator!=NULL);
1584 }
SHOGUN Machine Learning Toolkit - Project Documentation