00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018
00019
00020
00021
00022
00023
00024
00025
00026
00027
00028
00029
00030
00031
00043 #ifndef U_SVM_SVM_H
00044 #define U_SVM_SVM_H
00045
00046 #include "smo.h"
00047
00048 #include "fastlib/fastlib.h"
00049
00050 #include <typeinfo>
00051
00052
00053 #define ID_LINEAR 0
00054 #define ID_GAUSSIAN 1
00055
00059 class SVMLinearKernel {
00060 public:
00061
00062 ArrayList<double> kpara_;
00063 void Init(datanode *node) {
00064 kpara_.Init();
00065 }
00066
00067 void GetName(String* kname) {
00068 kname->Copy("linear");
00069 }
00070
00071 int GetTypeId() {
00072 return ID_LINEAR;
00073 }
00074
00075 double Eval(const double* a, const double* b, index_t n_features) const {
00076 return la::Dot(n_features, a, b);
00077 }
00078
00079 void SaveParam(FILE* fp) {
00080 }
00081 };
00082
00086 class SVMRBFKernel {
00087 public:
00088
00089 ArrayList<double> kpara_;
00090 void Init(datanode *node) {
00091 kpara_.Init(2);
00092 kpara_[0] = fx_param_double_req(NULL, "sigma");
00093 kpara_[1] = -1.0 / (2 * math::Sqr(kpara_[0]));
00094 }
00095
00096 void GetName(String* kname) {
00097 kname->Copy("gaussian");
00098 }
00099
00100 int GetTypeId() {
00101 return ID_GAUSSIAN;
00102 }
00103
00104 double Eval(const double *a, const double *b, index_t n_features) const {
00105 double distance_squared = la::DistanceSqEuclidean(n_features, a, b);
00106 return exp(kpara_[1] * distance_squared);
00107 }
00108
00109 void SaveParam(FILE* fp) {
00110 fprintf(fp, "sigma %g\n", kpara_[0]);
00111 fprintf(fp, "gamma %g\n", kpara_[1]);
00112 }
00113 };
00114
/**
 * SVM learner parameterized by a kernel type. Supports three learner
 * types: 0 = classification (SVM_C, one-vs-one multiclass via SMO),
 * 1 = regression (SVM_R), 2 = density estimation (SVM_DE, stub).
 */
template<typename TKernel>
class SVM {

 private:
  /* Learner type: 0 = SVM_C, 1 = SVM_R, 2 = SVM_DE. */
  int learner_typeid_;

  /* One trained binary model: its bias term and SV coefficients. */
  struct SVM_MODELS {
    /* Bias b of this model's decision function. */
    double bias_;
    /* Coefficients of this model's support vectors. */
    ArrayList<double> coef_;
  };
  /* One entry per binary model (k*(k-1)/2 for classification, 1 otherwise). */
  ArrayList<SVM_MODELS> models_;

  /* Distinct label values seen in the training set. */
  ArrayList<double> train_labels_list_;
  /* Training-point indices grouped class by class. */
  ArrayList<index_t> train_labels_index_;
  /* Number of training points in each class. */
  ArrayList<index_t> train_labels_ct_;
  /* Start offset of each class inside train_labels_index_. */
  ArrayList<index_t> train_labels_startpos_;

  /* Support vectors, one column per SV (labels row excluded). */
  Matrix sv_;
  /* SV coefficients: (num_classes_-1) x total_num_sv_, libsvm layout. */
  Matrix sv_coef_;
  /* Per training point: did it end up a support vector? */
  ArrayList<bool> trainset_sv_indicator_;

  /* Total number of support vectors across all classes. */
  index_t total_num_sv_;
  /* Training-set indices of the support vectors, class by class. */
  ArrayList<index_t> sv_index_;
  /* Start offset of each class's SVs inside sv_index_ / sv_. */
  ArrayList<index_t> sv_list_startpos_;
  /* Number of SVs contributed by each class. */
  ArrayList<index_t> sv_list_ct_;

  /* Parameters shared by all binary models. */
  struct PARAMETERS {
    TKernel kernel_;
    String kernelname_;
    int kerneltypeid_;
    int b_;
    double C_;
    /* C for the positive / negative class (unbalanced data). */
    double Cp_;
    double Cn_;
    /* Width of the epsilon-insensitive tube (SVM_R only). */
    double epsilon_;
    /* Working-set-selection scheme passed through to SMO. */
    double wss_;
  };
  PARAMETERS param_;

  /* Number of training points. */
  index_t n_data_;
  /* Number of distinct classes (forced to 2 for non-classification). */
  int num_classes_;
  /* Number of binary models trained. */
  int num_models_;
  /* Number of features (last matrix row is the label). */
  int num_features_;

 public:
  typedef TKernel Kernel;
  /* NOTE(review): likely intended "friend class SMO<Kernel>;" — confirm. */
  class SMO<Kernel>;

  void Init(int learner_typeid, const Dataset& dataset, datanode *module);
  void InitTrain(int learner_typeid, const Dataset& dataset, datanode *module);

  double Predict(int learner_typeid, const Vector& vector);
  void BatchPredict(int learner_typeid, Dataset& testset, String predictedvalue_filename);
  void LoadModelBatchPredict(int learner_typeid, Dataset& testset, String model_filename, String predictedvalue_filename);

 private:
  void SVM_C_Train_(int learner_typeid, const Dataset& dataset, datanode *module);
  void SVM_R_Train_(int learner_typeid, const Dataset& dataset, datanode *module);
  void SVM_DE_Train_(int learner_typeid, const Dataset& dataset, datanode *module);
  double SVM_C_Predict_(const Vector& vector);
  double SVM_R_Predict_(const Vector& vector);
  double SVM_DE_Predict_(const Vector& vector);

  void SaveModel_(int learner_typeid, String model_filename);
  void LoadModel_(int learner_typeid, String model_filename);
};
00211
00219 template<typename TKernel>
00220 void SVM<TKernel>::Init(int learner_typeid, const Dataset& dataset, datanode *module){
00221 learner_typeid_ = learner_typeid;
00222
00223 n_data_ = dataset.n_points();
00224
00225 num_features_ = dataset.n_features()-1;
00226
00227 num_classes_ = dataset.n_labels();
00228
00229 train_labels_list_.Init();
00230 train_labels_index_.Init();
00231 train_labels_ct_.Init();
00232 train_labels_startpos_.Init();
00233
00234 if (learner_typeid == 0) {
00235 num_models_ = num_classes_ * (num_classes_-1) / 2;
00236 sv_list_startpos_.Init(num_classes_);
00237 sv_list_ct_.Init(num_classes_);
00238 }
00239 else {
00240 num_classes_ = 2;
00241
00242 num_models_ = 1;
00243 sv_list_startpos_.Init();
00244 sv_list_ct_.Init();
00245 }
00246
00247 models_.Init();
00248 sv_index_.Init();
00249 total_num_sv_ = 0;
00250
00251
00252
00253 trainset_sv_indicator_.Init(n_data_);
00254 for (index_t i=0; i<n_data_; i++)
00255 trainset_sv_indicator_[i] = false;
00256
00257 param_.kernel_.Init(fx_submodule(module, "kernel"));
00258 param_.kernel_.GetName(¶m_.kernelname_);
00259 param_.kerneltypeid_ = param_.kernel_.GetTypeId();
00260
00261
00262
00263 param_.wss_ = fx_param_int(NULL, "wss", 1);
00264
00265
00266 param_.C_ = fx_param_double(NULL, "c", 10.0);
00267 param_.Cp_ = fx_param_double(NULL, "c_p", param_.C_);
00268 param_.Cn_ = fx_param_double(NULL, "c_n", param_.C_);
00269
00270 if (learner_typeid == 1) {
00271
00272 param_.epsilon_ = fx_param_double(NULL, "epsilon", 0.1);
00273 }
00274 else if (learner_typeid == 2) {
00275 }
00276 }
00277
00285 template<typename TKernel>
00286 void SVM<TKernel>::InitTrain(int learner_typeid, const Dataset& dataset, datanode *module) {
00287 Init(learner_typeid, dataset, module);
00288
00289 if (learner_typeid == 0) {
00290 SVM_C_Train_(learner_typeid, dataset, module);
00291 }
00292 else if (learner_typeid == 1) {
00293 SVM_R_Train_(learner_typeid, dataset, module);
00294 }
00295 else if (learner_typeid == 2) {
00296 SVM_DE_Train_(learner_typeid, dataset, module);
00297 }
00298
00299
00300 SaveModel_(learner_typeid, "svm_model");
00301
00302 }
00303
00304
/**
 * One-vs-one multiclass classification training.
 *
 * For every pair of classes (i, j) it builds a two-class sub-dataset,
 * trains a binary SMO model on it, then gathers all support vectors and
 * lays their coefficients out libsvm-style in sv_coef_
 * ((num_classes_-1) rows, one column per SV).
 */
template<typename TKernel>
void SVM<TKernel>::SVM_C_Train_(int learner_typeid, const Dataset& dataset, datanode *module) {
  num_classes_ = dataset.n_labels();
  // Group training-point indices by class.
  dataset.GetLabels(train_labels_list_, train_labels_index_, train_labels_ct_, train_labels_startpos_);

  index_t ct = 0;  // index of the current binary model
  index_t i, j;
  for (i = 0; i < num_classes_; i++) {
    for (j = i+1; j < num_classes_; j++) {
      models_.PushBack();

      SMO<Kernel> smo;
      // Feed SMO its parameters as a flat list: b, Cp, Cn, wss.
      ArrayList<double> param_feed_db;
      param_feed_db.Init();
      param_feed_db.PushBack() = param_.b_;
      param_feed_db.PushBack() = param_.Cp_;
      param_feed_db.PushBack() = param_.Cn_;
      param_feed_db.PushBack() = param_.wss_;
      smo.InitPara(learner_typeid, param_feed_db);

      smo.kernel().Init(fx_submodule(module, "kernel"));

      // Build the two-class sub-dataset: class i first (label row +1),
      // then class j (label row -1). dataset_bi_index remembers each
      // column's original index in the full training set.
      Dataset dataset_bi;
      dataset_bi.InitBlank();
      dataset_bi.info().Init();
      dataset_bi.matrix().Init(num_features_+1, train_labels_ct_[i]+train_labels_ct_[j]);
      ArrayList<index_t> dataset_bi_index;
      dataset_bi_index.Init(train_labels_ct_[i]+train_labels_ct_[j]);
      for (index_t m = 0; m < train_labels_ct_[i]; m++) {
        Vector source, dest;
        dataset_bi.matrix().MakeColumnVector(m, &dest);
        dataset.matrix().MakeColumnVector(train_labels_index_[train_labels_startpos_[i]+m], &source);
        dest.CopyValues(source);
        // Overwrite the label row with +1 for class i.
        dataset_bi.matrix().set(num_features_, m, 1);
        dataset_bi_index[m] = train_labels_index_[train_labels_startpos_[i]+m];
      }
      for (index_t n = 0; n < train_labels_ct_[j]; n++) {
        Vector source, dest;
        dataset_bi.matrix().MakeColumnVector(n+train_labels_ct_[i], &dest);
        dataset.matrix().MakeColumnVector(train_labels_index_[train_labels_startpos_[j]+n], &source);
        dest.CopyValues(source);
        // Overwrite the label row with -1 for class j.
        dataset_bi.matrix().set(num_features_, n+train_labels_ct_[i], -1);
        dataset_bi_index[n+train_labels_ct_[i]] = train_labels_index_[train_labels_startpos_[j]+n];
      }

      smo.Train(learner_typeid, &dataset_bi);

      // Harvest this binary model: bias, SV coefficients, and which
      // original training points became support vectors.
      models_[ct].bias_ = smo.Bias();
      models_[ct].coef_.Init();
      smo.GetSVM(dataset_bi_index, models_[ct].coef_, trainset_sv_indicator_);

      ct++;
    }
  }

  // Collect support vectors class by class; record per-class SV counts
  // and start positions so predictions can slice sv_ by class.
  index_t k;
  sv_list_startpos_[0] = 0;
  for (i = 0; i < num_classes_; i++) {
    ct = 0;
    for (j = 0; j < train_labels_ct_[i]; j++) {
      if (trainset_sv_indicator_[ train_labels_index_[train_labels_startpos_[i]+j] ]) {
        sv_index_.PushBack() = train_labels_index_[train_labels_startpos_[i]+j];
        total_num_sv_++;
        ct++;
      }
    }
    sv_list_ct_[i] = ct;
    if (i >= 1)
      sv_list_startpos_[i] = sv_list_startpos_[i-1] + sv_list_ct_[i-1];
  }
  // Copy the SV feature columns (label row excluded) into sv_.
  sv_.Init(num_features_, total_num_sv_);
  for (i = 0; i < total_num_sv_; i++) {
    Vector source, dest;
    sv_.MakeColumnVector(i, &dest);
    dataset.matrix().MakeColumnSubvector(sv_index_[i], 0, num_features_, &source);
    dest.CopyValues(source);
  }

  // Scatter each binary model's coefficients into the libsvm-style
  // sv_coef_ matrix: model (i, j) writes class-i SVs into row j-1 and
  // class-j SVs into row i.
  index_t ct_model = 0;
  index_t ct_bi_cv;  // cursor into the binary model's coef_ list
  index_t p;         // destination column in sv_coef_
  sv_coef_.Init(num_classes_-1, total_num_sv_);
  sv_coef_.SetZero();
  for (i = 0; i < num_classes_; i++) {
    for (j = i+1; j < num_classes_; j++) {
      ct_bi_cv = 0;
      p = sv_list_startpos_[i];
      for (k = 0; k < train_labels_ct_[i]; k++) {
        if (trainset_sv_indicator_[ train_labels_index_[train_labels_startpos_[i]+k] ]) {
          sv_coef_.set(j-1, p++, models_[ct_model].coef_[ct_bi_cv]);
          ct_bi_cv ++;
        }
      }
      p = sv_list_startpos_[j];
      for (k = 0; k < train_labels_ct_[j]; k++) {
        if (trainset_sv_indicator_[ train_labels_index_[train_labels_startpos_[j]+k] ]) {
          sv_coef_.set(i, p++, models_[ct_model].coef_[ct_bi_cv]);
          ct_bi_cv ++;
        }
      }
      ct_model++;
    }
  }
}
00429
/**
 * Regression (SVM_R) training: trains a single SMO model on the whole
 * dataset, then collects the support vectors and their coefficients.
 */
template<typename TKernel>
void SVM<TKernel>::SVM_R_Train_(int learner_typeid, const Dataset& dataset, datanode *module) {
  index_t i;
  // Identity index map: the sub-dataset is the whole training set.
  ArrayList<index_t> dataset_index;
  dataset_index.Init(n_data_);
  for (i=0; i<n_data_; i++)
    dataset_index[i] = i;

  models_.PushBack();

  SMO<Kernel> smo;
  // Feed SMO its parameters as a flat list: b, C, epsilon, wss.
  ArrayList<double> param_feed_db;
  param_feed_db.Init();
  param_feed_db.PushBack() = param_.b_;
  param_feed_db.PushBack() = param_.C_;
  param_feed_db.PushBack() = param_.epsilon_;
  param_feed_db.PushBack() = param_.wss_;
  smo.InitPara(learner_typeid, param_feed_db);

  smo.kernel().Init(fx_submodule(module, "kernel"));

  smo.Train(learner_typeid, &dataset);

  // Harvest the trained model: bias, coefficients, SV indicator flags.
  models_[0].bias_ = smo.Bias();
  models_[0].coef_.Init();
  smo.GetSVM(dataset_index, models_[0].coef_, trainset_sv_indicator_);

  // Record which training points became support vectors.
  for (i = 0; i < n_data_; i++) {
    if (trainset_sv_indicator_[i]) {
      sv_index_.PushBack() = i;
      total_num_sv_++;
    }
  }

  // Copy SV feature columns (label row excluded) into sv_.
  sv_.Init(num_features_, total_num_sv_);
  for (i = 0; i < total_num_sv_; i++) {
    Vector source, dest;
    sv_.MakeColumnVector(i, &dest);
    dataset.matrix().MakeColumnSubvector(sv_index_[i], 0, num_features_, &source);
    dest.CopyValues(source);
  }
  // Single model: coefficients occupy the one row of sv_coef_.
  sv_coef_.Init(1, total_num_sv_);
  for (i = 0; i < total_num_sv_; i++) {
    sv_coef_.set(0, i, models_[0].coef_[i]);
  }

}
00488
/**
 * Density-estimation (SVM_DE) training — not implemented; this stub
 * exists so the learner-type dispatch in InitTrain() compiles.
 */
template<typename TKernel>
void SVM<TKernel>::SVM_DE_Train_(int learner_typeid, const Dataset& dataset, datanode *module) {
  // TODO: implement one-class / density-estimation training.
}
00500
00501
00510 template<typename TKernel>
00511 double SVM<TKernel>::Predict(int learner_typeid, const Vector& datum) {
00512 double predicted_value = INFINITY;
00513 if (learner_typeid == 0) {
00514 predicted_value = SVM_C_Predict_(datum);
00515 }
00516 else if (learner_typeid == 1) {
00517 predicted_value = SVM_R_Predict_(datum);
00518 }
00519 else if (learner_typeid == 2) {
00520 predicted_value = SVM_DE_Predict_(datum);
00521 }
00522 return predicted_value;
00523 }
00524
/**
 * One-vs-one multiclass classification prediction.
 *
 * Evaluates the kernel against every support vector once, computes each
 * binary model's decision value, then majority-votes across the
 * num_classes_*(num_classes_-1)/2 models. Returns the label value of
 * the winning class.
 */
template<typename TKernel>
double SVM<TKernel>::SVM_C_Predict_(const Vector& datum) {
  index_t i, j, k;
  // Cache K(datum, sv) for every SV so each is evaluated only once.
  ArrayList<double> keval;
  keval.Init(total_num_sv_);
  for (i = 0; i < total_num_sv_; i++) {
    keval[i] = param_.kernel_.Eval(datum.ptr(), sv_.GetColumnPtr(i), num_features_);
  }
  // Decision value of each binary model (i, j), in pair order.
  ArrayList<double> values;
  values.Init(num_models_);
  index_t ct = 0;
  for (i = 0; i < num_classes_; i++) {
    for (j = i+1; j < num_classes_; j++) {
      double sum = 0;
      // Class-i SVs contribute via row j-1 of sv_coef_ (libsvm layout).
      for(k = 0; k < sv_list_ct_[i]; k++) {
        sum += sv_coef_.get(j-1, sv_list_startpos_[i]+k) * keval[sv_list_startpos_[i]+k];
      }
      // Class-j SVs contribute via row i.
      for(k = 0; k < sv_list_ct_[j]; k++) {
        sum += sv_coef_.get(i, sv_list_startpos_[j]+k) * keval[sv_list_startpos_[j]+k];
      }
      sum += models_[ct].bias_;
      values[ct] = sum;
      ct++;
    }
  }

  // Majority vote: positive decision value favors class i, else class j.
  ArrayList<index_t> vote;
  vote.Init(num_classes_);
  for (i = 0; i < num_classes_; i++) {
    vote[i] = 0;
  }
  ct = 0;
  for (i = 0; i < num_classes_; i++) {
    for (j = i+1; j < num_classes_; j++) {
      if(values[ct] > 0.0) {
        vote[i] = vote[i] + 1;
      }
      else {
        vote[j] = vote[j] + 1;
      }
      ct++;
    }
  }
  // Ties break toward the highest class index (>= keeps the later max).
  index_t vote_max_idx = 0;
  for (i = 1; i < num_classes_; i++) {
    if (vote[i] >= vote[vote_max_idx]) {
      vote_max_idx = i;
    }
  }
  return train_labels_list_[vote_max_idx];
}
00583
00591 template<typename TKernel>
00592 double SVM<TKernel>::SVM_R_Predict_(const Vector& datum) {
00593 index_t i;
00594 double sum = 0.0;
00595 for (i = 0; i < total_num_sv_; i++) {
00596 sum += sv_coef_.get(0, i) * param_.kernel_.Eval(datum.ptr(), sv_.GetColumnPtr(i), num_features_);
00597 }
00598 sum += models_[0].bias_;
00599 return sum;
00600 }
00601
/**
 * Density-estimation prediction — not implemented; always returns 0.0.
 */
template<typename TKernel>
double SVM<TKernel>::SVM_DE_Predict_(const Vector& datum) {
  // TODO: implement once SVM_DE_Train_ exists.
  return 0.0;
}
00614
00615
00616
00628 template<typename TKernel>
00629 void SVM<TKernel>::BatchPredict(int learner_typeid, Dataset& testset, String predictedvalue_filename) {
00630 FILE *fp = fopen(predictedvalue_filename, "w");
00631 if (fp == NULL) {
00632 fprintf(stderr, "Cannot save predicted values to file!");
00633 return;
00634 }
00635
00636 num_features_ = testset.n_features()-1;
00637 for (index_t i = 0; i < testset.n_points(); i++) {
00638 Vector testvec;
00639 testset.matrix().MakeColumnSubvector(i, 0, num_features_, &testvec);
00640 double predictedvalue = Predict(learner_typeid, testvec);
00641
00642
00643
00644 fprintf(fp, "%f\n", predictedvalue);
00645 }
00646 fclose(fp);
00647
00648
00649
00650
00651 }
00652
/**
 * Convenience wrapper: loads a saved model from model_filename, then
 * runs BatchPredict over the test set, writing predictions to
 * predictedvalue_filename.
 */
template<typename TKernel>
void SVM<TKernel>::LoadModelBatchPredict(int learner_typeid, Dataset& testset, String model_filename, String predictedvalue_filename) {
  LoadModel_(learner_typeid, model_filename);
  BatchPredict(learner_typeid, testset, predictedvalue_filename);
}
00666
00667
00674
/**
 * Writes the trained model to a text file in a keyword/value format
 * that LoadModel_() parses: a learner-specific header, the kernel name
 * and parameters, per-model biases, then the SV coefficient matrix and
 * the support vectors themselves. Prints an error and returns if the
 * file cannot be opened.
 */
template<typename TKernel>
void SVM<TKernel>::SaveModel_(int learner_typeid, String model_filename) {
  FILE *fp = fopen(model_filename, "w");
  if (fp == NULL) {
    fprintf(stderr, "Cannot save trained model to file!");
    return;
  }
  index_t i, j;

  if (learner_typeid == 0) { // classification header
    fprintf(fp, "svm_type SVM_C\n");
    fprintf(fp, "total_num_sv %d\n", total_num_sv_);
    fprintf(fp, "num_classes %d\n", num_classes_);
    // The distinct label values, in class order.
    fprintf(fp, "labels ");
    for (i = 0; i < num_classes_; i++)
      fprintf(fp, "%f ", train_labels_list_[i]);
    fprintf(fp, "\n");
    // Per-class SV slicing information used by SVM_C_Predict_.
    fprintf(fp, "sv_list_startpos ");
    for (i =0; i < num_classes_; i++)
      fprintf(fp, "%d ", sv_list_startpos_[i]);
    fprintf(fp, "\n");
    fprintf(fp, "sv_list_ct ");
    for (i =0; i < num_classes_; i++)
      fprintf(fp, "%d ", sv_list_ct_[i]);
    fprintf(fp, "\n");
  }
  else if (learner_typeid == 1) { // regression header
    fprintf(fp, "svm_type SVM_R\n");
    fprintf(fp, "total_num_sv %d\n", total_num_sv_);
    fprintf(fp, "sv_index ");
    for (i = 0; i < total_num_sv_; i++)
      fprintf(fp, "%d ", sv_index_[i]);
    fprintf(fp, "\n");
  }
  else if (learner_typeid == 2) { // density-estimation header
    fprintf(fp, "svm_type SVM_DE\n");
    fprintf(fp, "total_num_sv %d\n", total_num_sv_);
    fprintf(fp, "sv_index ");
    for (i = 0; i < total_num_sv_; i++)
      fprintf(fp, "%d ", sv_index_[i]);
    fprintf(fp, "\n");
  }

  // Kernel identity and its parameters (e.g. sigma/gamma for RBF).
  fprintf(fp, "kernel_name %s\n", param_.kernelname_.c_str());
  fprintf(fp, "kernel_typeid %d\n", param_.kerneltypeid_);
  param_.kernel_.SaveParam(fp);

  // One bias per binary model. LoadModel_ treats "bias" as the end of
  // the header section.
  fprintf(fp, "bias ");
  for (i = 0; i < num_models_; i++)
    fprintf(fp, "%f ", models_[i].bias_);
  fprintf(fp, "\n");

  // Coefficient matrix: one line per SV, num_classes_-1 values each.
  fprintf(fp, "SV_coefs\n");
  for (i = 0; i < total_num_sv_; i++) {
    for (j = 0; j < num_classes_-1; j++) {
      fprintf(fp, "%f ", sv_coef_.get(j,i));
    }
    fprintf(fp, "\n");
  }

  // Support vectors: one line per SV, num_features_ values each.
  fprintf(fp, "SVs\n");
  for (i = 0; i < total_num_sv_; i++) {
    for (j = 0; j < num_features_; j++) {
      fprintf(fp, "%f ", sv_.get(j,i));
    }
    fprintf(fp, "\n");
  }
  fclose(fp);
}
00748
00755
00756 template<typename TKernel>
00757 void SVM<TKernel>::LoadModel_(int learner_typeid, String model_filename) {
00758 if (learner_typeid == 0) {
00759 train_labels_list_.Renew();
00760 train_labels_list_.Init(num_classes_);
00761 }
00762
00763
00764 FILE *fp = fopen(model_filename, "r");
00765 if (fp == NULL) {
00766 fprintf(stderr, "Cannot open SVM model file!");
00767 return;
00768 }
00769 char cmd[80];
00770 int i, j; int temp_d; double temp_f;
00771 for (i = 0; i < num_models_; i++) {
00772 models_.PushBack();
00773 models_[i].coef_.Init();
00774 }
00775 while (1) {
00776 fscanf(fp,"%80s",cmd);
00777 if(strcmp(cmd,"svm_type")==0) {
00778 fscanf(fp,"%80s", cmd);
00779 if (strcmp(cmd,"SVM_C")==0)
00780 learner_typeid_ = 0;
00781 else if (strcmp(cmd,"SVM_R")==0)
00782 learner_typeid_ = 1;
00783 else if (strcmp(cmd,"SVM_DE")==0)
00784 learner_typeid_ = 2;
00785 }
00786 else if (strcmp(cmd, "total_num_sv")==0) {
00787 fscanf(fp,"%d",&total_num_sv_);
00788 }
00789
00790 else if (strcmp(cmd, "num_classes")==0) {
00791 fscanf(fp,"%d",&num_classes_);
00792 }
00793 else if (strcmp(cmd, "labels")==0) {
00794 for (i=0; i<num_classes_; i++) {
00795 fscanf(fp,"%lf",&temp_f);
00796 train_labels_list_[i] = temp_f;
00797 }
00798 }
00799 else if (strcmp(cmd, "sv_list_startpos")==0) {
00800 for ( i= 0; i < num_classes_; i++) {
00801 fscanf(fp,"%d",&temp_d);
00802 sv_list_startpos_[i]= temp_d;
00803 }
00804 }
00805 else if (strcmp(cmd, "sv_list_ct")==0) {
00806 for ( i= 0; i < num_classes_; i++) {
00807 fscanf(fp,"%d",&temp_d);
00808 sv_list_ct_[i]= temp_d;
00809 }
00810 }
00811
00812 else if (strcmp(cmd, "sv_index")==0) {
00813 for ( i= 0; i < total_num_sv_; i++) {
00814 fscanf(fp,"%d",&temp_d);
00815 sv_index_.PushBack() = temp_d;
00816 }
00817 }
00818
00819 else if (strcmp(cmd, "kernel_name")==0) {
00820 fscanf(fp,"%80s",param_.kernelname_.c_str());
00821 }
00822 else if (strcmp(cmd, "kernel_typeid")==0) {
00823 fscanf(fp,"%d",¶m_.kerneltypeid_);
00824 }
00825 else if (strcmp(cmd, "sigma")==0) {
00826 fscanf(fp,"%lf",¶m_.kernel_.kpara_[0]);
00827 }
00828 else if (strcmp(cmd, "gamma")==0) {
00829 fscanf(fp,"%lf",¶m_.kernel_.kpara_[1]);
00830 }
00831
00832 else if (strcmp(cmd, "bias")==0) {
00833 for ( i= 0; i < num_models_; i++) {
00834 fscanf(fp,"%lf",&temp_f);
00835 models_[i].bias_= temp_f;
00836 }
00837 break;
00838 }
00839 }
00840
00841
00842 sv_coef_.Init(num_classes_-1, total_num_sv_);
00843 sv_coef_.SetZero();
00844 sv_.Init(num_features_, total_num_sv_);
00845 while (1) {
00846 fscanf(fp,"%80s",cmd);
00847 if (strcmp(cmd, "SV_coefs")==0) {
00848 for (i = 0; i < total_num_sv_; i++) {
00849 for (j = 0; j < num_classes_-1; j++) {
00850 fscanf(fp,"%lf",&temp_f);
00851 sv_coef_.set(j, i, temp_f);
00852 }
00853 }
00854 }
00855 else if (strcmp(cmd, "SVs")==0) {
00856 for (i = 0; i < total_num_sv_; i++) {
00857 for (j = 0; j < num_features_; j++) {
00858 fscanf(fp,"%lf",&temp_f);
00859 sv_.set(j, i, temp_f);
00860 }
00861 }
00862 break;
00863 }
00864 }
00865 fclose(fp);
00866 }
00867
00868 #endif