optimizer.h

/* MLPACK 0.2
 *
 * Copyright (c) 2008, 2009 Alexander Gray,
 *                          Garry Boyer,
 *                          Ryan Riegel,
 *                          Nikolaos Vasiloglou,
 *                          Dongryeol Lee,
 *                          Chip Mappus,
 *                          Nishant Mehta,
 *                          Hua Ouyang,
 *                          Parikshit Ram,
 *                          Long Tran,
 *                          Wee Chin Wong
 *
 * Copyright (c) 2008, 2009 Georgia Institute of Technology
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
 * 02110-1301, USA.
 */

#ifndef OPTIMIZER_H_
#define OPTIMIZER_H_

#ifndef HAVE_STD
#define HAVE_STD
#endif

#ifndef HAVE_NAMESPACES
#define HAVE_NAMESPACES
#endif

#include "fastlib/fastlib.h"
#include <newmat.h>
#include <NLF.h>
#include <OptCG.h>
#include <OptLBFGS.h>
#include <OptFDNewton.h>
#include <OptNewton.h>
#include <OptQNewton.h>
#include <BoundConstraint.h>
#include <CompoundConstraint.h>
#include <LinearEquation.h>
#include <LinearInequality.h>
#include <NonLinearEquation.h>
#include <NonLinearInequality.h>

namespace optim {
namespace optpp {

// Convenience aliases for the OPT++ method classes.
typedef OPTPP::OptCG       CG;
typedef OPTPP::OptQNewton  QNewton;
typedef OPTPP::OptQNewton  BFGS;
typedef OPTPP::OptFDNewton FDNewton;
typedef OPTPP::OptNewton   Newton;
typedef OPTPP::OptLBFGS    LBFGS;

// Constraint flags; they occupy distinct bits so that they can be combined
// with bitwise OR in the template argument of StaticOptppOptimizer.
enum ConstraintType {
  NoConstraint        = 0,
  BoundConstraint     = 2,
  LinearEquality      = 4,
  LinearInequality    = 8,
  NonLinearEquality   = 16,
  NonLinearInequality = 32
};

template <typename Method,
          typename Objective,
          ConstraintType Constraint = NoConstraint>
class StaticOptppOptimizer;
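
// Illustrative sketch (not part of the original header): the third template
// argument selects which constraint traits fire, and the flags above can be
// OR-ed together. MyObjective is a hypothetical user-supplied class.
//
//   StaticOptppOptimizer<LBFGS, MyObjective> unconstrained;
//   StaticOptppOptimizer<QNewton, MyObjective,
//       (ConstraintType)(BoundConstraint | LinearEquality)> constrained;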

// The default trait is good for CG and L-BFGS.
template<typename Method>
class OptimizationTrait {
 public:
  typedef OPTPP::NLF1 NlpType;
  static void InitializeMethod(fx_module *module, Method *method) {
    std::string search_strategy = fx_param_str(module,
                                      "search_strategy",
                                      "line_search");
    if (search_strategy == "line_search") {
      method->setSearchStrategy(OPTPP::LineSearch);
    } else if (search_strategy == "trust_region") {
      method->setSearchStrategy(OPTPP::TrustRegion);
    } else if (search_strategy == "trust_pds") {
      method->setSearchStrategy(OPTPP::TrustPDS);
    } else {
      FATAL("The specified search strategy %s is not supported",
            search_strategy.c_str());
    }
    if (method->checkDeriv() == false) {
      NONFATAL("Warning: the finite difference derivative/Hessian does not "
               "match the analytic one");
    }
  }
};
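
// Sketch (based on the code in StaticOptppOptimizer::Init() further down,
// not a separate API): after constructing the method, Init() hands it to the
// trait, which reads the "search_strategy" parameter; the accepted values are
// "line_search" (the default), "trust_region" and "trust_pds".
//
//   Method *method = new Method(nlp, tols);
//   OptimizationTrait<Method>::InitializeMethod(module, method);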

// Generic Newton-like methods.
template<>
class OptimizationTrait<OPTPP::OptNewtonLike> {
 public:
  typedef OPTPP::NLF1 NlpType;
  static void InitializeMethod(fx_module *module,
      OPTPP::OptNewtonLike *method) {
    std::string search_strategy = fx_param_str(module,
                                      "search_strategy",
                                      "line_search");
    if (search_strategy == "line_search") {
      method->setSearchStrategy(OPTPP::LineSearch);
    } else if (search_strategy == "trust_region") {
      method->setSearchStrategy(OPTPP::TrustRegion);
      method->setTRSize(fx_param_double(module, "trust_region_size",
            method->getTRSize()));
    } else if (search_strategy == "trust_pds") {
      method->setSearchStrategy(OPTPP::TrustPDS);
    } else {
      FATAL("The specified search strategy %s is not supported",
            search_strategy.c_str());
    }
    if (method->checkDeriv() == false) {
      NONFATAL("Warning: the finite difference derivative/Hessian does not "
               "match the analytic one");
    }
  }
};

// Quasi-Newton with a finite difference approximation of the Hessian.
template<>
class OptimizationTrait<OPTPP::OptFDNewton> {
 public:
  typedef OPTPP::NLF1 NlpType;
  static void InitializeMethod(fx_module *module, OPTPP::OptFDNewton *method) {
    OptimizationTrait<OPTPP::OptNewtonLike>::InitializeMethod(
        module, method);
  }
};

// Quasi-Newton BFGS.
template<>
class OptimizationTrait<OPTPP::OptQNewton> {
 public:
  typedef OPTPP::NLF1 NlpType;
  static void InitializeMethod(fx_module *module, OPTPP::OptQNewton *method) {
    OptimizationTrait<OPTPP::OptNewtonLike>::InitializeMethod(
        module, method);
  }
};

// Newton with an analytic expression for the Hessian.
template<>
class OptimizationTrait<OPTPP::OptNewton> {
 public:
  typedef OPTPP::NLF2 NlpType;
  static void InitializeMethod(fx_module *module, OPTPP::OptNewton *method) {
    OptimizationTrait<OPTPP::OptNewtonLike>::InitializeMethod(
        module, method);
  }
};

// In this section we define the traits for the constraints.

template<typename Method, typename Objective, bool Applicable>
class BoundConstraintTrait {
 public:
  static void UpdateConstraints(Objective *objective,
       OPTPP::OptppArray<OPTPP::Constraint> *constraint_array) {
  }
};

template<typename Method, typename Objective>
class BoundConstraintTrait<Method, Objective, true> {
 public:
  static void UpdateConstraints(Objective *objective,
       OPTPP::OptppArray<OPTPP::Constraint> *constraint_array) {
    index_t dimension = objective->dimension();
    OPTPP::Constraint bc;
    Vector lower_bound;
    Vector upper_bound;
    lower_bound.Init(dimension);
    upper_bound.Init(dimension);
    objective->GetBoundConstraint(&lower_bound, &upper_bound);
    if (lower_bound.length() != BIG_BAD_NUMBER and
        upper_bound.length() == BIG_BAD_NUMBER) {
      // Lower bound only.
      NEWMAT::ColumnVector lower(lower_bound.ptr(), lower_bound.length());
      bc = new OPTPP::BoundConstraint(dimension, lower);
    } else if (lower_bound.length() == BIG_BAD_NUMBER and
               upper_bound.length() != BIG_BAD_NUMBER) {
      // Upper bound only.
      NEWMAT::ColumnVector upper(upper_bound.ptr(), upper_bound.length());
      bc = new OPTPP::BoundConstraint(dimension, upper, false);
    } else {
      // Both bounds.
      NEWMAT::ColumnVector lower(lower_bound.ptr(), lower_bound.length());
      NEWMAT::ColumnVector upper(upper_bound.ptr(), upper_bound.length());
      bc = new OPTPP::BoundConstraint(dimension, lower, upper);
    }
    constraint_array->append(bc);
  }
};

template<typename Method, typename Objective, bool Applicable>
class LinearEqualityTrait {
 public:
  static void UpdateConstraints(Objective *objective,
       OPTPP::OptppArray<OPTPP::Constraint> *constraint_array) {
  }
};

template<typename Method, typename Objective>
class LinearEqualityTrait<Method, Objective, true> {
 public:
  static void UpdateConstraints(Objective *objective,
       OPTPP::OptppArray<OPTPP::Constraint> *constraint_array) {
    index_t dimension = objective->dimension();
    Matrix a_mat;
    Vector b_vec;
    objective->GetLinearEquality(&a_mat, &b_vec);
    NEWMAT::Matrix alpha(a_mat.ptr(), a_mat.n_rows(), a_mat.n_cols());
    NEWMAT::ColumnVector beta(b_vec.ptr(), b_vec.length());
    OPTPP::Constraint leq = new OPTPP::LinearEquation(alpha, beta);
    constraint_array->append(leq);
  }
};

template<typename Method, typename Objective, bool Applicable>
class LinearInequalityTrait {
 public:
  static void UpdateConstraints(Objective *objective,
       OPTPP::OptppArray<OPTPP::Constraint> *constraint_array) {
  }
};

template<typename Method, typename Objective>
class LinearInequalityTrait<Method, Objective, true> {
 public:
  static void UpdateConstraints(Objective *objective,
       OPTPP::OptppArray<OPTPP::Constraint> *constraint_array) {
    index_t dimension = objective->dimension();
    OPTPP::Constraint lineq;
    Matrix a_mat;
    Vector left_b;
    Vector right_b;
    objective->GetLinearInequality(&a_mat, &left_b, &right_b);
    NEWMAT::Matrix alpha(a_mat.ptr(), a_mat.n_rows(), a_mat.n_cols());
    DEBUG_ASSERT(a_mat.n_rows() != BIG_BAD_NUMBER);
    DEBUG_ASSERT(left_b.length() != BIG_BAD_NUMBER or
        right_b.length() != BIG_BAD_NUMBER);
    if (left_b.length() != BIG_BAD_NUMBER and
        right_b.length() == BIG_BAD_NUMBER) {
      NEWMAT::ColumnVector left(left_b.ptr(), left_b.length());
      lineq = new OPTPP::LinearInequality(alpha, left);
    } else if (left_b.length() == BIG_BAD_NUMBER and
               right_b.length() != BIG_BAD_NUMBER) {
      NEWMAT::ColumnVector right(right_b.ptr(), right_b.length());
      lineq = new OPTPP::LinearInequality(alpha, right, false);
    } else {
      NEWMAT::ColumnVector left(left_b.ptr(), left_b.length());
      NEWMAT::ColumnVector right(right_b.ptr(), right_b.length());
      lineq = new OPTPP::LinearInequality(alpha, left, right);
    }
    constraint_array->append(lineq);
  }
};

template<typename Method, typename Objective, bool Applicable>
class NonLinearEqualityTrait {
 public:
  static void UpdateConstraints(Objective *objective,
       OPTPP::OptppArray<OPTPP::Constraint> *constraint_array) {
  }
};

template<typename Method, typename Objective>
class NonLinearEqualityTrait<Method, Objective, true> {
 public:
  static void UpdateConstraints(Objective *objective,
       OPTPP::OptppArray<OPTPP::Constraint> *constraint_array) {
    index_t dimension = objective->dimension();
    OPTPP::NLP *nlp = new OPTPP::NLP(
        new typename OptimizationTrait<Method>::NlpType(
          StaticOptppOptimizer<Method, Objective>::
          ComputeNonLinearEqualityConstraints));
    OPTPP::Constraint eq = new OPTPP::NonLinearEquation(nlp,
        objective->num_of_non_linear_equalities());
    constraint_array->append(eq);
  }
};

template<typename Method, typename Objective, bool Applicable>
class NonLinearInequalityTrait {
 public:
  static void UpdateConstraints(Objective *objective,
      OPTPP::OptppArray<OPTPP::Constraint> *constraint_array) {
  }
};

template<typename Method, typename Objective>
class NonLinearInequalityTrait<Method, Objective, true> {
 public:
  static void UpdateConstraints(Objective *objective,
       OPTPP::OptppArray<OPTPP::Constraint> *constraint_array) {
    index_t dimension = objective->dimension();
    OPTPP::NLP *nlp = new OPTPP::NLP(
        new typename OptimizationTrait<Method>::NlpType(
          StaticOptppOptimizer<Method, Objective>::
          ComputeNonLinearInequalityConstraints));
    Vector left_b;
    Vector right_b;
    objective->GetNonLinearInequalityConstraintBounds(&left_b, &right_b);
    OPTPP::Constraint ineq;
    if (left_b.length() == BIG_BAD_NUMBER and
        right_b.length() == BIG_BAD_NUMBER) {
      ineq = new OPTPP::NonLinearInequality(nlp,
          objective->num_of_non_linear_inequalities());
    } else if (left_b.length() != BIG_BAD_NUMBER and
               right_b.length() == BIG_BAD_NUMBER) {
      NEWMAT::ColumnVector left(left_b.ptr(), left_b.length());
      ineq = new OPTPP::NonLinearInequality(nlp, left,
          objective->num_of_non_linear_inequalities());
    } else if (left_b.length() == BIG_BAD_NUMBER and
               right_b.length() != BIG_BAD_NUMBER) {
      NEWMAT::ColumnVector right(right_b.ptr(), right_b.length());
      ineq = new OPTPP::NonLinearInequality(nlp, right, false,
          objective->num_of_non_linear_inequalities());
    } else {
      NEWMAT::ColumnVector left(left_b.ptr(), left_b.length());
      NEWMAT::ColumnVector right(right_b.ptr(), right_b.length());
      ineq = new OPTPP::NonLinearInequality(nlp, left, right,
          objective->num_of_non_linear_inequalities());
    }
    constraint_array->append(ineq);
  }
};

template <typename Method, typename Objective, ConstraintType Constraint>
class StaticOptppOptimizer {
 public:
  StaticOptppOptimizer() {
    objective_ = NULL;
    method_ = NULL;
    nlp_ = NULL;
    compound_constraint_ = NULL;
  }
  ~StaticOptppOptimizer() {
    Destruct();
  }
  void Init(fx_module *module, Objective *objective) {
    module_ = module;
    objective_ = objective;
    dimension_ = objective_->dimension();
    // Collect the constraints that the template argument enables.
    OPTPP::OptppArray<OPTPP::Constraint> constraint_array;
    BoundConstraintTrait<Method, Objective,
     (bool)(Constraint & BoundConstraint)>::UpdateConstraints(objective_,
          &constraint_array);
    LinearEqualityTrait<Method, Objective,
     (bool)(Constraint & LinearEquality)>::UpdateConstraints(objective_,
          &constraint_array);
    LinearInequalityTrait<Method, Objective,
     (bool)(Constraint & LinearInequality)>::UpdateConstraints(objective_,
          &constraint_array);
    NonLinearEqualityTrait<Method, Objective,
     (bool)(Constraint & NonLinearEquality)>::UpdateConstraints(objective_,
          &constraint_array);
    NonLinearInequalityTrait<Method, Objective,
     (bool)(Constraint & NonLinearInequality)>::UpdateConstraints(objective_,
          &constraint_array);

    // Wrap any collected constraints; a NULL compound constraint means the
    // problem is unconstrained.
    if (constraint_array.length() > 0) {
      compound_constraint_ = new OPTPP::CompoundConstraint(constraint_array);
    }
    nlp_ = new typename OptimizationTrait<Method>::NlpType(dimension_,
        ComputeObjective, Initialize, compound_constraint_);

    // Set up the tolerances.
    OPTPP::TOLS tols;
    tols.setDefaultTol();
    if (fx_param_exists(module_, "function_tol")) {
      tols.setFTol(fx_param_double_req(module_, "function_tol"));
    } else if (fx_param_exists(module_, "step_tol")) {
      tols.setStepTol(fx_param_double_req(module_, "step_tol"));
    } else if (fx_param_exists(module_, "grad_tol")) {
      tols.setGTol(fx_param_double_req(module_, "grad_tol"));
    }
    if (fx_param_exists(module_, "max_step")) {
      tols.setMaxStep(fx_param_double_req(module_, "max_step"));
    }
    if (fx_param_exists(module_, "min_step")) {
      tols.setMinStep(fx_param_double_req(module_, "min_step"));
    }
    if (fx_param_exists(module_, "line_search_tol")) {
      tols.setLSTol(fx_param_double_req(module_, "line_search_tol"));
    }

    tols.setMaxIter(fx_param_int(module_, "max_iter", 1000));
    method_ = new Method(nlp_, tols);
    // Set some generic options for the optimization method.
    const char *status_file = fx_param_str(module_, "status_file", "status.txt");
    if (method_->setOutputFile(status_file, 0) == false) {
      FATAL("Failed to open the status file %s for writing", status_file);
    }

    OptimizationTrait<Method>::InitializeMethod(module_, method_);
  }
  void Destruct() {
    if (method_ != NULL) {
      delete method_;
      method_ = NULL;
    }
    if (nlp_ != NULL) {
      delete nlp_;
      nlp_ = NULL;
    }
    if (compound_constraint_ != NULL) {
      delete compound_constraint_;
      compound_constraint_ = NULL;
    }
  }
  void Optimize(Vector *result) {
    method_->optimize();
    NEWMAT::ColumnVector x = method_->getXPrev();
    result->Copy(x.data(), dimension_);
    fx_result_bool(module_, "converged", method_->checkConvg());
    fx_result_int(module_, "iterations", method_->getIter());
    method_->cleanup();
  }

 private:
  fx_module *module_;
  static Objective *objective_;
  typename OptimizationTrait<Method>::NlpType *nlp_;
  OPTPP::CompoundConstraint *compound_constraint_;
  Method *method_;
  index_t dimension_;

  static void Initialize(int ndim, NEWMAT::ColumnVector &x) {
    Vector vec;
    vec.Alias(const_cast<double*>(x.data()), ndim);
    objective_->GiveInit(&vec);
  }

  static void ComputeObjective(int ndim, const NEWMAT::ColumnVector &x,
      double &fx, int &result) {
    Vector vec;
    vec.Alias(const_cast<double*>(x.data()), ndim);
    objective_->ComputeObjective(vec, &fx);
    result = OPTPP::NLPFunction;
  }

  static void ComputeObjective(int mode, int ndim, const NEWMAT::ColumnVector &x,
      double &fx, NEWMAT::ColumnVector &gx, int &result) {
    Vector vecx;
    vecx.Alias(const_cast<double*>(x.data()), ndim);
    if (mode & OPTPP::NLPFunction) {
      objective_->ComputeObjective(vecx, &fx);
      result = OPTPP::NLPFunction;
    }
    if (mode & OPTPP::NLPGradient) {
      Vector vecg;
      vecg.Alias(gx.data(), ndim);
      objective_->ComputeGradient(vecx, &vecg);
      result = OPTPP::NLPGradient;
    }
  }

  // Copies a fastlib Matrix into a NEWMAT SymmetricMatrix; NEWMAT indexing
  // is 1-based and only the lower half needs to be filled.
  static void CopyFLMatrixToNMSymMatrix(int ndim, const Matrix& flmat,
      NEWMAT::SymmetricMatrix &hx) {
    for(int m = 1; m <= ndim; ++m)
      for(int n = 1; n <= m; ++n)  // only need the lower half
        hx(m, n) = flmat.get(m - 1, n - 1);
  }

  static void ComputeObjective(int mode, int ndim, const NEWMAT::ColumnVector &x,
      double &fx, NEWMAT::ColumnVector &gx,
      NEWMAT::SymmetricMatrix &hx, int &result) {
    Vector vecx;
    vecx.Alias(const_cast<double*>(x.data()), ndim);
    if (mode & OPTPP::NLPFunction) {
      objective_->ComputeObjective(vecx, &fx);
      result = OPTPP::NLPFunction;
    }
    if (mode & OPTPP::NLPGradient) {
      Vector vecg;
      vecg.Alias(gx.data(), ndim);
      objective_->ComputeGradient(vecx, &vecg);
      result = OPTPP::NLPGradient;
    }
    if (mode & OPTPP::NLPHessian) {
      Matrix hessian;
      hessian.Init(ndim, ndim);
      objective_->ComputeHessian(vecx, &hessian);
      CopyFLMatrixToNMSymMatrix(ndim, hessian, hx);
      result = OPTPP::NLPHessian;
    }
  }

  static void ComputeNonLinearEqualityConstraints(int mode,
      const NEWMAT::ColumnVector &x, NEWMAT::ColumnVector &cvalue,
      int &result) {
    Vector vecx;
    vecx.Alias(const_cast<double*>(x.data()), x.Nrows());
    Vector vecc;
    vecc.Alias(cvalue.data(), cvalue.Nrows());
    objective_->ComputeNonLinearEqualityConstraints(vecx, &vecc);
    result = OPTPP::NLPConstraint;
  }

  static void ComputeNonLinearEqualityConstraints(int mode,
      int ndim, const NEWMAT::ColumnVector &x,
      NEWMAT::ColumnVector &cvalue, NEWMAT::Matrix &cJacobian,
      int &result) {
    Vector vecx;
    vecx.Alias(const_cast<double*>(x.data()), ndim);
    if (mode & OPTPP::NLPConstraint) {
      Vector vecc;
      vecc.Alias(cvalue.data(), cvalue.Nrows());
      objective_->ComputeNonLinearEqualityConstraints(vecx, &vecc);
      result = OPTPP::NLPConstraint;
    }
    if (mode & OPTPP::NLPCJacobian) {
      Matrix cjacob;
      cjacob.Alias(cJacobian.data(), cJacobian.Nrows(), cJacobian.Ncols());
      objective_->ComputeNonLinearEqualityConstraintsJacobian(vecx, &cjacob);
      result = OPTPP::NLPCJacobian;
    }
  }

  static void ComputeNonLinearEqualityConstraints(int mode,
      int ndim, const NEWMAT::ColumnVector &x,
      NEWMAT::ColumnVector &cvalue, NEWMAT::Matrix &cJacobian,
      OPTPP::OptppArray<NEWMAT::SymmetricMatrix> &cHessian,
      int &result) {
    FATAL("Not supported yet");
  }

  static void ComputeNonLinearInequalityConstraints(int mode,
      const NEWMAT::ColumnVector &x, NEWMAT::ColumnVector &cvalue,
      int &result) {
    Vector vecx;
    vecx.Alias(const_cast<double*>(x.data()), x.Nrows());
    Vector vecc;
    vecc.Alias(cvalue.data(), cvalue.Nrows());
    objective_->ComputeNonLinearInequalityConstraints(vecx, &vecc);
    result = OPTPP::NLPConstraint;
  }

  // We still have to test whether cJacobian, the Hessians, etc. get
  // initialized properly.
  static void ComputeNonLinearInequalityConstraints(int mode,
      int ncon, const NEWMAT::ColumnVector &x,
      NEWMAT::ColumnVector &cvalue, NEWMAT::Matrix &cJacobian,
      int &result) {
    Vector vecx;
    vecx.Alias(const_cast<double*>(x.data()), x.Nrows());
    if (mode & OPTPP::NLPConstraint) {
      Vector vecc;
      vecc.Alias(cvalue.data(), cvalue.Nrows());
      objective_->ComputeNonLinearInequalityConstraints(vecx, &vecc);
      result = OPTPP::NLPConstraint;
    }
    if (mode & OPTPP::NLPCJacobian) {
      Matrix cjacob;
      cjacob.Alias(cJacobian.data(), cJacobian.Nrows(), cJacobian.Ncols());
      objective_->ComputeNonLinearInequalityConstraintsJacobian(vecx,
          &cjacob);
      result = OPTPP::NLPCJacobian;
    }
  }

  static void ComputeNonLinearInequalityConstraints(int mode,
      int ndim, const NEWMAT::ColumnVector &x,
      NEWMAT::ColumnVector &cvalue, NEWMAT::Matrix &cJacobian,
      OPTPP::OptppArray<NEWMAT::SymmetricMatrix> &cHessian,
      int &result) {
    FATAL("Not supported yet");
  }
};

template<typename Method, typename Objective, ConstraintType Constraint>
Objective *StaticOptppOptimizer<Method, Objective,
    Constraint>::objective_ = NULL;

}  // namespace optpp
}  // namespace optim

#endif  // OPTIMIZER_H_
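
/* Example usage (an illustrative sketch, not part of this header):
 * MyObjective is a hypothetical user-supplied class; judging from the calls
 * made above it needs at least dimension(), GiveInit(), ComputeObjective()
 * and ComputeGradient() (plus ComputeHessian() for Newton, and the relevant
 * constraint accessors for any constraint flags that are enabled).
 *
 *   MyObjective objective;                // hypothetical objective
 *   fx_module *module = ...;              // e.g. obtained from fx_init()
 *   optim::optpp::StaticOptppOptimizer<optim::optpp::LBFGS, MyObjective>
 *       optimizer;
 *   optimizer.Init(module, &objective);   // builds the NLF and the method
 *   Vector result;
 *   optimizer.Optimize(&result);          // result holds the final point
 */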