#ifndef _TNLP2FPNLP_HPP_
#define _TNLP2FPNLP_HPP_

#include <cassert>

#include "IpTNLP.hpp"
#include "BonTMINLP.hpp"
#include "IpSmartPtr.hpp"
#include "BonTypes.hpp"
namespace Bonmin
{
  using namespace Ipopt; // make Ipopt types (TNLP, SmartPtr, Number, Index) visible unqualified

  /** Adapter that turns a continuous TNLP into the NLP solved at each iteration
   *  of the feasibility pump: the objective can be replaced by (or blended with)
   *  a distance to a given point, and optional cutoff and local-branching
   *  constraints can be appended to the original constraints. */
  class TNLP2FPNLP : public Ipopt::TNLP
  {
  public:
    /** Build from the source problem @p tnlp; @p objectiveScalingFactor is the
     *  scaling applied to the original objective. */
    TNLP2FPNLP(const SmartPtr<TNLP> tnlp, double objectiveScalingFactor = 100);

    /** Build from the source problem @p tnlp, taking every other setting from @p other. */
    TNLP2FPNLP(const SmartPtr<TNLP> tnlp, const SmartPtr<TNLP2FPNLP> other);

    /** Destructor. */
    virtual ~TNLP2FPNLP();
    /** Replace the wrapped problem by @p tnlp. */
    void use(Ipopt::SmartPtr<TNLP> tnlp)
    {
      tnlp_ = GetRawPtr(tnlp);
    }
    /** Enable/disable the feasibility-pump (distance) objective. */
    void set_use_feasibility_pump_objective(bool use_feasibility_pump_objective)
    { use_feasibility_pump_objective_ = use_feasibility_pump_objective; }

    /** Enable/disable the extra cutoff constraint. */
    void set_use_cutoff_constraint(bool use_cutoff_constraint)
    { use_cutoff_constraint_ = use_cutoff_constraint; }

    /** Enable/disable the extra local-branching constraint. */
    void set_use_local_branching_constraint(bool use_local_branching_constraint)
    { use_local_branching_constraint_ = use_local_branching_constraint; }

    /** Set the cutoff value used by the cutoff constraint. */
    void set_cutoff(Number cutoff);

    /** Set the right-hand side of the local-branching constraint (must be nonnegative). */
    void set_rhs_local_branching_constraint(double rhs_local_branching_constraint)
    {
      assert(rhs_local_branching_constraint >= 0);
      rhs_local_branching_constraint_ = rhs_local_branching_constraint;
    }

    /** Set the point the distance is measured to: @p n values @p vals at the
     *  variable indices @p inds (typically the rounded integer variables). */
    void set_dist2point_obj(int n, const Number * vals, const Index * inds);

    /** Set the scaling parameter sigma of the distance term (must be nonnegative). */
    void setSigma(double sigma)
    { assert(sigma >= 0.);
      sigma_ = sigma; }

    /** Set the weight lambda (must lie in [0,1]). */
    void setLambda(double lambda)
    { assert(lambda >= 0. && lambda <= 1.);
      lambda_ = lambda; }

    /** Choose the norm used for the distance: 1 or 2. */
    void setNorm(int norm)
    { assert(norm > 0 && norm < 3);
      norm_ = norm; }
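
    /* A minimal configuration sketch (hypothetical caller code, not part of this
     * header): "fp_nlp" wraps the continuous relaxation and "x_round" holds the
     * rounded values of the n_int integer variables listed in "int_inds".
     *
     *   fp_nlp->set_use_feasibility_pump_objective(true);
     *   fp_nlp->set_dist2point_obj(n_int, x_round, int_inds);
     *   fp_nlp->setNorm(2);     // 2-norm distance
     *   fp_nlp->setSigma(1.);   // scaling of the distance term
     *   fp_nlp->setLambda(0.5); // weight in [0,1]
     */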

    /** Overloaded methods from Ipopt::TNLP. */
    virtual bool get_nlp_info(Index& n, Index& m, Index& nnz_jac_g,
                              Index& nnz_h_lag, TNLP::IndexStyleEnum& index_style);

    virtual bool get_bounds_info(Index n, Number* x_l, Number* x_u,
                                 Index m, Number* g_l, Number* g_u);

    /** Get the starting point from the wrapped TNLP for the original constraints;
     *  the multipliers of the extra cutoff/local-branching constraints, when
     *  active, are simply initialized to zero. */
    virtual bool get_starting_point(Index n, bool init_x, Number* x,
                                    bool init_z, Number* z_L, Number* z_U,
                                    Index m, bool init_lambda,
                                    Number* lambda)
    {
      Index m2 = m;
      if (use_cutoff_constraint_) {
        m2--;
        if (lambda != NULL) lambda[m2] = 0;
      }
      if (use_local_branching_constraint_) {
        m2--;
        if (lambda != NULL) lambda[m2] = 0;
      }
      return tnlp_->get_starting_point(n, init_x, x,
                                       init_z, z_L, z_U, m2, init_lambda, lambda);
    }

    /** Evaluate the (possibly modified) objective. */
    virtual bool eval_f(Index n, const Number* x, bool new_x,
                        Number& obj_value);

    /** Evaluate the gradient of the objective. */
    virtual bool eval_grad_f(Index n, const Number* x, bool new_x,
                             Number* grad_f);

    /** Evaluate the constraints, including the extra ones when they are active. */
    virtual bool eval_g(Index n, const Number* x, bool new_x,
                        Index m, Number* g);

    /** Evaluate the Jacobian of the constraints. */
    virtual bool eval_jac_g(Index n, const Number* x, bool new_x,
                            Index m, Index nele_jac, Index* iRow,
                            Index *jCol, Number* values);

    /** Evaluate the Hessian of the Lagrangian. */
    virtual bool eval_h(Index n, const Number* x, bool new_x,
                        Number obj_factor, Index m, const Number* lambda,
                        bool new_lambda, Index nele_hess,
                        Index* iRow, Index* jCol, Number* values);

    /** Called by Ipopt when the optimization is finished. */
    virtual void finalize_solution(SolverReturn status,
                                   Index n, const Number* x, const Number* z_L, const Number* z_U,
                                   Index m, const Number* g, const Number* lambda,
                                   Number obj_value,
                                   const IpoptData* ip_data,
                                   IpoptCalculatedQuantities* ip_cq);

    /** Set the scaling factor applied to the original objective. */
    void setObjectiveScaling(double value)
    {
      objectiveScalingFactor_ = value;
    }
    /** Get the scaling factor applied to the original objective. */
    double getObjectiveScaling() const
    {
      return objectiveScalingFactor_;
    }

  private:
    /** Compute the distance from @p x to the point stored via set_dist2point_obj(). */
    double dist2point(const Number *x);

    /** Default constructor, copy constructor and assignment operator are
     *  declared but not implemented to forbid their use. */
    TNLP2FPNLP();
    TNLP2FPNLP(const TNLP2FPNLP&);
    void operator=(const TNLP2FPNLP&);

    /** The wrapped problem. */
    SmartPtr<TNLP> tnlp_;

    /** Indices of the variables entering the distance term. */
    vector<Index> inds_;
    /** Values of the reference point for those variables. */
    vector<Number> vals_;
    /** Weight lambda in [0,1]. */
    double lambda_;
    /** Scaling parameter sigma of the distance term. */
    double sigma_;
    /** Norm used for the distance (1 or 2). */
    int norm_;

    /** Scaling factor applied to the original objective. */
    double objectiveScalingFactor_;

    /** If true, use the feasibility-pump (distance) objective. */
    bool use_feasibility_pump_objective_;
    /** If true, append the cutoff constraint. */
    bool use_cutoff_constraint_;
    /** If true, append the local-branching constraint. */
    bool use_local_branching_constraint_;

    /** Cutoff value for the cutoff constraint. */
    double cutoff_;
    /** Right-hand side of the local-branching constraint. */
    double rhs_local_branching_constraint_;

    /** Index style (C or Fortran) used by the wrapped problem. */
    TNLP::IndexStyleEnum index_style_;
  };
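
  /** Illustrative sketch only, not part of the Bonmin API: build and configure a
   *  feasibility-pump subproblem around an existing TNLP.  The reference point
   *  (@p n_int values @p rounded_vals at indices @p int_inds) and @p cutoff are
   *  hypothetical caller data. */
  inline Ipopt::SmartPtr<TNLP2FPNLP>
  exampleBuildFPSubproblem(const Ipopt::SmartPtr<Ipopt::TNLP>& nlp,
                           int n_int,
                           const Ipopt::Number* rounded_vals,
                           const Ipopt::Index* int_inds,
                           Ipopt::Number cutoff)
  {
    Ipopt::SmartPtr<TNLP2FPNLP> fp_nlp = new TNLP2FPNLP(nlp);
    // Use the distance-based feasibility-pump objective toward the rounded point.
    fp_nlp->set_use_feasibility_pump_objective(true);
    fp_nlp->set_dist2point_obj(n_int, rounded_vals, int_inds);
    // Optionally activate the two extra constraints declared above; the values
    // used here are placeholders, not recommendations.
    fp_nlp->set_use_cutoff_constraint(true);
    fp_nlp->set_cutoff(cutoff);
    fp_nlp->set_use_local_branching_constraint(true);
    fp_nlp->set_rhs_local_branching_constraint(10.);
    // The result can then be handed to Ipopt like any other Ipopt::TNLP.
    return fp_nlp;
  }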

}

#endif