#if defined(_MSC_VER)
// Turn off MSVC warning 4786 about overly long (template) identifier names.
# pragma warning(disable:4786)
#endif

#include "BonHeuristicDiveMIPVectorLength.hpp"
#include "CbcModel.hpp"

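// MIP variant of the "vector length" diving heuristic: the dive itself is
// driven by the HeuristicDiveMIP base class, while this class supplies the
// variable-selection rule, which scores each fractional integer variable by
// its estimated objective degradation divided by the number of constraints
// it appears in.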
namespace Bonmin
{
#if 0
  HeuristicDiveMIPVectorLength::HeuristicDiveMIPVectorLength()
    :
    HeuristicDiveMIP(),
    columnLength_(NULL)
  {}
#endif

  HeuristicDiveMIPVectorLength::HeuristicDiveMIPVectorLength(BonminSetup * setup)
    :
    HeuristicDiveMIP(setup),
    columnLength_(NULL)
  {
    Initialize(setup->options());
  }

  HeuristicDiveMIPVectorLength::HeuristicDiveMIPVectorLength(const HeuristicDiveMIPVectorLength &copy)
    :
    HeuristicDiveMIP(copy),
    columnLength_(NULL)
  {
  }

  HeuristicDiveMIPVectorLength &
  HeuristicDiveMIPVectorLength::operator=(const HeuristicDiveMIPVectorLength& rhs)
  {
    if (this != &rhs) {
      HeuristicDiveMIP::operator=(rhs);
      delete [] columnLength_;
      columnLength_ = NULL;
    }
    return *this;
  }

  CbcHeuristic *
  HeuristicDiveMIPVectorLength::clone() const
  {
    return new HeuristicDiveMIPVectorLength(*this);
  }

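  // Cache, for every column, the number of constraint Jacobian entries it
  // appears in (its "vector length"); this is the denominator of the diving
  // score used in selectVariableToBranch().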
  void
  HeuristicDiveMIPVectorLength::setInternalVariables(TMINLP2TNLP* minlp)
  {
    delete [] columnLength_;

    int numberColumns;
    int numberRows;
    int nnz_jac_g;
    int nnz_h_lag;
    Ipopt::TNLP::IndexStyleEnum index_style;
    minlp->get_nlp_info(numberColumns, numberRows, nnz_jac_g,
                        nnz_h_lag, index_style);

    const double* x_sol = minlp->x_sol();

    // Query only the sparsity pattern of the constraint Jacobian (the values
    // pointer is 0); the column indices tell us in how many constraints each
    // variable appears.
    int* indexRow = new int[nnz_jac_g];
    int* indexCol = new int[nnz_jac_g];
    minlp->eval_jac_g(numberColumns, x_sol, false,
                      numberRows, nnz_jac_g,
                      indexRow, indexCol, 0);
    columnLength_ = new int[numberColumns];
    int indexCorrection = (index_style == Ipopt::TNLP::C_STYLE) ? 0 : 1;
    // Count the nonzeros per column; start from zero so that columns without
    // any Jacobian entry get a well-defined length of 0.
    for(int i=0; i<numberColumns; i++)
      columnLength_[i] = 0;
    for(int i=0; i<nnz_jac_g; i++) {
      int thisIndexCol = indexCol[i]-indexCorrection;
      columnLength_[thisIndexCol]++;
    }
    delete [] indexRow;
    delete [] indexCol;
  }

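  // Among the integer variables that take a fractional value in newSolution,
  // return in bestColumn/bestRound the variable and rounding direction
  // (1 = round up, -1 = round down) with the smallest score
  // (estimated objective change + 1e-6) / (columnLength_ + 1).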
  void
  HeuristicDiveMIPVectorLength::selectVariableToBranch(TMINLP2TNLP* minlp,
                                                       const vector<int> & integerColumns,
                                                       const double* newSolution,
                                                       int& bestColumn,
                                                       int& bestRound)
  {
    double integerTolerance = model_->getDblParam(CbcModel::CbcIntegerTolerance);

    const double* x_l = minlp->x_l();
    const double* x_u = minlp->x_u();

    int numberColumns;
    int numberRows;
    int nnz_jac_g;
    int nnz_h_lag;
    Ipopt::TNLP::IndexStyleEnum index_style;
    minlp->get_nlp_info(numberColumns, numberRows, nnz_jac_g,
                        nnz_h_lag, index_style);

    double* gradient_f = new double[numberColumns];

    double bestScore = COIN_DBL_MAX;
    bestColumn = -1;
    bestRound = -1;
    minlp->eval_grad_f(numberColumns, newSolution, true, gradient_f);
    for(int iIntCol=0; iIntCol<(int)integerColumns.size(); iIntCol++) {
      int iColumn = integerColumns[iIntCol];
      double value = newSolution[iColumn];
      if (fabs(floor(value+0.5)-value) > integerTolerance) {
        // Fraction lost by rounding down or up; a direction that would leave
        // the variable bounds is marked with COIN_DBL_MAX.
        double below = floor(value);
        double downFraction = COIN_DBL_MAX;
        double gradient = gradient_f[iColumn];
        if(below >= x_l[iColumn])
          downFraction = value-below;
        double above = ceil(value);
        double upFraction = COIN_DBL_MAX;
        if(above <= x_u[iColumn])
          upFraction = ceil(value)-value;
        // Pick the rounding direction from the sign of the objective
        // gradient, falling back to whichever direction stays within bounds.
        double objdelta = 0;
        int round = 0;
        if(gradient>=0.0 && upFraction < COIN_DBL_MAX) {
          objdelta = gradient*upFraction;
          round = 1;
        } else if(gradient<0.0 && downFraction < COIN_DBL_MAX) {
          objdelta = gradient*downFraction;
          round = -1;
        } else if(upFraction < COIN_DBL_MAX) {
          objdelta = gradient*upFraction;
          round = 1;
        } else {
          objdelta = gradient*downFraction;
          round = -1;
        }
        // Vector-length score: estimated objective change divided by the
        // number of constraints the variable appears in; smaller is better.
        double score = (objdelta + 1e-6)/((double)columnLength_[iColumn]+1.0);
        if(score < bestScore) {
          bestScore = score;
          bestColumn = iColumn;
          bestRound = round;
        }
      }
    }

    delete [] gradient_f;
  }

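  // Register the option that switches this heuristic on; it is off by default.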
  void
  HeuristicDiveMIPVectorLength::registerOptions(Ipopt::SmartPtr<Bonmin::RegisteredOptions> roptions){
    roptions->SetRegisteringCategory("MINLP Heuristics", RegisteredOptions::BonminCategory);
    roptions->AddStringOption2(
      "heuristic_dive_MIP_vectorLength",
      "if yes runs the Dive MIP VectorLength heuristic",
      "no",
      "no", "don't run it",
      "yes", "runs the heuristic",
      "");
    // 63 is the extra-info bitmask marking the algorithms this option applies to.
    roptions->setOptionExtraInfo("heuristic_dive_MIP_vectorLength", 63);
  }

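  // No heuristic-specific options are read at the moment.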
  void
  HeuristicDiveMIPVectorLength::Initialize(Ipopt::SmartPtr<Ipopt::OptionsList> options){
  }

} // namespace Bonmin