00001
00002
00003
00004
00005
00006
00007
00008
00009
00010 #include "BonHeuristicDiveMIP.hpp"
00011 #include "CoinHelperFunctions.hpp"
00012 #include "CbcModel.hpp"
00013 #include "BonHeuristicDive.hpp"
00014 #include "BonSubMipSolver.hpp"
00015 #include "BonCbcLpStrategy.hpp"
00016
00017 #ifdef COIN_HAS_CPX
00018 #include "OsiCpxSolverInterface.hpp"
00019 #endif
00020
00021 #ifdef COIN_HAS_CLP
00022 #include "OsiClpSolverInterface.hpp"
00023 #endif
00024
00025 #include "OsiAuxInfo.hpp"
00026
00027 #include "CoinTime.hpp"
00028
00029 #include <fstream>
00030
00031 #include <iomanip>
00032
00033 #include "CoinHelperFunctions.hpp"
00034
00035
00036
00037 using namespace std;
00038
00039 namespace Bonmin
00040 {
#if 0
  // Default constructor (compiled out): would create the heuristic with no
  // setup object, so Initialize() is never run and both the MILP interface
  // and the Cbc strategy stay NULL.
  HeuristicDiveMIP::HeuristicDiveMIP()
    :
    CbcHeuristic(),
    setup_(NULL),
    howOften_(100),
    emptyInterface_(NULL),
    strategy_(NULL)
  {}
#endif
00051
  /** Constructor from a Bonmin setup.
   *  Stores the setup (not owned), sets the default calling frequency
   *  (howOften_ = 100), and calls Initialize() to create the MILP subsolver
   *  interface and Cbc strategy from the setup's "milp_solver" option. */
  HeuristicDiveMIP::HeuristicDiveMIP(BonminSetup * setup)
    :
    CbcHeuristic(),
    setup_(setup),
    howOften_(100),
    emptyInterface_(NULL),
    strategy_(NULL)
  {
    Initialize(setup);
  }
00062
  /** Configure the MILP subsolver from the "milp_solver" option:
   *   - <= 0 : Clp interface with the default Cbc strategy;
   *   -  1   : Clp interface with cut selection read from options under the
   *            "milp_solver." prefix;
   *   -  2   : CPLEX interface. NOTE: no Cbc strategy is created in this
   *            branch, so strategy_ remains NULL. If bonmin was built
   *            without CPLEX support, prints an error and throws.
   *  Owned objects are created here and released by the destructor. */
  void
  HeuristicDiveMIP::Initialize(BonminSetup * b){
    int ivalue;
    b->options()->GetEnumValue("milp_solver",ivalue,b->prefix());
    if (ivalue <= 0) {
      // Cbc over Clp with default settings.
      emptyInterface_ = new OsiClpSolverInterface;
      strategy_ = new CbcStrategyDefault;
    }
    else if (ivalue == 1) {
      // Cbc over Clp; the cut generators are chosen via the option prefix.
      emptyInterface_ = new OsiClpSolverInterface;
      std::string prefix = "milp_solver.";
      strategy_ = new CbcStrategyChooseCuts(*b, prefix);
    }
    else if (ivalue == 2) {
#ifdef COIN_HAS_CPX
      OsiCpxSolverInterface * cpxSolver = new OsiCpxSolverInterface;
      emptyInterface_ = cpxSolver;
#else
      // Option asked for CPLEX but the build has no CPLEX support: abort.
      std::cerr << "You have set an option to use CPLEX as the milp subsolver. However, \n"
                << "apparently CPLEX is not configured to be used in bonmin.\n"
                << "See the manual for configuring CPLEX\n";
      throw -1;
#endif
    }
  }
00088
00089 HeuristicDiveMIP::HeuristicDiveMIP(const HeuristicDiveMIP ©)
00090 :
00091 CbcHeuristic(copy),
00092 setup_(copy.setup_),
00093 howOften_(copy.howOften_),
00094 emptyInterface_(copy.emptyInterface_->clone()),
00095 strategy_(copy.strategy_->clone())
00096 {}
00097
00098 HeuristicDiveMIP &
00099 HeuristicDiveMIP::operator=(const HeuristicDiveMIP & rhs)
00100 {
00101 if(this != &rhs) {
00102 CbcHeuristic::operator=(rhs);
00103 setup_ = rhs.setup_;
00104 howOften_ = rhs.howOften_;
00105 delete emptyInterface_;
00106 delete strategy_;
00107 if(rhs.emptyInterface_)
00108 emptyInterface_ = rhs.emptyInterface_->clone();
00109 if(rhs.strategy_)
00110 strategy_ = rhs.strategy_->clone();
00111 }
00112 return *this;
00113 }
00114
  /** Destructor: releases the owned MILP interface and strategy
   *  (either may be NULL; delete on NULL is a no-op). */
  HeuristicDiveMIP::~HeuristicDiveMIP(){
    delete emptyInterface_;
    delete strategy_;
  }
00119
00120 struct MatComp{
00121 const int * iRow;
00122 const int * jCol;
00124 bool operator()(int i,int j){
00125 return (jCol[i] < jCol[j]) || (jCol[i] == jCol[j] && iRow[i] < iRow[j]);
00126 }
00127 };
00128
00129
00130 int
00131 HeuristicDiveMIP::solution(double &solutionValue, double *betterSolution)
00132 {
00133 if(model_->getNodeCount() || model_->getCurrentPassNumber() > 1) return 0;
00134 if ((model_->getNodeCount()%howOften_)!=0||model_->getCurrentPassNumber()>1)
00135 return 0;
00136
00137 int returnCode = 0;
00138
00139 OsiTMINLPInterface * nlp = NULL;
00140 if(setup_->getAlgorithm() == B_BB)
00141 nlp = dynamic_cast<OsiTMINLPInterface *>(model_->solver()->clone());
00142 else
00143 nlp = dynamic_cast<OsiTMINLPInterface *>(setup_->nonlinearSolver()->clone());
00144
00145 TMINLP2TNLP* minlp = nlp->problem();
00146
00147
00148 double integerTolerance = model_->getDblParam(CbcModel::CbcIntegerTolerance);
00149 double primalTolerance = 1.0e-6;
00150
00151 int numberColumns;
00152 int numberRows;
00153 int nnz_jac_g;
00154 int nnz_h_lag;
00155 TNLP::IndexStyleEnum index_style;
00156 minlp->get_nlp_info(numberColumns, numberRows, nnz_jac_g,
00157 nnz_h_lag, index_style);
00158
00159 const Bonmin::TMINLP::VariableType* variableType = minlp->var_types();
00160 const double* x_sol = minlp->x_sol();
00161 const double* x_l = minlp->x_l();
00162 const double* x_u = minlp->x_u();
00163
00164 const double* g_l = minlp->g_l();
00165 const double* g_u = minlp->g_u();
00166
00167 adjustPrimalTolerance(minlp, primalTolerance);
00168
00169 assert(isNlpFeasible(minlp, primalTolerance));
00170
00171
00172 TMINLP* tminlp = nlp->model();
00173 Ipopt::TNLP::LinearityType* variableLinearNonLinear = new
00174 Ipopt::TNLP::LinearityType [numberColumns];
00175 tminlp->get_variables_linearity(numberColumns, variableLinearNonLinear);
00176 vector<int> linearVariable;
00177 vector<int> nonlinearVariable;
00178 for (int iColumn=0;iColumn<numberColumns;iColumn++) {
00179 if (variableLinearNonLinear[iColumn]==Ipopt::TNLP::LINEAR)
00180 linearVariable.push_back(iColumn);
00181 else
00182 nonlinearVariable.push_back(iColumn);
00183 }
00184 int numberLinearColumns = linearVariable.size();
00185 int numberNonlinearColumns = nonlinearVariable.size();
00186
00187
00188
00189
00190
00191 int* indexRow = new int[nnz_jac_g];
00192 int* indexCol = new int[nnz_jac_g];
00193 minlp->eval_jac_g(numberColumns, x_sol, false,
00194 numberRows, nnz_jac_g,
00195 indexRow, indexCol, 0);
00196
00197 vector<int> sortedIndex(nnz_jac_g);
00198 CoinIotaN(sortedIndex(), nnz_jac_g, 0);
00199 MatComp c;
00200 c.iRow = indexRow;
00201 c.jCol = indexCol;
00202 std::sort(sortedIndex.begin(), sortedIndex.end(), c);
00203
00204 int* row = new int[nnz_jac_g];
00205 int* columnStart = new int[numberColumns];
00206 int* columnLength = new int[numberColumns];
00207 CoinZeroN(columnStart, numberColumns);
00208 CoinZeroN(columnLength, numberColumns);
00209 vector<vector<int> > column(numberRows);
00210
00211
00212 vector<vector<int> > columnInt(numberRows);
00213
00214
00215 std::vector<int> numberColumnsLinear(numberRows, 0);
00216
00217
00218 int indexCorrection = (index_style == TNLP::C_STYLE) ? 0 : 1;
00219 int iniCol = -1;
00220 for(int i=0; i<nnz_jac_g; i++) {
00221 int thisIndexCol = indexCol[sortedIndex[i]]-indexCorrection;
00222 if(indexCol[sortedIndex[i]] != iniCol) {
00223 iniCol = indexCol[sortedIndex[i]];
00224 columnStart[thisIndexCol] = i;
00225 columnLength[thisIndexCol] = 1;
00226 }
00227 else {
00228 columnLength[thisIndexCol]++;
00229 }
00230 row[i] = indexRow[sortedIndex[i]]-indexCorrection;
00231 column[row[i]].push_back(thisIndexCol);
00232 if (variableType[thisIndexCol] != Bonmin::TMINLP::CONTINUOUS)
00233 columnInt[row[i]].push_back(thisIndexCol);
00234 if(variableLinearNonLinear[thisIndexCol] == Ipopt::TNLP::LINEAR)
00235 numberColumnsLinear[row[i]]++;
00236 }
00237
00238
00239 double* newSolution = new double [numberColumns];
00240 memcpy(newSolution,x_sol,numberColumns*sizeof(double));
00241 double* new_g_sol = new double [numberRows];
00242
00243 double* gradient_f = new double[numberColumns];
00244 minlp->eval_grad_f(numberColumns,newSolution,true,gradient_f);
00245
00246
00247 vector<int> integerNonlinearColumns;
00248 int numberFractionalNonlinearVariables = 0;
00249 for (int iNLCol=0;iNLCol<numberNonlinearColumns;iNLCol++) {
00250 int iColumn = nonlinearVariable[iNLCol];
00251 if (variableType[iColumn] != Bonmin::TMINLP::CONTINUOUS) {
00252 integerNonlinearColumns.push_back(iColumn);
00253 double value=newSolution[iColumn];
00254 if (fabs(floor(value+0.5)-value)>integerTolerance) {
00255 numberFractionalNonlinearVariables++;
00256 }
00257 }
00258 }
00259
00260 setInternalVariables(minlp);
00261
00262 int iteration = -1;
00263 while(numberFractionalNonlinearVariables) {
00264 iteration++;
00265
00266
00267 int bestColumn = -1;
00268 int bestRound = -1;
00269 selectVariableToBranch(minlp, integerNonlinearColumns, newSolution,
00270 bestColumn, bestRound);
00271
00272 if(bestColumn >= 0) {
00273 if(bestRound < 0)
00274 minlp->SetVariableUpperBound(bestColumn, floor(newSolution[bestColumn]));
00275 else
00276 minlp->SetVariableLowerBound(bestColumn, ceil(newSolution[bestColumn]));
00277 } else {
00278 break;
00279 }
00280
00281 nlp->initialSolve();
00282
00283 if(minlp->optimization_status() != SUCCESS) {
00284 break;
00285 }
00286
00287 memcpy(newSolution,x_sol,numberColumns*sizeof(double));
00288
00289 numberFractionalNonlinearVariables = 0;
00290 for(int iIntCol=0; iIntCol<(int)integerNonlinearColumns.size(); iIntCol++) {
00291 int iColumn = integerNonlinearColumns[iIntCol];
00292 double value=newSolution[iColumn];
00293 if (fabs(floor(value+0.5)-value)>integerTolerance)
00294 numberFractionalNonlinearVariables++;
00295 }
00296
00297 double newSolutionValue;
00298 minlp->eval_f(numberColumns, newSolution, true, newSolutionValue);
00299 }
00300
00301
00302
00303 int numberFractionalLinearVariables = 0;
00304 for (int iLCol=0;iLCol<numberLinearColumns;iLCol++) {
00305 int iColumn = linearVariable[iLCol];
00306 if (variableType[iColumn] != Bonmin::TMINLP::CONTINUOUS) {
00307 double value=newSolution[iColumn];
00308 if (fabs(floor(value+0.5)-value)>integerTolerance) {
00309 numberFractionalLinearVariables++;
00310 }
00311 }
00312 }
00313
00314 bool feasible = true;
00315 if(numberFractionalLinearVariables) {
00316 int numberMIPRows = 0;
00317 int* mapRows = new int[numberRows];
00318 for(int iRow=0; iRow<numberRows; iRow++) {
00319 mapRows[iRow] = -1;
00320 if(numberColumnsLinear[iRow] > 0) {
00321 mapRows[iRow] = numberMIPRows++;
00322 }
00323 }
00324
00325
00326
00327 int numberIntegerLinearColumns = 0;
00328 for (int iLCol=0;iLCol<numberLinearColumns;iLCol++) {
00329 int iColumn = linearVariable[iLCol];
00330 newSolution[iColumn] = 0.0;
00331 if (variableType[iColumn] != Bonmin::TMINLP::CONTINUOUS)
00332 numberIntegerLinearColumns++;
00333 }
00334
00335 minlp->eval_g(numberColumns, newSolution, true,
00336 numberRows, new_g_sol);
00337 double* row_lb = new double[numberMIPRows];
00338 double* row_ub = new double[numberMIPRows];
00339 for(int iRow=0; iRow<numberRows; iRow++) {
00340 if(mapRows[iRow] > -1) {
00341 assert(mapRows[iRow] < numberMIPRows);
00342 if(g_l[iRow] == (-1.0) * nlp->getInfinity())
00343 row_lb[mapRows[iRow]] = g_l[iRow];
00344 else
00345 row_lb[mapRows[iRow]] = g_l[iRow] - new_g_sol[iRow];
00346 if(g_u[iRow] == nlp->getInfinity())
00347 row_ub[mapRows[iRow]] = g_u[iRow];
00348 else
00349 row_ub[mapRows[iRow]] = g_u[iRow] - new_g_sol[iRow];
00350 }
00351 }
00352
00353
00354 double* jac_g = new double [nnz_jac_g];
00355 minlp->eval_jac_g(numberColumns, x_sol, false,
00356 numberRows, nnz_jac_g,
00357 0, 0, jac_g);
00358
00359
00360
00361 CoinPackedMatrix* matrix = new CoinPackedMatrix(true,0,0);
00362 matrix->setDimensions(numberMIPRows,0);
00363
00364
00365
00366 double* objective = new double[numberLinearColumns];
00367 double* col_lb = new double[numberLinearColumns];
00368 double* col_ub = new double[numberLinearColumns];
00369 int* indexIntegerColumn = new int[numberIntegerLinearColumns];
00370 int numberIndexIntegerColumn = 0;
00371 for (int iLCol=0;iLCol<numberLinearColumns;iLCol++) {
00372 int iColumn = linearVariable[iLCol];
00373 objective[iLCol] = gradient_f[iColumn];
00374 col_lb[iLCol] = x_l[iColumn];
00375 col_ub[iLCol] = x_u[iColumn];
00376 CoinPackedVector newRow;
00377 for (int j=columnStart[iColumn];
00378 j<columnStart[iColumn]+columnLength[iColumn];j++) {
00379 int iRow = row[j];
00380 newRow.insert(mapRows[iRow], jac_g[sortedIndex[j]]);
00381 }
00382 matrix->appendCol(newRow);
00383 if (variableType[iColumn] != Bonmin::TMINLP::CONTINUOUS)
00384 indexIntegerColumn[numberIndexIntegerColumn++] = iLCol;
00385 }
00386
00387
00388 OsiSolverInterface *si;
00389 si = emptyInterface_->clone();
00390 si->messageHandler()->setLogLevel(0);
00391 si->passInMessageHandler(model_->messageHandler());
00392
00393 si->loadProblem(*matrix, col_lb, col_ub, objective, row_lb, row_ub);
00394 si->setInteger(indexIntegerColumn, numberIndexIntegerColumn);
00395
00396 SubMipSolver mip(si, strategy_);
00397 mip.optimize(DBL_MAX, 0, 60);
00398
00399 if(mip.getLastSolution()) {
00400 const double* solution = mip.getLastSolution();
00401 assert(si->getNumCols() == numberLinearColumns);
00402 for (int iLCol=0;iLCol<numberLinearColumns;iLCol++) {
00403 int iColumn = linearVariable[iLCol];
00404 newSolution[iColumn] = solution[iLCol];
00405 }
00406 }
00407 else
00408 feasible = false;
00409
00410 delete [] mapRows;
00411 delete [] row_lb;
00412 delete [] row_ub;
00413 delete [] jac_g;
00414 delete matrix;
00415 delete [] objective;
00416 delete [] col_lb;
00417 delete [] col_ub;
00418 delete [] indexIntegerColumn;
00419 delete si;
00420 }
00421
00422 #if 0
00423 bool feasible = true;
00424 for (int iColumn=0;iColumn<numberColumns;iColumn++) {
00425 double value=newSolution[iColumn];
00426 if(value < x_l[iColumn] || value > x_u[iColumn]) {
00427 feasible = false;
00428 break;
00429 }
00430 if (variableType[iColumn] != Bonmin::TMINLP::CONTINUOUS) {
00431 if (fabs(floor(value+0.5)-value)>integerTolerance) {
00432 feasible = false;
00433 break;
00434 }
00435 }
00436 }
00437 minlp->eval_g(numberColumns, newSolution, true,
00438 numberRows, new_g_sol);
00439 for(int iRow=0; iRow<numberRows; iRow++) {
00440 if(new_g_sol[iRow]<g_l[iRow]-primalTolerance ||
00441 new_g_sol[iRow]>g_u[iRow]+primalTolerance) {
00442 if(minlp->optimization_status() != SUCCESS) {
00443 feasible = false;
00444 break;
00445 } else {
00446 #ifdef DEBUG_BON_HEURISTIC_DIVE_MIP
00447 cout<<"It should be infeasible because: "<<endl;
00448 cout<<"g_l["<<iRow<<"]= "<<g_l[iRow]<<" "
00449 <<"g_sol["<<iRow<<"]= "<<new_g_sol[iRow]<<" "
00450 <<"g_u["<<iRow<<"]= "<<g_u[iRow]<<endl;
00451 cout<<"primalTolerance= "<<primalTolerance<<endl;
00452 #endif
00453 feasible = false;
00454 break;
00455 }
00456 }
00457 }
00458 #else
00459 if(feasible) {
00460
00461 for (int iColumn=0;iColumn<numberColumns;iColumn++) {
00462 if (variableType[iColumn] != Bonmin::TMINLP::CONTINUOUS) {
00463 double value=newSolution[iColumn];
00464 if (fabs(floor(value+0.5)-value)>integerTolerance) {
00465 #ifdef DEBUG_BON_HEURISTIC_DIVE_MIP
00466 cout<<"It should be infeasible because: "<<endl;
00467 cout<<"variable "<<iColumn<<" is not integer"<<endl;
00468 #endif
00469 feasible = false;
00470 break;
00471 }
00472 else {
00473 value=floor(newSolution[iColumn]+0.5);
00474 minlp->SetVariableUpperBound(iColumn, value);
00475 minlp->SetVariableLowerBound(iColumn, value);
00476 }
00477 }
00478 }
00479 if(feasible) {
00480 nlp->initialSolve();
00481 if(minlp->optimization_status() != SUCCESS) {
00482 feasible = false;
00483 }
00484 memcpy(newSolution,x_sol,numberColumns*sizeof(double));
00485 }
00486 }
00487 #endif
00488
00489 if(feasible) {
00490 double newSolutionValue;
00491 minlp->eval_f(numberColumns, newSolution, true, newSolutionValue);
00492 if(newSolutionValue < solutionValue) {
00493 memcpy(betterSolution,newSolution,numberColumns*sizeof(double));
00494 solutionValue = newSolutionValue;
00495 returnCode = 1;
00496 }
00497 }
00498
00499 delete [] variableLinearNonLinear;
00500 delete [] indexRow;
00501 delete [] indexCol;
00502 delete [] row;
00503 delete [] columnStart;
00504 delete [] columnLength;
00505 delete [] newSolution;
00506 delete [] new_g_sol;
00507 delete nlp;
00508
00509 #ifdef DEBUG_BON_HEURISTIC_DIVE_MIP
00510 std::cout<<"DiveMIP returnCode = "<<returnCode<<std::endl;
00511 #endif
00512
00513 return returnCode;
00514 }
00515 }