#include "BonminConfig.h"
#include "BonHeuristicDiveMIP.hpp"
#include "CoinHelperFunctions.hpp"
#include "CbcModel.hpp"
#include "BonHeuristicDive.hpp"
#include "BonSubMipSolver.hpp"
#include "BonCbcLpStrategy.hpp"

#ifdef COIN_HAS_CPX
#include "OsiCpxSolverInterface.hpp"
#endif

#include "OsiClpSolverInterface.hpp"

#include "OsiAuxInfo.hpp"

#include "CoinTime.hpp"

#include <fstream>
#include <iomanip>

// Standard headers for memcpy, fabs/floor/ceil, DBL_MAX, std::sort and std::vector
#include <cstring>
#include <cmath>
#include <cfloat>
#include <algorithm>
#include <vector>

using namespace std;

namespace Bonmin
{
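  // Constructor: attach the heuristic to the Bonmin setup and create the
  // sub-MIP solver used to round the linear integer variables.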
  HeuristicDiveMIP::HeuristicDiveMIP(BonminSetup * setup)
    :
    CbcHeuristic(),
    setup_(setup),
    howOften_(100),
    mip_(NULL)
  {
    Initialize(setup);
  }

  void
  HeuristicDiveMIP::Initialize(BonminSetup * b){
    delete mip_;
    mip_ = new SubMipSolver (*b, b->prefix());
  }

  HeuristicDiveMIP::HeuristicDiveMIP(const HeuristicDiveMIP & copy)
    :
    CbcHeuristic(copy),
    setup_(copy.setup_),
    howOften_(copy.howOften_),
    mip_(new SubMipSolver(*copy.mip_))
  {
  }

  HeuristicDiveMIP &
  HeuristicDiveMIP::operator=(const HeuristicDiveMIP & rhs)
  {
    if(this != &rhs) {
      CbcHeuristic::operator=(rhs);
      setup_ = rhs.setup_;
      howOften_ = rhs.howOften_;
      delete mip_;
      mip_ = NULL;  // avoid a dangling pointer when rhs has no sub-MIP solver
      if(rhs.mip_)
        mip_ = new SubMipSolver(*rhs.mip_);
    }
    return *this;
  }

  HeuristicDiveMIP::~HeuristicDiveMIP(){
    delete mip_;
  }

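  // Comparison functor used to sort Jacobian entries column-wise
  // (primary key: column index, secondary key: row index).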
  struct MatComp{
    const int * iRow;
    const int * jCol;
    bool operator()(int i,int j){
      return (jCol[i] < jCol[j]) || (jCol[i] == jCol[j] && iRow[i] < iRow[j]);
    }
  };

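  // Heuristic entry point. Dive on the integer variables that appear
  // nonlinearly: repeatedly tighten a bound to an integral value and re-solve
  // the NLP. The integer variables that appear only linearly are then rounded
  // by solving a MIP built from the linear part of the constraints. Returns 1
  // and fills betterSolution / solutionValue when an improving feasible
  // solution is found, 0 otherwise.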
  int
  HeuristicDiveMIP::solution(double &solutionValue, double *betterSolution)
  {
    // Only run at the root node, on the first pass.
    if(model_->getNodeCount() || model_->getCurrentPassNumber() > 1) return 0;
    if ((model_->getNodeCount()%howOften_)!=0||model_->getCurrentPassNumber()>1)
      return 0;

    int returnCode = 0;

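    // Work on a clone of the continuous relaxation so the heuristic does not
    // modify the solver used by the branch-and-bound tree.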
    OsiTMINLPInterface * nlp = NULL;
    if(setup_->getAlgorithm() == B_BB)
      nlp = dynamic_cast<OsiTMINLPInterface *>(model_->solver()->clone());
    else
      nlp = dynamic_cast<OsiTMINLPInterface *>(setup_->nonlinearSolver()->clone());

    TMINLP2TNLP* minlp = nlp->problem();

    double integerTolerance = model_->getDblParam(CbcModel::CbcIntegerTolerance);
    double primalTolerance = 1.0e-6;

    int numberColumns;
    int numberRows;
    int nnz_jac_g;
    int nnz_h_lag;
    Ipopt::TNLP::IndexStyleEnum index_style;
    minlp->get_nlp_info(numberColumns, numberRows, nnz_jac_g,
                        nnz_h_lag, index_style);

    const Bonmin::TMINLP::VariableType* variableType = minlp->var_types();
    const double* x_sol = minlp->x_sol();
    const double* x_l = minlp->x_l();
    const double* x_u = minlp->x_u();

    const double* g_l = minlp->g_l();
    const double* g_u = minlp->g_u();

    adjustPrimalTolerance(minlp, primalTolerance);

    assert(isNlpFeasible(minlp, primalTolerance));

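    // Partition the variables into those that appear only linearly and those
    // that appear nonlinearly.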
    TMINLP* tminlp = nlp->model();
    Ipopt::TNLP::LinearityType* variableLinearNonLinear = new
      Ipopt::TNLP::LinearityType [numberColumns];
    tminlp->get_variables_linearity(numberColumns, variableLinearNonLinear);
    vector<int> linearVariable;
    vector<int> nonlinearVariable;
    for (int iColumn=0;iColumn<numberColumns;iColumn++) {
      if (variableLinearNonLinear[iColumn]==Ipopt::TNLP::LINEAR)
        linearVariable.push_back(iColumn);
      else
        nonlinearVariable.push_back(iColumn);
    }
    size_t numberLinearColumns = linearVariable.size();
    size_t numberNonlinearColumns = nonlinearVariable.size();

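    // Get the sparsity pattern of the constraint Jacobian and sort its
    // entries column-wise.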
    int* indexRow = new int[nnz_jac_g];
    int* indexCol = new int[nnz_jac_g];
    minlp->eval_jac_g(numberColumns, x_sol, false,
                      numberRows, nnz_jac_g,
                      indexRow, indexCol, 0);

    vector<int> sortedIndex(nnz_jac_g);
    CoinIotaN(&sortedIndex[0], nnz_jac_g, 0);
    MatComp c;
    c.iRow = indexRow;
    c.jCol = indexCol;
    std::sort(sortedIndex.begin(), sortedIndex.end(), c);

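    // Build a column-major view of the Jacobian structure: start and length
    // of each column, plus, for each row, its columns, its integer columns,
    // and the number of linear columns it contains.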
    int* row = new int[nnz_jac_g];
    int* columnStart = new int[numberColumns];
    int* columnLength = new int[numberColumns];
    CoinZeroN(columnStart, numberColumns);
    CoinZeroN(columnLength, numberColumns);
    vector<vector<int> > column(numberRows);
    vector<vector<int> > columnInt(numberRows);
    std::vector<int> numberColumnsLinear(numberRows, 0);

    int indexCorrection = (index_style == Ipopt::TNLP::C_STYLE) ? 0 : 1;
    int iniCol = -1;
    for(int i=0; i<nnz_jac_g; i++) {
      int thisIndexCol = indexCol[sortedIndex[i]]-indexCorrection;
      if(indexCol[sortedIndex[i]] != iniCol) {
        iniCol = indexCol[sortedIndex[i]];
        columnStart[thisIndexCol] = i;
        columnLength[thisIndexCol] = 1;
      }
      else {
        columnLength[thisIndexCol]++;
      }
      row[i] = indexRow[sortedIndex[i]]-indexCorrection;
      column[row[i]].push_back(thisIndexCol);
      if (variableType[thisIndexCol] != Bonmin::TMINLP::CONTINUOUS)
        columnInt[row[i]].push_back(thisIndexCol);
      if(variableLinearNonLinear[thisIndexCol] == Ipopt::TNLP::LINEAR)
        numberColumnsLinear[row[i]]++;
    }

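    // Working copy of the current NLP solution; it is rounded in place below.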
    double* newSolution = new double [numberColumns];
    memcpy(newSolution,x_sol,numberColumns*sizeof(double));
    double* new_g_sol = new double [numberRows];

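    // Collect the integer variables that appear nonlinearly and count how
    // many of them are currently fractional.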
    vector<int> integerNonlinearColumns;
    int numberFractionalNonlinearVariables = 0;
    for (size_t iNLCol=0;iNLCol<numberNonlinearColumns;iNLCol++) {
      int iColumn = nonlinearVariable[iNLCol];
      if (variableType[iColumn] != Bonmin::TMINLP::CONTINUOUS) {
        integerNonlinearColumns.push_back(iColumn);
        double value=newSolution[iColumn];
        if (fabs(floor(value+0.5)-value)>integerTolerance) {
          numberFractionalNonlinearVariables++;
        }
      }
    }

    setInternalVariables(minlp);

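    // Diving loop: while some nonlinear integer variable is fractional, pick
    // one (selectVariableToBranch), tighten one of its bounds to an integral
    // value and re-solve the NLP.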
    int iteration = -1;
    while(numberFractionalNonlinearVariables) {
      iteration++;

      int bestColumn = -1;
      int bestRound = -1;
      selectVariableToBranch(minlp, integerNonlinearColumns, newSolution,
                             bestColumn, bestRound);

      if(bestColumn >= 0) {
        if(bestRound < 0)
          minlp->SetVariableUpperBound(bestColumn, floor(newSolution[bestColumn]));
        else
          minlp->SetVariableLowerBound(bestColumn, ceil(newSolution[bestColumn]));
      } else {
        break;
      }

      nlp->initialSolve();

      if(minlp->optimization_status() != Ipopt::SUCCESS) {
        break;
      }

      memcpy(newSolution,x_sol,numberColumns*sizeof(double));

      numberFractionalNonlinearVariables = 0;
      for(int iIntCol=0; iIntCol<(int)integerNonlinearColumns.size(); iIntCol++) {
        int iColumn = integerNonlinearColumns[iIntCol];
        double value=newSolution[iColumn];
        if (fabs(floor(value+0.5)-value)>integerTolerance)
          numberFractionalNonlinearVariables++;
      }

      double newSolutionValue;
      minlp->eval_f(numberColumns, newSolution, true, newSolutionValue);
    }

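    // Count the integer variables appearing only linearly that are still
    // fractional after the dive.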
    int numberFractionalLinearVariables = 0;
    for (size_t iLCol=0;iLCol<numberLinearColumns;iLCol++) {
      int iColumn = linearVariable[iLCol];
      if (variableType[iColumn] != Bonmin::TMINLP::CONTINUOUS) {
        double value=newSolution[iColumn];
        if (fabs(floor(value+0.5)-value)>integerTolerance) {
          numberFractionalLinearVariables++;
        }
      }
    }

    bool feasible = true;
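    // Some integer variables appearing only linearly are still fractional:
    // build a MIP over the linear variables, using the gradient of the
    // objective as cost and the linear part of each constraint row (the
    // nonlinear contribution is moved into the row bounds), and solve it to
    // round them.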
    if(numberFractionalLinearVariables) {
      int numberMIPRows = 0;
      int* mapRows = new int[numberRows];
      for(int iRow=0; iRow<numberRows; iRow++) {
        mapRows[iRow] = -1;
        if(numberColumnsLinear[iRow] > 0) {
          mapRows[iRow] = numberMIPRows++;
        }
      }

      // Zero out the linear variables so that eval_g and eval_grad_f below
      // give the contribution of the nonlinear variables only.
      int numberIntegerLinearColumns = 0;
      for (size_t iLCol=0;iLCol<numberLinearColumns;iLCol++) {
        int iColumn = linearVariable[iLCol];
        newSolution[iColumn] = 0.0;
        if (variableType[iColumn] != Bonmin::TMINLP::CONTINUOUS)
          numberIntegerLinearColumns++;
      }

      double* gradient_f = new double[numberColumns];
      minlp->eval_grad_f(numberColumns,newSolution,true,gradient_f);

      minlp->eval_g(numberColumns, newSolution, true,
                    numberRows, new_g_sol);
      double* row_lb = new double[numberMIPRows];
      double* row_ub = new double[numberMIPRows];
      for(int iRow=0; iRow<numberRows; iRow++) {
        if(mapRows[iRow] > -1) {
          assert(mapRows[iRow] < numberMIPRows);
          if(g_l[iRow] == (-1.0) * nlp->getInfinity())
            row_lb[mapRows[iRow]] = g_l[iRow];
          else
            row_lb[mapRows[iRow]] = g_l[iRow] - new_g_sol[iRow];
          if(g_u[iRow] == nlp->getInfinity())
            row_ub[mapRows[iRow]] = g_u[iRow];
          else
            row_ub[mapRows[iRow]] = g_u[iRow] - new_g_sol[iRow];
        }
      }

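      // Evaluate the Jacobian values (the sparsity pattern was retrieved
      // above).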
      double* jac_g = new double [nnz_jac_g];
      minlp->eval_jac_g(numberColumns, x_sol, false,
                        numberRows, nnz_jac_g,
                        0, 0, jac_g);

      CoinPackedMatrix* matrix = new CoinPackedMatrix(true,0,0);
      matrix->setDimensions(numberMIPRows,0);

      double* objective = new double[numberLinearColumns];
      double* col_lb = new double[numberLinearColumns];
      double* col_ub = new double[numberLinearColumns];
      int* indexIntegerColumn = new int[numberIntegerLinearColumns];
      int numberIndexIntegerColumn = 0;
      for (size_t iLCol=0;iLCol<numberLinearColumns;iLCol++) {
        int iColumn = linearVariable[iLCol];
        objective[iLCol] = gradient_f[iColumn];
        col_lb[iLCol] = x_l[iColumn];
        col_ub[iLCol] = x_u[iColumn];
        CoinPackedVector newRow;
        int end = columnStart[iColumn]+columnLength[iColumn];
        for (int j=columnStart[iColumn]; j< end;j++) {
          int iRow = row[j];
          newRow.insert(mapRows[iRow], jac_g[sortedIndex[j]]);
        }
        matrix->appendCol(newRow);
        if (variableType[iColumn] != Bonmin::TMINLP::CONTINUOUS)
          indexIntegerColumn[numberIndexIntegerColumn++] = static_cast<int>(iLCol);
      }

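      // Load the linear model into the sub-MIP solver, creating an
      // OsiClpSolverInterface if none is attached, and solve it with a
      // 60 second time limit and no cutoff.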
      OsiSolverInterface *si = mip_->solver();
      bool delete_si = false;
      if(si == NULL){
        si = new OsiClpSolverInterface;
        mip_->setLpSolver(si);
        delete_si = true;
      }
      CoinMessageHandler * handler = model_->messageHandler()->clone();
      si->passInMessageHandler(handler);
      si->messageHandler()->setLogLevel(0);

      si->loadProblem(*matrix, col_lb, col_ub, objective, row_lb, row_ub);
      si->setInteger(indexIntegerColumn, numberIndexIntegerColumn);

      mip_->optimize(DBL_MAX, 0, 60);

      if(mip_->getLastSolution()) {
        const double* solution = mip_->getLastSolution();
        assert(si->getNumCols() == static_cast<int>(numberLinearColumns));
        for (size_t iLCol=0;iLCol<numberLinearColumns;iLCol++) {
          int iColumn = linearVariable[iLCol];
          newSolution[iColumn] = solution[iLCol];
        }
      }
      else
        feasible = false;

      delete [] mapRows;
      delete [] row_lb;
      delete [] row_ub;
      delete [] jac_g;
      delete [] gradient_f;
      delete matrix;
      delete [] objective;
      delete [] col_lb;
      delete [] col_ub;
      delete [] indexIntegerColumn;
      if(delete_si){
        delete si;
      }
      delete handler;
    }

#if 0
    bool feasible = true;
    for (int iColumn=0;iColumn<numberColumns;iColumn++) {
      double value=newSolution[iColumn];
      if(value < x_l[iColumn] || value > x_u[iColumn]) {
        feasible = false;
        break;
      }
      if (variableType[iColumn] != Bonmin::TMINLP::CONTINUOUS) {
        if (fabs(floor(value+0.5)-value)>integerTolerance) {
          feasible = false;
          break;
        }
      }
    }
    minlp->eval_g(numberColumns, newSolution, true,
                  numberRows, new_g_sol);
    for(int iRow=0; iRow<numberRows; iRow++) {
      if(new_g_sol[iRow]<g_l[iRow]-primalTolerance ||
         new_g_sol[iRow]>g_u[iRow]+primalTolerance) {
        if(minlp->optimization_status() != Ipopt::SUCCESS) {
          feasible = false;
          break;
        } else {
#ifdef DEBUG_BON_HEURISTIC_DIVE_MIP
          cout<<"It should be infeasible because: "<<endl;
          cout<<"g_l["<<iRow<<"]= "<<g_l[iRow]<<" "
              <<"g_sol["<<iRow<<"]= "<<new_g_sol[iRow]<<" "
              <<"g_u["<<iRow<<"]= "<<g_u[iRow]<<endl;
          cout<<"primalTolerance= "<<primalTolerance<<endl;
#endif
          feasible = false;
          break;
        }
      }
    }
#else
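    // Round and fix every integer variable at its current value (declaring
    // failure if one is still fractional) and re-solve the NLP to recover
    // feasible values for the continuous variables.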
    if(feasible) {
      for (int iColumn=0;iColumn<numberColumns;iColumn++) {
        if (variableType[iColumn] != Bonmin::TMINLP::CONTINUOUS) {
          double value=newSolution[iColumn];
          if (fabs(floor(value+0.5)-value)>integerTolerance) {
#ifdef DEBUG_BON_HEURISTIC_DIVE_MIP
            cout<<"It should be infeasible because: "<<endl;
            cout<<"variable "<<iColumn<<" is not integer"<<endl;
#endif
            feasible = false;
            break;
          }
          else {
            value=floor(newSolution[iColumn]+0.5);
            minlp->SetVariableUpperBound(iColumn, value);
            minlp->SetVariableLowerBound(iColumn, value);
          }
        }
      }
      if(feasible) {
        nlp->initialSolve();
        if(minlp->optimization_status() != Ipopt::SUCCESS) {
          feasible = false;
        }
        memcpy(newSolution,x_sol,numberColumns*sizeof(double));
      }
    }
#endif

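    // Accept the candidate solution if it is feasible and improves on the
    // incumbent value.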
    if(feasible) {
      double newSolutionValue;
      minlp->eval_f(numberColumns, newSolution, true, newSolutionValue);
      if(newSolutionValue < solutionValue) {
        memcpy(betterSolution,newSolution,numberColumns*sizeof(double));
        solutionValue = newSolutionValue;
        returnCode = 1;
      }
    }

    delete [] variableLinearNonLinear;
    delete [] indexRow;
    delete [] indexCol;
    delete [] row;
    delete [] columnStart;
    delete [] columnLength;
    delete [] newSolution;
    delete [] new_g_sol;
    delete nlp;

#ifdef DEBUG_BON_HEURISTIC_DIVE_MIP
    std::cout<<"DiveMIP returnCode = "<<returnCode<<std::endl;
#endif

    return returnCode;
  }
}