#include "BonHeuristicDive.hpp"
#include "CoinHelperFunctions.hpp"
#include "CbcModel.hpp"

#include "OsiAuxInfo.hpp"

#include "CoinTime.hpp"

#include <fstream>

#include <iomanip>

using namespace std;

namespace Bonmin
{
  HeuristicDive::HeuristicDive()
    :
    CbcHeuristic(),
    setup_(NULL),
    percentageToFix_(0.2),
    howOften_(100)
  {}

  HeuristicDive::HeuristicDive(BonminSetup * setup)
    :
    CbcHeuristic(),
    setup_(setup),
    percentageToFix_(0.2),
    howOften_(100)
  {}

  HeuristicDive::HeuristicDive(const HeuristicDive & copy)
    :
    CbcHeuristic(copy),
    setup_(copy.setup_),
    percentageToFix_(copy.percentageToFix_),
    howOften_(copy.howOften_)
  {}

  HeuristicDive &
  HeuristicDive::operator=(const HeuristicDive & rhs)
  {
    if(this != &rhs) {
      CbcHeuristic::operator=(rhs);
      setup_ = rhs.setup_;
      percentageToFix_ = rhs.percentageToFix_;
      howOften_ = rhs.howOften_;
    }
    return *this;
  }

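  // Run the diving heuristic. If a better feasible solution is found it is
  // copied into betterSolution, solutionValue is updated, and 1 is
  // returned; otherwise 0 is returned.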
  int
  HeuristicDive::solution(double &solutionValue, double *betterSolution)
  {
    // Only attempt the heuristic every howOften_ nodes and only on the
    // first pass at a node.
    if ((model_->getNodeCount()%howOften_)!=0||model_->getCurrentPassNumber()>1)
      return 0;

    int returnCode = 0;

    // Work on a clone of the nonlinear solver so the model's own solver
    // is left untouched.
    OsiTMINLPInterface * nlp = NULL;
    if(setup_->getAlgorithm() == B_BB)
      nlp = dynamic_cast<OsiTMINLPInterface *>(model_->solver()->clone());
    else
      nlp = dynamic_cast<OsiTMINLPInterface *>(setup_->nonlinearSolver()->clone());

    TMINLP2TNLP* minlp = nlp->problem();

    double integerTolerance = model_->getDblParam(CbcModel::CbcIntegerTolerance);
    double primalTolerance = 1.0e-6;

    int numberColumns;
    int numberRows;
    int nnz_jac_g;
    int nnz_h_lag;
    TNLP::IndexStyleEnum index_style;
    minlp->get_nlp_info(numberColumns, numberRows, nnz_jac_g,
                        nnz_h_lag, index_style);

    const Bonmin::TMINLP::VariableType* variableType = minlp->var_types();
    const double* x_sol = minlp->x_sol();
    const double* x_l = minlp->x_l();
    const double* x_u = minlp->x_u();

    const double* g_l = minlp->g_l();
    const double* g_u = minlp->g_u();

    adjustPrimalTolerance(minlp, primalTolerance);

    assert(isNlpFeasible(minlp, primalTolerance));

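    // Start the dive from the current solution of the NLP relaxation and
    // collect the integer variables, counting how many are still fractional.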
    double* newSolution = new double [numberColumns];
    memcpy(newSolution,x_sol,numberColumns*sizeof(double));
    double* new_g_sol = new double [numberRows];

    vector<int> integerColumns;
    int numberFractionalVariables = 0;
    for (int iColumn=0;iColumn<numberColumns;iColumn++) {
      if (variableType[iColumn] != Bonmin::TMINLP::CONTINUOUS) {
        integerColumns.push_back(iColumn);
        double value=newSolution[iColumn];
        if (fabs(floor(value+0.5)-value)>integerTolerance) {
          numberFractionalVariables++;
        }
      }
    }

    setInternalVariables(minlp);

    int numberIntegers = (int) integerColumns.size();
    int* columnFixed = new int [numberIntegers];
    double* originalBound = new double [numberIntegers];
    bool * fixedAtLowerBound = new bool [numberIntegers];

    const int maxNumberAtBoundToFix = (int) floor(percentageToFix_ * numberIntegers);

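    // Main diving loop: while some integer variable is still fractional,
    // select a variable to round (selectVariableToBranch), fix integer
    // variables that already sit at one of their bounds (at most
    // maxNumberAtBoundToFix of them), tighten the selected variable's
    // bound in the rounding direction and resolve the NLP.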
    int iteration = -1;
    while(numberFractionalVariables) {
      iteration++;

      int bestColumn = -1;
      int bestRound = -1;
      selectVariableToBranch(minlp, integerColumns, newSolution,
                             bestColumn, bestRound);

      // Fix integer variables that are integer feasible and sit at one of
      // their bounds (up to maxNumberAtBoundToFix of them), remembering the
      // original bound so the fixing can be undone.
      int numberAtBoundFixed = 0;
      for (int i=0; i<numberIntegers; i++) {
        int iColumn = integerColumns[i];
        double value=newSolution[iColumn];
        if(fabs(floor(value+0.5)-value)<=integerTolerance &&
           numberAtBoundFixed < maxNumberAtBoundToFix) {
          if (fabs(x_l[iColumn]-value)<=integerTolerance &&
              x_l[iColumn] != x_u[iColumn]) {
            columnFixed[numberAtBoundFixed] = iColumn;
            originalBound[numberAtBoundFixed] = x_u[iColumn];
            fixedAtLowerBound[numberAtBoundFixed] = true;
            minlp->SetVariableUpperBound(iColumn, x_l[iColumn]);
            numberAtBoundFixed++;
          }
          else if(fabs(x_u[iColumn]-value)<=integerTolerance &&
                  x_l[iColumn] != x_u[iColumn]) {
            columnFixed[numberAtBoundFixed] = iColumn;
            originalBound[numberAtBoundFixed] = x_l[iColumn];
            fixedAtLowerBound[numberAtBoundFixed] = false;
            minlp->SetVariableLowerBound(iColumn, x_u[iColumn]);
            numberAtBoundFixed++;
          }
          if(numberAtBoundFixed == maxNumberAtBoundToFix)
            break;
        }
      }

      // Round the selected variable: bestRound < 0 rounds it down by
      // lowering its upper bound, otherwise it is rounded up by raising
      // its lower bound. If no variable was selected, stop diving.
      double originalBoundBestColumn;
      if(bestColumn >= 0) {
        if(bestRound < 0) {
          originalBoundBestColumn = x_u[bestColumn];
          minlp->SetVariableUpperBound(bestColumn, floor(newSolution[bestColumn]));
        }
        else {
          originalBoundBestColumn = x_l[bestColumn];
          minlp->SetVariableLowerBound(bestColumn, ceil(newSolution[bestColumn]));
        }
      } else {
        break;
      }
      int originalBestRound = bestRound;
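      // Resolve the NLP. If that fails, first release the variables fixed
      // at a bound; if it still fails, try rounding the selected variable
      // in the opposite direction; otherwise give up on this dive.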
      while (1) {
        nlp->initialSolve();

        if(minlp->optimization_status() != SUCCESS) {
          if(numberAtBoundFixed > 0) {
            // release the variables fixed at a bound
            for(int i=0; i<numberAtBoundFixed; i++) {
              int iColFixed = columnFixed[i];
              if(fixedAtLowerBound[i])
                minlp->SetVariableUpperBound(iColFixed, originalBound[i]);
              else
                minlp->SetVariableLowerBound(iColFixed, originalBound[i]);
            }
            numberAtBoundFixed = 0;
          }
          else if(bestRound == originalBestRound) {
            // flip the rounding direction of the selected variable
            bestRound *= (-1);
            if(bestRound < 0) {
              minlp->SetVariableLowerBound(bestColumn, originalBoundBestColumn);
              minlp->SetVariableUpperBound(bestColumn, floor(newSolution[bestColumn]));
            }
            else {
              minlp->SetVariableLowerBound(bestColumn, ceil(newSolution[bestColumn]));
              minlp->SetVariableUpperBound(bestColumn, originalBoundBestColumn);
            }
          }
          else
            break;
        }
        else
          break;
      }

      if(minlp->optimization_status() != SUCCESS) {
        break;
      }

      memcpy(newSolution,x_sol,numberColumns*sizeof(double));

      // Stop diving if the NLP objective is already no better than the
      // incumbent.
      double newSolutionValue;
      minlp->eval_f(numberColumns, newSolution, true, newSolutionValue);
      if(newSolutionValue >= solutionValue)
        break;

      // Recount the fractional integer variables.
      numberFractionalVariables = 0;
      for(int iIntCol=0; iIntCol<(int)integerColumns.size(); iIntCol++) {
        int iColumn = integerColumns[iIntCol];
        double value=newSolution[iColumn];
        if (fabs(floor(value+0.5)-value)>integerTolerance)
          numberFractionalVariables++;
      }
    }

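    // Check whether the final point is feasible: variable bounds and
    // integrality first, then the nonlinear constraints within
    // primalTolerance.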
    bool feasible = true;
    for (int iColumn=0;iColumn<numberColumns;iColumn++) {
      double value=newSolution[iColumn];
      if(value < x_l[iColumn] || value > x_u[iColumn]) {
        feasible = false;
        break;
      }
      if (variableType[iColumn] != Bonmin::TMINLP::CONTINUOUS) {
        if (fabs(floor(value+0.5)-value)>integerTolerance) {
          feasible = false;
          break;
        }
      }
    }
    minlp->eval_g(numberColumns, newSolution, true,
                  numberRows, new_g_sol);
    for(int iRow=0; iRow<numberRows; iRow++) {
      if(new_g_sol[iRow]<g_l[iRow]-primalTolerance ||
         new_g_sol[iRow]>g_u[iRow]+primalTolerance) {
        if(minlp->optimization_status() != SUCCESS) {
          feasible = false;
          break;
        } else {
#ifdef DEBUG_BON_HEURISTIC_DIVE
          cout<<"It should be infeasible because: "<<endl;
          cout<<"g_l["<<iRow<<"]= "<<g_l[iRow]<<" "
              <<"g_sol["<<iRow<<"]= "<<new_g_sol[iRow]<<" "
              <<"g_u["<<iRow<<"]= "<<g_u[iRow]<<endl;
          cout<<"primalTolerance= "<<primalTolerance<<endl;
#endif
          feasible = false;
          break;
        }
      }
    }

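    // If the point is feasible and improves on the incumbent, store it as
    // the new solution.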
    if(feasible) {
      double newSolutionValue;
      minlp->eval_f(numberColumns, newSolution, true, newSolutionValue);
      if(newSolutionValue < solutionValue) {
        memcpy(betterSolution,newSolution,numberColumns*sizeof(double));
        solutionValue = newSolutionValue;
        returnCode = 1;
      }
    }

    delete [] newSolution;
    delete [] new_g_sol;
    delete nlp;
    delete [] columnFixed;
    delete [] originalBound;
    delete [] fixedAtLowerBound;

#ifdef DEBUG_BON_HEURISTIC_DIVE
    std::cout<<"Dive returnCode = "<<returnCode<<std::endl;
#endif

    return returnCode;
  }

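  // Return true if the solution currently stored in minlp satisfies its
  // variable bounds and its constraints within primalTolerance.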
  bool
  isNlpFeasible(TMINLP2TNLP* minlp, const double primalTolerance)
  {
    int numberColumns;
    int numberRows;
    int nnz_jac_g;
    int nnz_h_lag;
    TNLP::IndexStyleEnum index_style;
    minlp->get_nlp_info(numberColumns, numberRows, nnz_jac_g,
                        nnz_h_lag, index_style);

    const double* x_sol = minlp->x_sol();
    const double* x_l = minlp->x_l();
    const double* x_u = minlp->x_u();
    const double* g_sol = minlp->g_sol();
    const double* g_l = minlp->g_l();
    const double* g_u = minlp->g_u();

    for (int iColumn=0;iColumn<numberColumns;iColumn++) {
      double value=x_sol[iColumn];
      if(value < x_l[iColumn] || value > x_u[iColumn]) {
        return false;
      }
    }
    for(int iRow=0; iRow<numberRows; iRow++) {
      if(g_sol[iRow]<g_l[iRow]-primalTolerance ||
         g_sol[iRow]>g_u[iRow]+primalTolerance) {
        return false;
      }
    }

    return true;
  }

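  // Enlarge primalTolerance so that every constraint violation of the
  // solution currently stored in minlp falls within the tolerance.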
  void
  adjustPrimalTolerance(TMINLP2TNLP* minlp, double & primalTolerance)
  {
    int numberColumns;
    int numberRows;
    int nnz_jac_g;
    int nnz_h_lag;
    TNLP::IndexStyleEnum index_style;
    minlp->get_nlp_info(numberColumns, numberRows, nnz_jac_g,
                        nnz_h_lag, index_style);

    const double* g_sol = minlp->g_sol();
    const double* g_l = minlp->g_l();
    const double* g_u = minlp->g_u();

    for(int iRow=0; iRow<numberRows; iRow++) {
      if(g_sol[iRow]<g_l[iRow]-primalTolerance) {
        primalTolerance = g_l[iRow]-g_sol[iRow];
      } else if(g_sol[iRow]>g_u[iRow]+primalTolerance) {
        primalTolerance = g_sol[iRow]-g_u[iRow];
      }
    }
  }

}