BonHeuristicDive.cpp
// Copyright (C) 2007, International Business Machines Corporation and others.
// All Rights Reserved.
// This code is published under the Eclipse Public License.
//
// Authors :
// Joao P. Goncalves, International Business Machines Corporation
//
// Date : November 12, 2007

#include "BonHeuristicDive.hpp"
#include "CoinHelperFunctions.hpp"
#include "CbcModel.hpp"

#include "OsiAuxInfo.hpp"

#include "CoinTime.hpp"

#include <fstream>

#include <iomanip>

//#define DEBUG_BON_HEURISTIC_DIVE

using namespace std;

namespace Bonmin
{
  HeuristicDive::HeuristicDive()
    :
    CbcHeuristic(),
    setup_(NULL),
    percentageToFix_(0.2),
    howOften_(100)
  {}

  HeuristicDive::HeuristicDive(BonminSetup * setup)
    :
    CbcHeuristic(),
    setup_(setup),
    percentageToFix_(0.2),
    howOften_(100)
  {
    //    Initialize(setup->options());
  }

  HeuristicDive::HeuristicDive(const HeuristicDive &copy)
    :
    CbcHeuristic(copy),
    setup_(copy.setup_),
    percentageToFix_(copy.percentageToFix_),
    howOften_(copy.howOften_)
  {}

  HeuristicDive &
  HeuristicDive::operator=(const HeuristicDive & rhs)
  {
    if(this != &rhs) {
      CbcHeuristic::operator=(rhs);
      setup_ = rhs.setup_;
      percentageToFix_ = rhs.percentageToFix_;
      howOften_ = rhs.howOften_;
    }
    return *this;
  }

  /** Performs the diving heuristic.
      Returns 1 and updates solutionValue/betterSolution when an improved
      feasible solution is found, 0 otherwise. */
  int
  HeuristicDive::solution(double &solutionValue, double *betterSolution)
  {
    //    if(model_->getNodeCount() || model_->getCurrentPassNumber() > 1) return 0;
    if ((model_->getNodeCount()%howOften_)!=0||model_->getCurrentPassNumber()>1)
      return 0;

    int returnCode = 0; // 0 means it didn't find a feasible solution

    OsiTMINLPInterface * nlp = NULL;
    if(setup_->getAlgorithm() == B_BB)
      nlp = dynamic_cast<OsiTMINLPInterface *>(model_->solver()->clone());
    else
      nlp = dynamic_cast<OsiTMINLPInterface *>(setup_->nonlinearSolver()->clone());

    TMINLP2TNLP* minlp = nlp->problem();

    // set tolerances
    double integerTolerance = model_->getDblParam(CbcModel::CbcIntegerTolerance);
    double primalTolerance = 1.0e-6;

    int numberColumns;
    int numberRows;
    int nnz_jac_g;
    int nnz_h_lag;
    Ipopt::TNLP::IndexStyleEnum index_style;
    minlp->get_nlp_info(numberColumns, numberRows, nnz_jac_g,
                        nnz_h_lag, index_style);

    const Bonmin::TMINLP::VariableType* variableType = minlp->var_types();
    const double* x_sol = minlp->x_sol();
    const double* x_l = minlp->x_l();
    const double* x_u = minlp->x_u();
    //const double* g_sol = minlp->g_sol();
    const double* g_l = minlp->g_l();
    const double* g_u = minlp->g_u();

    adjustPrimalTolerance(minlp, primalTolerance);

    assert(isNlpFeasible(minlp, primalTolerance));

    // Get solution array for heuristic solution
    double* newSolution = new double [numberColumns];
    memcpy(newSolution,x_sol,numberColumns*sizeof(double));
    double* new_g_sol = new double [numberRows];

    // create a set with the indices of the fractional variables
    vector<int> integerColumns; // stores the integer variables
    int numberFractionalVariables = 0;
    for (int iColumn=0;iColumn<numberColumns;iColumn++) {
      if (variableType[iColumn] != Bonmin::TMINLP::CONTINUOUS) {
        integerColumns.push_back(iColumn);
        double value=newSolution[iColumn];
        if (fabs(floor(value+0.5)-value)>integerTolerance) {
          numberFractionalVariables++;
        }
      }
    }

    setInternalVariables(minlp);

    // vectors to store the latest variables fixed at their bounds
    int numberIntegers = (int) integerColumns.size();
    int* columnFixed = new int [numberIntegers];
    double* originalBound = new double [numberIntegers];
    bool * fixedAtLowerBound = new bool [numberIntegers];

    const int maxNumberAtBoundToFix = (int) floor(percentageToFix_ * numberIntegers);

    int iteration = -1;
    while(numberFractionalVariables) {
      iteration++;

      // select a fractional variable to bound
      int bestColumn = -1;
      int bestRound = -1; // -1 rounds down, +1 rounds up
      selectVariableToBranch(minlp, integerColumns, newSolution,
                             bestColumn, bestRound);

      // fix integer variables that are at their bounds
      int numberAtBoundFixed = 0;
      for (int i=0; i<numberIntegers; i++) {
        int iColumn = integerColumns[i];
        double value=newSolution[iColumn];
        if(fabs(floor(value+0.5)-value)<=integerTolerance &&
           numberAtBoundFixed < maxNumberAtBoundToFix) {
          // fix the variable at one of its bounds
          if (fabs(x_l[iColumn]-value)<=integerTolerance &&
              x_l[iColumn] != x_u[iColumn]) {
            columnFixed[numberAtBoundFixed] = iColumn;
            originalBound[numberAtBoundFixed] = x_u[iColumn];
            fixedAtLowerBound[numberAtBoundFixed] = true;
            minlp->SetVariableUpperBound(iColumn, x_l[iColumn]);
            numberAtBoundFixed++;
          }
          else if(fabs(x_u[iColumn]-value)<=integerTolerance &&
                  x_l[iColumn] != x_u[iColumn]) {
            columnFixed[numberAtBoundFixed] = iColumn;
            originalBound[numberAtBoundFixed] = x_l[iColumn];
            fixedAtLowerBound[numberAtBoundFixed] = false;
            minlp->SetVariableLowerBound(iColumn, x_u[iColumn]);
            numberAtBoundFixed++;
          }
          if(numberAtBoundFixed == maxNumberAtBoundToFix)
            break;
        }
      }

      // bound the selected variable in the chosen rounding direction
      double originalBoundBestColumn;
      if(bestColumn >= 0) {
        if(bestRound < 0) {
          originalBoundBestColumn = x_u[bestColumn];
          minlp->SetVariableUpperBound(bestColumn, floor(newSolution[bestColumn]));
        }
        else {
          originalBoundBestColumn = x_l[bestColumn];
          minlp->SetVariableLowerBound(bestColumn, ceil(newSolution[bestColumn]));
        }
      } else {
        break;
      }

      // resolve the NLP; if it fails, first undo the bound fixings,
      // then try rounding the selected variable in the other direction
      int originalBestRound = bestRound;
      while (1) {

        nlp->initialSolve();

        if(minlp->optimization_status() != Ipopt::SUCCESS) {
          if(numberAtBoundFixed > 0) {
            // Remove the bound fix for variables that were at bounds
            for(int i=0; i<numberAtBoundFixed; i++) {
              int iColFixed = columnFixed[i];
              if(fixedAtLowerBound[i])
                minlp->SetVariableUpperBound(iColFixed, originalBound[i]);
              else
                minlp->SetVariableLowerBound(iColFixed, originalBound[i]);
            }
            numberAtBoundFixed = 0;
          }
          else if(bestRound == originalBestRound) {
            bestRound *= (-1);
            if(bestRound < 0) {
              minlp->SetVariableLowerBound(bestColumn, originalBoundBestColumn);
              minlp->SetVariableUpperBound(bestColumn, floor(newSolution[bestColumn]));
            }
            else {
              minlp->SetVariableLowerBound(bestColumn, ceil(newSolution[bestColumn]));
              minlp->SetVariableUpperBound(bestColumn, originalBoundBestColumn);
            }
          }
          else
            break;
        }
        else
          break;
      }

      if(minlp->optimization_status() != Ipopt::SUCCESS) {
        break;
      }

      memcpy(newSolution,x_sol,numberColumns*sizeof(double));

      double newSolutionValue;
      minlp->eval_f(numberColumns, newSolution, true, newSolutionValue);
      if(newSolutionValue >= solutionValue)
        break;

      numberFractionalVariables = 0;
      for(int iIntCol=0; iIntCol<(int)integerColumns.size(); iIntCol++) {
        int iColumn = integerColumns[iIntCol];
        double value=newSolution[iColumn];
        if (fabs(floor(value+0.5)-value)>integerTolerance)
          numberFractionalVariables++;
      }

    }

    // check that the final point satisfies bounds, integrality and constraints
    bool feasible = true;
    for (int iColumn=0;iColumn<numberColumns;iColumn++) {
      double value=newSolution[iColumn];
      if(value < x_l[iColumn] || value > x_u[iColumn]) {
        feasible = false;
        break;
      }
      if (variableType[iColumn] != Bonmin::TMINLP::CONTINUOUS) {
        if (fabs(floor(value+0.5)-value)>integerTolerance) {
          feasible = false;
          break;
        }
      }
    }
    minlp->eval_g(numberColumns, newSolution, true,
                  numberRows, new_g_sol);
    for(int iRow=0; iRow<numberRows; iRow++) {
      if(new_g_sol[iRow]<g_l[iRow]-primalTolerance ||
         new_g_sol[iRow]>g_u[iRow]+primalTolerance) {
        if(minlp->optimization_status() != Ipopt::SUCCESS) {
          feasible = false;
          break;
        } else {
#ifdef DEBUG_BON_HEURISTIC_DIVE
          cout<<"It should be infeasible because: "<<endl;
          cout<<"g_l["<<iRow<<"]= "<<g_l[iRow]<<" "
              <<"g_sol["<<iRow<<"]= "<<new_g_sol[iRow]<<" "
              <<"g_u["<<iRow<<"]= "<<g_u[iRow]<<endl;
          cout<<"primalTolerance= "<<primalTolerance<<endl;
#endif
          feasible = false;
          break;
        }
      }
    }

    if(feasible) {
      double newSolutionValue;
      minlp->eval_f(numberColumns, newSolution, true, newSolutionValue);
      if(newSolutionValue < solutionValue) {
        memcpy(betterSolution,newSolution,numberColumns*sizeof(double));
        solutionValue = newSolutionValue;
        returnCode = 1;
      }
    }

    delete [] newSolution;
    delete [] new_g_sol;
    delete nlp;
    delete [] columnFixed;
    delete [] originalBound;
    delete [] fixedAtLowerBound;

#ifdef DEBUG_BON_HEURISTIC_DIVE
    std::cout<<"Dive returnCode = "<<returnCode<<std::endl;
#endif

    return returnCode;
  }

  /** Checks if the NLP relaxation of the problem is feasible. */
  bool
  isNlpFeasible(TMINLP2TNLP* minlp, const double primalTolerance)
  {
    int numberColumns;
    int numberRows;
    int nnz_jac_g;
    int nnz_h_lag;
    Ipopt::TNLP::IndexStyleEnum index_style;
    minlp->get_nlp_info(numberColumns, numberRows, nnz_jac_g,
                        nnz_h_lag, index_style);

    const double* x_sol = minlp->x_sol();
    const double* x_l = minlp->x_l();
    const double* x_u = minlp->x_u();
    const double* g_sol = minlp->g_sol();
    const double* g_l = minlp->g_l();
    const double* g_u = minlp->g_u();

    // check if the problem is feasible
    for (int iColumn=0;iColumn<numberColumns;iColumn++) {
      double value=x_sol[iColumn];
      if(value < x_l[iColumn] || value > x_u[iColumn]) {
        return false;
      }
    }
    for(int iRow=0; iRow<numberRows; iRow++) {
      if(g_sol[iRow]<g_l[iRow]-primalTolerance ||
         g_sol[iRow]>g_u[iRow]+primalTolerance) {
        return false;
      }
    }

    return true;
  }

  /** Adjusts the primalTolerance in case some of the constraints are violated. */
  void
  adjustPrimalTolerance(TMINLP2TNLP* minlp, double & primalTolerance)
  {
    int numberColumns;
    int numberRows;
    int nnz_jac_g;
    int nnz_h_lag;
    Ipopt::TNLP::IndexStyleEnum index_style;
    minlp->get_nlp_info(numberColumns, numberRows, nnz_jac_g,
                        nnz_h_lag, index_style);

    const double* g_sol = minlp->g_sol();
    const double* g_l = minlp->g_l();
    const double* g_u = minlp->g_u();

    for(int iRow=0; iRow<numberRows; iRow++) {
      if(g_sol[iRow]<g_l[iRow]-primalTolerance) {
        primalTolerance = g_l[iRow]-g_sol[iRow];
      } else if(g_sol[iRow]>g_u[iRow]+primalTolerance) {
        primalTolerance = g_sol[iRow]-g_u[iRow];
      }
    }
  }

}
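HeuristicDive is an abstract base: selectVariableToBranch and setInternalVariables are pure virtual hooks supplied by concrete diving rules (Bonmin ships several, e.g. HeuristicDiveFractional). The sketch below is a hypothetical illustration, not part of this file; it assumes the interface declared in BonHeuristicDive.hpp and the pure virtuals clone/resetModel inherited from CbcHeuristic, and implements a simple most-fractional diving rule.

// Hypothetical example (not part of BonHeuristicDive.cpp): a minimal
// diving rule built on the HeuristicDive interface shown above.
#include "BonHeuristicDive.hpp"
#include <cmath>
#include <vector>

namespace Bonmin
{
  class MostFractionalDive : public HeuristicDive
  {
  public:
    MostFractionalDive(BonminSetup * setup)
      : HeuristicDive(setup)
    {}

    // clone() is pure virtual in CbcHeuristic; the implicit copy
    // constructor is sufficient for this stateless rule.
    virtual CbcHeuristic * clone() const
    {
      return new MostFractionalDive(*this);
    }

    // Reset when the model changes (also required by CbcHeuristic).
    virtual void resetModel(CbcModel * model)
    {
      setModel(model);
    }

    // This rule keeps no per-solve state.
    virtual void setInternalVariables(TMINLP2TNLP * minlp)
    {}

    // Pick the integer variable farthest from integrality and round it
    // toward the nearest integer (bestRound: -1 rounds down, +1 rounds up).
    virtual void selectVariableToBranch(TMINLP2TNLP * minlp,
                                        const std::vector<int> & integerColumns,
                                        const double * newSolution,
                                        int & bestColumn, int & bestRound)
    {
      bestColumn = -1;
      bestRound = -1;
      double largestDistance = 0.0;
      for (size_t i = 0; i < integerColumns.size(); i++) {
        int iColumn = integerColumns[i];
        double value = newSolution[iColumn];
        double fraction = value - std::floor(value);
        double distance = (fraction < 0.5) ? fraction : 1.0 - fraction;
        if (distance > largestDistance) {
          largestDistance = distance;
          bestColumn = iColumn;
          bestRound = (fraction < 0.5) ? -1 : +1;
        }
      }
    }
  };
}

Such a rule would be constructed with the BonminSetup in use and registered with the branch-and-bound model like any other CbcHeuristic; the registration details depend on the surrounding setup code and are omitted here.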