#include "OSCommonUtil.h"
using std::ostringstream;
const double * const daX,
const double * const daLambda,
cout << "*** Problem not defined <wrapperEvalFC>\n";
if (evalRequestCode != KTR_RC_EVALFC)
cout << "*** Bad request code " << evalRequestCode << " <wrapperEvalFC>\n";
const double * const daX,
const double * const daLambda,
cout << "*** Problem not defined <wrapperEvalGA>\n";
if (evalRequestCode != KTR_RC_EVALGA)
cout << "*** Bad request code " << evalRequestCode << " <wrapperEvalGA>\n";
const double * const daX,
const double * const daLambda,
cout << "*** Problem not defined <wrapperEvalHorHV>\n";
if (evalRequestCode == KTR_RC_EVALH)
if (g_pOptProblem->areDerivativesImplemented (nCAN_COMPUTE_H) == false)
cout << "*** This problem cannot evaluate H <wrapperEvalHorHV>\n";
return( g_pOptProblem->evalH (daX, daLambda, daH, userParams) );
else if (evalRequestCode == KTR_RC_EVALHV)
if (g_pOptProblem->areDerivativesImplemented (nCAN_COMPUTE_HV) == false)
cout << "*** This problem cannot evaluate H*v <wrapperEvalHorHV>\n";
return( g_pOptProblem->evalHV (daX, daLambda, daHV, userParams) );
cout << "*** Bad request code " << evalRequestCode << " <wrapperEvalHorHV>\n";
__declspec(dllexport) NlpProblemDef * getNlpProblemDef (void)
NlpProblemDef * getNlpProblemDef (void)
if (_daXInit == NULL)
cout << "*** Must call 'loadProblemIntoKnitro' before 'KnitroSolver::getInitialX'\n";
exit( EXIT_FAILURE );
for (int i = 0; i < _nN; i++)
daX[i] = _daXInit[i];
cout << "number variables !!!!!!!!!!!!!!!!!!!!!!!!!!!" << _nN << endl;
cout << "number constraints !!!!!!!!!!!!!!!!!!!!!!!!!!!" << _nM << endl;
std::cout << "GET BOUNDS INFORMATION FOR KNITRO!!!!!!!!!!!!!!!" << std::endl;
_daXLo = new double[_nN];
_daXUp = new double[_nN];
for(i = 0; i < _nN; i++)
if( mdVarLB[ i] == -OSDBL_MAX) _daXLo[i] = -KTR_INFBOUND;
else _daXLo[i] = mdVarLB[ i];
if( mdVarUB[ i] == OSDBL_MAX) _daXUp[i] = KTR_INFBOUND;
else _daXUp[i] = mdVarUB[ i];
_naCType = new int[ _nM];
_daCLo = new double[ _nM];
_daCUp = new double[ _nM];
_naCType[0] = KTR_CONTYPE_LINEAR;
for(i = 0; i < _nM; i++)
if( mdConLB[ i] == -OSDBL_MAX) _daCLo[i] = -KTR_INFBOUND;
else _daCLo[i] = mdConLB[ i];
if( mdConUB[ i] == OSDBL_MAX) _daCUp[i] = KTR_INFBOUND;
else _daCUp[i] = mdConUB[ i];
_naCType[i] = KTR_CONTYPE_GENERAL;
std::cout << "Call sparse jacobian" << std::endl;
std::cout << "Done calling sparse jacobian" << std::endl;
_naJacIndexVars = new int[_nNnzJ];
_naJacIndexCons = new int[_nNnzJ];
for(idx = 0; idx < _nM; idx++)
for(k = *(sparseJacobian->starts + idx); k < *(sparseJacobian->starts + idx + 1); k++)
_naJacIndexCons[i] = idx;
_naJacIndexVars[i] = *(sparseJacobian->indexes + k);
cout << "This is a linear program" << endl;
std::cout << "Get Lagrangian Hessian Sparsity Pattern " << std::endl;
std::cout << "Done Getting Lagrangian Hessian Sparsity Pattern " << std::endl;
_naHessRows = new int[_nNnzH];
_naHessCols = new int[_nNnzH];
for(i = 0; i < _nNnzH; i++)
_naHessCols[i] = *(sparseHessian->hessColIdx + i);
_naHessRows[i] = *(sparseHessian->hessRowIdx + i);
cout << "ROW HESS IDX !!!!!!!!!!!!!!!!!!!!!!!!!!!" << _naHessRows[i] << endl;
cout << "COL HESS IDX !!!!!!!!!!!!!!!!!!!!!!!!!!!" << _naHessCols[i] << endl;
double *mdXInit = osinstance->getVariableInitialValues();
_daXInit = new double[_nN];
for(i = 0; i < _nN; i++)
if( CommonUtil::ISOSNAN( mdXInit[ i]) == true)
_daXInit[ i] = 1.7171;
else _daXInit[ i] = mdXInit[ i];
for(i = 0; i < _nN; i++)
_daXInit[ i] = 1.7171;
double * daLambdaInit = new double[_nM + _nN];
for (i = 0; i < _nM + _nN; i++)
daLambdaInit[i] = 0.0;
int iObjSense = KTR_OBJGOAL_MINIMIZE;
iObjSense = KTR_OBJGOAL_MAXIMIZE;
int iObjType = KTR_OBJTYPE_GENERAL;
i = KTR_init_problem (kc, _nN,
                      iObjSense, iObjType, _daXLo, _daXUp,
                      _nM, _naCType, _daCLo, _daCUp,
                      _nNnzJ, _naJacIndexVars, _naJacIndexCons,
                      _nNnzH, _naHessRows, _naHessCols,
                      _daXInit, daLambdaInit);
delete [] _naJacIndexVars;
delete [] _naJacIndexCons;
delete [] _naHessRows;
delete [] _naHessCols;
delete [] daLambdaInit;
cout << "*** KTR_init_problem() returned " << i << "\n";
if (nWhichDers == nCAN_COMPUTE_GA)
if (nWhichDers == nCAN_COMPUTE_H)
if (nWhichDers == nCAN_COMPUTE_HV)
if( CommonUtil::ISOSNAN( *dObj )) return (-1);
for(i = 0; i < _nM; i++)
if( CommonUtil::ISOSNAN( (double)conVals[ i] ) ) return (-1);
daC[i] = conVals[ i] ;
std::cout << "EVALUATING OBJ GRADIENT" << std::endl;
for(i = 0; i < _nN; i++)
if( CommonUtil::ISOSNAN( (double)objGrad[ i] ) ) return (-1);
daG[i] = objGrad[ i] ;
std::cout << "EVALUATING JACOBIAN" << std::endl;
for(int i = 0; i < _nNnzJ; i++)
daJ[ i] = sparseJacobian->values[i];
cout << "daJ[i]:!!!!!!!!!!!! " << daJ[ i] << endl;
const double * const daLambda,
double* objMultipliers = new double[1];
objMultipliers[0] = 1;
for(i = 0; i < _nNnzH; i++)
std::cout << "Hessian Value = " << daH[ i] << std::endl;
delete[] objMultipliers;
const double * const daLambda,
catch(const ErrorClass& eclass)
std::cout << "THERE IS AN ERROR" << std::endl;
osrl = osrlwriter->writeOSrL( osresult);
throw ErrorClass( osrl) ;
void KnitroSolver::setSolverOptions() throw (ErrorClass)
catch(const ErrorClass& eclass)
std::cout << "THERE IS AN ERROR" << std::endl;
osrl = osrlwriter->writeOSrL( osresult);
throw ErrorClass( osrl) ;
cout << "inside KnitroSolver destructor" << endl;
cout << "leaving KnitroSolver destructor" << endl;
if(osil.length() == 0 && osinstance == NULL) throw ErrorClass("there is no instance");
clock_t start, finish;
bool newOSiLReader = false;
newOSiLReader = true;
duration = (double) (finish - start) / CLOCKS_PER_SEC;
cout << "Parsing took (seconds): "<< duration << endl;
cout << "*** KTR_new failed, maybe a license issue?\n";
exit( EXIT_FAILURE );
KTR_load_param_file (kc, "knitro.opt");
if (pOptProb->loadProblemIntoKnitro ( kc) == false)
cout << "*** loadProblemIntoKnitro failed\n";
exit( EXIT_FAILURE );
cout << "*** KTR_set_func_callback failed\n";
exit( EXIT_FAILURE );
if (pOptProb->areDerivativesImplemented (nCAN_COMPUTE_GA) == true)
cout << "*** KTR_set_grad_callback failed\n";
exit( EXIT_FAILURE );
if ((pOptProb->areDerivativesImplemented (nCAN_COMPUTE_H) == true)
    || (pOptProb->areDerivativesImplemented (nCAN_COMPUTE_HV) == true))
cout << "*** KTR_set_hess_callback failed\n";
exit( EXIT_FAILURE );
double * daX = new double[pOptProb->getN()];
double * daLambda = new double[pOptProb->getM() + pOptProb->getN()];
double* mdObjValues = new double[1];
if (bWantToSolve == true)
int nStatus = KTR_solve (kc, daX, daLambda, 0, &dFinalObj,
                         NULL, NULL, NULL, NULL, NULL, NULL);
std::cout << "dFinalObj = " << dFinalObj << std::endl;
cout << "*** Final KNITRO status = " << nStatus << "\n";
cout << "dax[0] = " << daX[0] << "\n";
std::string message = "Knitro solver finished normally.";
std::string solutionDescription = "";
throw ErrorClass("OSResult error: setServiceName");
throw ErrorClass("OSResult error: setInstanceName");
throw ErrorClass("OSResult error: setVariableNumber");
throw ErrorClass("OSResult error: setObjectiveNumber");
throw ErrorClass("OSResult error: setConstraintNumber");
throw ErrorClass("OSResult error: setSolutionNumber");
throw ErrorClass("OSResult error: setGeneralMessage");
std::cout << "START CASES " << 0 << endl;
std::cout << "WE ARE IN CASE " << 0 << endl;
solutionDescription = "LOCALLY OPTIMAL SOLUTION FOUND[KNITRO STATUS 0]: Knitro found a locally optimal point which satisfies the stopping criterion. If the problem is convex (for example, a linear program), then this point corresponds to a globally optimal solution.";
std::cout << "SET SOLUTION STATUS " << endl;
std::cout << "SET PRIMAL VALUES " << endl;
mdObjValues[0] = dFinalObj;
std::cout << "SET OBJECTIVE VALUES " << endl;
solutionDescription = "Iteration limit reached[KNITRO STATUS -1]: The iteration limit was reached before being able to satisfy the required stopping criteria.";
mdObjValues[0] = dFinalObj;
solutionDescription = "Convergence to an infeasible point[KNITRO STATUS -2]: Problem may be locally infeasible. The algorithm has converged to an infeasible point from which it cannot further decrease the infeasibility measure. This happens when the problem is infeasible, but may also occur on occasion for feasible problems with nonlinear constraints or badly scaled problems. It is recommended to try various initial points. If this occurs for a variety of initial points, it is likely the problem is infeasible.";
solutionDescription = "Problem appears to be unbounded[KNITRO STATUS -3]: Iterate is feasible and objective magnitude > objrange. The objective function appears to be decreasing without bound, while satisfying the constraints. If the problem really is bounded, increase the size of the parameter objrange to avoid terminating with this message.";
solutionDescription = "Relative change in solution estimate < xtol[KNITRO STATUS -4]: The relative change in the solution estimate is less than that specified by the parameter xtol. To try to get more accuracy one may decrease xtol. If xtol is very small already, it is an indication that no more significant progress can be made. If the current point is feasible, it is possible it may be optimal, however the stopping tests cannot be satisfied (perhaps because of degeneracy, ill-conditioning or bad scaling).";
mdObjValues[0] = dFinalObj;
solutionDescription = "Current solution estimate cannot be improved. Point appears to be optimal, but desired accuracy could not be achieved.[KNITRO STATUS -5]: No more progress can be made, but the stopping tests are close to being satisfied (within a factor of 100) and so the current approximate solution is believed to be optimal.";
mdObjValues[0] = dFinalObj;
solutionDescription = "Time limit reached[KNITRO STATUS -6]: The time limit was reached before being able to satisfy the required stopping criteria.";
mdObjValues[0] = dFinalObj;
solutionDescription = "Input Error[KNITRO STATUS -50 to -60]: Termination values in this range imply some input error. If outlev>0 details of this error will be printed to standard output or the file knitro.log depending on the value of outmode.";
solutionDescription = "Callback function error[KNITRO STATUS -90]: This termination value indicates that an error (i.e., negative return value) occurred in a user provided callback routine.";
solutionDescription = "LP solver error[KNITRO STATUS -97]: This termination value indicates that an unrecoverable error occurred in the LP solver used in the active-set algorithm preventing the optimization from continuing.";
solutionDescription = "Evaluation error[KNITRO STATUS -98]: This termination value indicates that an evaluation error occurred (e.g., divide by 0, taking the square root of a negative number), preventing the optimization from continuing.";
solutionDescription = "Not enough memory available to solve problem[KNITRO STATUS -99]: This termination value indicates that there was not enough memory available to solve the problem.";
solutionDescription = "OTHER[KNITRO]: other unknown solution status from Knitro solver";
osrl = osrlwriter->writeOSrL( osresult);
pOptProb->getInitialX (daX);
KTR_check_first_ders (kc, daX, 2, 1.0e-14, 1.0e-14,
                      0, 0.0, NULL, NULL, NULL, NULL);
if(newOSiLReader == true)
delete [] mdObjValues;
catch(const ErrorClass& eclass)
osrl = osrlwriter->writeOSrL( osresult);
else cout << "problem is a maximization" << endl;
NlpProblemDef::~NlpProblemDef (void)
bool loadProblemIntoKnitro(KTR_context_ptr kc)
Define the fixed problem definition information and pass it to KNITRO by calling KTR_init_problem.
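A minimal sketch of how this step looks with the KNITRO callable-library interface assumed throughout this file; initKnitroProblem is an illustrative name, and the argument order follows the KTR_init_problem call reconstructed in the listing above.
#include "knitro.h"   // assumed header for the callable-library API used here

// Illustrative helper: hand the already-converted problem data to KNITRO.
bool initKnitroProblem (KTR_context_ptr kc, int nN, int iObjSense, int iObjType,
                        const double *daXLo, const double *daXUp,
                        int nM, const int *naCType,
                        const double *daCLo, const double *daCUp,
                        int nNnzJ, const int *naJacIndexVars, const int *naJacIndexCons,
                        int nNnzH, const int *naHessRows, const int *naHessCols,
                        const double *daXInit, const double *daLambdaInit)
{
    int nStatus = KTR_init_problem (kc, nN, iObjSense, iObjType, daXLo, daXUp,
                                    nM, naCType, daCLo, daCUp,
                                    nNnzJ, naJacIndexVars, naJacIndexCons,
                                    nNnzH, naHessRows, naHessCols,
                                    daXInit, daLambdaInit);
    return nStatus == 0;   // KTR_init_problem returns 0 on success
}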
double * getConstraintLowerBounds()
Get constraint lower bounds.
double * getVariableLowerBounds()
Get variable lower bounds.
double * getConstraintUpperBounds()
Get constraint upper bounds.
int evalH(const double *const daX, const double *const daLambda, double *const daH, void *userParams)
static int wrapperEvalHorHV(const int evalRequestCode, const int n, const int m, const int nnzJ, const int nnzH, const double *const daX, const double *const daLambda, double *const dObj, double *const daC, double *const daG, double *const daJ, double *const daH, double *const daHV, void *userParams)
By necessity this wrapper signature matches the function KTR_callback.
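A condensed sketch of what such a wrapper does, based on the fragments in the listing above; exampleEvalHorHV is an illustrative name, and g_pOptProblem is the file-scope NlpProblemDef pointer documented on this page.
#include <iostream>
#include "knitro.h"   // assumed header providing KTR_RC_EVALH / KTR_RC_EVALHV

static int exampleEvalHorHV (const int evalRequestCode,
                             const int n, const int m,
                             const int nnzJ, const int nnzH,
                             const double * const daX,
                             const double * const daLambda,
                             double * const dObj, double * const daC,
                             double * const daG, double * const daJ,
                             double * const daH, double * const daHV,
                             void * userParams)
{
    // KNITRO issues one request code per call; fill only the array it asked for.
    if (evalRequestCode == KTR_RC_EVALH)
        return g_pOptProblem->evalH (daX, daLambda, daH, userParams);
    if (evalRequestCode == KTR_RC_EVALHV)
        return g_pOptProblem->evalHV (daX, daLambda, daHV, userParams);
    std::cout << "*** Bad request code " << evalRequestCode << " <exampleEvalHorHV>\n";
    return -1;
}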
char * getVariableTypes()
Get variable types.
bool setSolutionStatus(int solIdx, std::string type, std::string description)
Set the [i]th optimization solution status, where i equals the given solution index.
bool setPrimalVariableValuesDense(int solIdx, double *x)
Set the [i]th optimization solution's primal variable values, where i equals the given solution index...
SparseHessianMatrix * calculateLagrangianHessian(double *x, double *objLambda, double *conLambda, bool new_x, int highestOrder)
Calculate the Hessian of the Lagrangian Expression Tree This method will build the CppAD expression t...
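A small sketch of how the returned SparseHessianMatrix maps onto KNITRO's coordinate arrays, using the hessRowIdx, hessColIdx, and hessDimension members documented on this page; copyHessianPattern is an illustrative name.
// Illustrative helper; assumes the OS header declaring SparseHessianMatrix is included.
// Copies the Hessian sparsity pattern into the row/column arrays KTR_init_problem expects.
void copyHessianPattern (const SparseHessianMatrix *sparseHessian,
                         int *naHessRows, int *naHessCols)
{
    for (int i = 0; i < sparseHessian->hessDimension; i++)
    {
        naHessRows[i] = sparseHessian->hessRowIdx[i];
        naHessCols[i] = sparseHessian->hessColIdx[i];
    }
}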
double * values
values holds a double array of nonzero partial derivatives
bool bUseExpTreeForFunEval
bUseExpTreeForFunEval is set to true if you wish to use the OS Expression Tree for function evaluatio...
int getVariableNumber()
Get number of variables.
static NlpProblemDef * g_pTheNlpProblemDefInstance
bool setServiceName(std::string serviceName)
Set service name.
bool setVariableNumber(int variableNumber)
Set the variable number.
std::string errormsg
errormsg is the error that is causing the exception to be thrown
static NlpProblemDef * g_pOptProblem
int getLinearConstraintCoefficientNumber()
Get number of specified (usually nonzero) linear constraint coefficient values.
std::string maxOrMin
declare the objective function to be a max or a min
int evalGA(const double *const daX, double *const daG, double *const daJ, void *userParams)
SparseJacobianMatrix * calculateAllConstraintFunctionGradients(double *x, double *objLambda, double *conLambda, bool new_x, int highestOrder)
Calculate the gradient of all constraint functions.
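A sketch of how a SparseJacobianMatrix in row-start form is expanded into the (constraint, variable) coordinate pairs KNITRO wants, mirroring the loop in the listing above; jacobianToCoordinate is an illustrative name.
// Illustrative helper; assumes the OS header declaring SparseJacobianMatrix is included.
// starts[idx] .. starts[idx+1]-1 index the nonzeros of constraint row idx.
void jacobianToCoordinate (const SparseJacobianMatrix *sparseJacobian, int nM,
                           int *naJacIndexCons, int *naJacIndexVars)
{
    int i = 0;
    for (int idx = 0; idx < nM; idx++)
    {
        for (int k = sparseJacobian->starts[idx]; k < sparseJacobian->starts[idx + 1]; k++)
        {
            naJacIndexCons[i] = idx;                        // constraint (row) index
            naJacIndexVars[i] = sparseJacobian->indexes[k]; // variable (column) index
            i++;
        }
    }
}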
Take an OSResult object and write a string that validates against OSrL.
bool setDualVariableValuesDense(int solIdx, double *y)
Set the [i]th optimization solution's dual variable values, where i equals the given solution index...
int * hessColIdx
hessColIdx is an integer array of column indices in the range 0, ..., n - 1.
int getObjectiveNumber()
Get number of objectives.
bool setObjectiveNumber(int objectiveNumber)
Set the objective number.
bool setInstanceName(std::string instanceName)
Set instance name.
static int wrapperEvalFC(const int evalRequestCode, const int n, const int m, const int nnzJ, const int nnzH, const double *const daX, const double *const daLambda, double *const dObj, double *const daC, double *const daG, double *const daJ, double *const daH, double *const daHV, void *userParams)
OSInstance * readOSiL(const std::string &osil)
parse the OSiL model instance.
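A short sketch of the parsing path that solve() takes when it is handed OSiL text instead of a ready OSInstance; the header name OSiLReader.h and the ownership comment are assumptions.
#include <iostream>
#include <string>
#include "OSiLReader.h"   // assumed header name for the OSiLReader class

void parseExample (const std::string &osil)
{
    OSiLReader *osilreader = new OSiLReader ();
    OSInstance *osinstance = osilreader->readOSiL (osil);   // may throw ErrorClass
    std::cout << "variables: " << osinstance->getVariableNumber () << std::endl;
    delete osilreader;   // assumed to release the parsed OSInstance as well
}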
std::string * getVariableNames()
Get variable names.
int * hessRowIdx
hessRowIdx is an integer array of row indices in the range 0, ..., n - 1.
int getNumberOfNonlinearExpressions()
Get number of nonlinear expressions.
static int wrapperEvalGA(const int evalRequestCode, const int n, const int m, const int nnzJ, const int nnzH, const double *const daX, const double *const daLambda, double *const dObj, double *const daC, double *const daG, double *const daJ, double *const daH, double *const daHV, void *userParams)
By necessity this wrapper signature matches the function KTR_callback.
int numberOfObjectives
numberOfObjectives is the number of objective functions in the instance
int evalHV(const double *const daX, const double *const daLambda, double *const daHV, void *userParams)
int * indexes
indexes holds an integer array of rowIdx (or colIdx) elements in coefMatrix (AMatrix).
bool setSolutionNumber(int number)
set the number of solutions.
double ** getDenseObjectiveCoefficients()
getDenseObjectiveCoefficients.
SparseJacobianMatrix * getJacobianSparsityPattern()
bool areDerivativesImplemented(const DerivativesImplementedType nWhichDers)
~KnitroSolver()
the KnitroSolver class destructor
int valueSize
valueSize is the dimension of the values array
Used to read an OSiL string.
int * indexes
indexes holds an integer array of variable indices.
virtual void buildSolverInstance()
buildSolverInstance is a virtual function – the actual solvers will implement their own buildSolverIn...
double * calculateAllConstraintFunctionValues(double *x, double *objLambda, double *conLambda, bool new_x, int highestOrder)
Calculate all of the constraint function values.
KnitroProblem(OSInstance *osinstance_, OSResult *osresult_)
the KnitroProblem class constructor
double ** calculateAllObjectiveFunctionGradients(double *x, double *objLambda, double *conLambda, bool new_x, int highestOrder)
Calculate the gradient of all objective functions.
int hessDimension
hessDimension is the number of nonzeros in each array.
int * varOneIndexes
varOneIndexes holds an integer array of the first variable indexes of all the quadratic terms...
SparseHessianMatrix * getLagrangianHessianSparsityPattern()
virtual ~KnitroProblem()
the KnitroProblem class destructor
std::string * getObjectiveMaxOrMins()
Get objective maxOrMins.
double * hessValues
hessValues is a double array of the Hessian values.
int evalFC(const double *const daX, double *const dObj, double *const daC, void *userParams)
SparseMatrix * getLinearConstraintCoefficientsInColumnMajor()
Get linear constraint coefficients in column major.
bool setGeneralMessage(std::string message)
Set the general message.
double * values
values holds a double array of value elements in coefMatrix (AMatrix), which contains nonzero element...
bool setGeneralStatusType(std::string type)
Set the general status type, which can be: success, error, warning.
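A compact sketch of the result-reporting pattern solve() follows with these setters and OSrLWriter; reportSolution is an illustrative name, and the status and message strings are examples, not an exhaustive mapping of KNITRO return codes.
#include <string>
// Assumes the OS headers declaring OSResult and OSrLWriter are included.

std::string reportSolution (OSResult *osresult, OSrLWriter *osrlwriter,
                            int nVars, int nObjs, int nCons,
                            double *daX, double *mdObjValues)
{
    osresult->setServiceName ("Solved with Knitro");       // illustrative text
    osresult->setGeneralStatusType ("success");
    osresult->setVariableNumber (nVars);
    osresult->setObjectiveNumber (nObjs);
    osresult->setConstraintNumber (nCons);
    osresult->setSolutionNumber (1);
    osresult->setGeneralMessage ("Knitro solver finished normally.");
    osresult->setSolutionStatus (0, "optimal", "LOCALLY OPTIMAL SOLUTION FOUND");
    osresult->setPrimalVariableValuesDense (0, daX);
    osresult->setObjectiveValuesDense (0, mdObjValues);
    return osrlwriter->writeOSrL (osresult);
}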
a sparse Jacobian matrix data structure
std::string getInstanceName()
Get instance name.
InstanceData * instanceData
A pointer to an InstanceData object.
int * starts
starts holds an integer array of start elements, each start element points to the start of partials f...
int getConstraintNumber()
Get number of constraints.
Objective ** obj
obj is a pointer to an array of Objective object pointers
bool setConstraintNumber(int constraintNumber)
Set the constraint number.
virtual void solve()
solve results in an instance being read into the Knitro data structures and optimized ...
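A condensed sketch of the driver logic this method implements, using the callable-library entry points that appear in the listing above; solveSketch is an illustrative name, the error handling is abbreviated, and the final KTR_free call is assumed from the standard API rather than shown in the fragments.
#include <iostream>
#include "knitro.h"   // assumed header for the callable-library API used here

void solveSketch (NlpProblemDef *pOptProb)
{
    KTR_context_ptr kc = KTR_new ();
    if (kc == NULL)
    {
        std::cout << "*** KTR_new failed, maybe a license issue?\n";
        return;
    }
    KTR_load_param_file (kc, "knitro.opt");
    if (pOptProb->loadProblemIntoKnitro (kc) == false)
        return;

    // Register the static wrappers documented on this page.
    KTR_set_func_callback (kc, wrapperEvalFC);
    KTR_set_grad_callback (kc, wrapperEvalGA);
    KTR_set_hess_callback (kc, wrapperEvalHorHV);

    double *daX      = new double[pOptProb->getN ()];
    double *daLambda = new double[pOptProb->getM () + pOptProb->getN ()];
    double  dFinalObj = 0.0;
    int nStatus = KTR_solve (kc, daX, daLambda, 0, &dFinalObj,
                             NULL, NULL, NULL, NULL, NULL, NULL);
    std::cout << "*** Final KNITRO status = " << nStatus << "\n";

    delete [] daX;
    delete [] daLambda;
    KTR_free (&kc);
}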
void getInitialX(double *const daX)
int getNumberOfQuadraticTerms()
Get the number of specified (usually nonzero) qTerms in the quadratic coefficients.
Objectives * objectives
objectives is a pointer to a Objectives object
std::string getInstanceSource()
Get instance source.
bool initForAlgDiff()
This should be called by nonlinear solvers using callback functions.
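A small sketch of the evaluation pattern the callbacks rely on, assuming highestOrder = 0 requests plain function values; evalObjectiveSketch is an illustrative name and the NULL multipliers follow the evalFC fragments above.
// Assumes the OS header declaring OSInstance is included.
double evalObjectiveSketch (OSInstance *osinstance, double *x)
{
    osinstance->initForAlgDiff ();   // prepare the algorithmic-differentiation layer once
    double *objVals = osinstance->calculateAllObjectiveFunctionValues (x, NULL, NULL, true, 0);
    double *conVals = osinstance->calculateAllConstraintFunctionValues (x, NULL, NULL, true, 0);
    (void) conVals;      // in evalFC these are copied into KNITRO's daC array
    return objVals[0];   // single objective assumed
}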
int * rowIndexes
rowIndexes holds an integer array of row indexes of all the quadratic terms.
int * starts
starts holds an integer array of start elements in coefMatrix (AMatrix), which points to the start of...
double * calculateAllObjectiveFunctionValues(double *x, double *objLambda, double *conLambda, bool new_x, int highestOrder)
Calculate all of the objective function values.
QuadraticTerms * getQuadraticTerms()
Get all the quadratic terms in the instance.
double * getVariableUpperBounds()
Get variable upper bounds.
bool setObjectiveValuesDense(int solIdx, double *objectiveValues)
Set the [i]th optimization solution's objective values, where i equals the given solution index...
The in-memory representation of an OSiL instance.
double * coefficients
coefficients holds a double array all the quadratic term coefficients.
int * varTwoIndexes
varTwoIndexes holds an integer array of the second variable indexes of all the quadratic terms...
KnitroSolver()
the KnitroSolver class constructor
The in-memory representation of a SparseHessianMatrix.
std::string * getConstraintNames()
Get constraint names.
used for throwing exceptions.
Take an OSInstance object and write a string that validates against the OSiL schema.
void dataEchoCheck()
use this for debugging, print out the instance that the solver thinks it has and compare this with th...
std::string getInstanceDescription()
Get instance description.