| Member | Defined in | Attributes |
| --- | --- | --- |
| cutoff_ | Bonmin::TNLP2FPNLP | [private] |
| dist2point(const Number *x) | Bonmin::TNLP2FPNLP | [private] |
| eval_f(Index n, const Number *x, bool new_x, Number &obj_value) | Bonmin::TNLP2FPNLP | [virtual] |
| eval_g(Index n, const Number *x, bool new_x, Index m, Number *g) | Bonmin::TNLP2FPNLP | [virtual] |
| eval_grad_f(Index n, const Number *x, bool new_x, Number *grad_f) | Bonmin::TNLP2FPNLP | [virtual] |
| eval_h(Index n, const Number *x, bool new_x, Number obj_factor, Index m, const Number *lambda, bool new_lambda, Index nele_hess, Index *iRow, Index *jCol, Number *values) | Bonmin::TNLP2FPNLP | [virtual] |
| eval_jac_g(Index n, const Number *x, bool new_x, Index m, Index nele_jac, Index *iRow, Index *jCol, Number *values) | Bonmin::TNLP2FPNLP | [virtual] |
| finalize_solution(SolverReturn status, Index n, const Number *x, const Number *z_L, const Number *z_U, Index m, const Number *g, const Number *lambda, Number obj_value, const IpoptData *ip_data, IpoptCalculatedQuantities *ip_cq) | Bonmin::TNLP2FPNLP | [virtual] |
| get_bounds_info(Index n, Number *x_l, Number *x_u, Index m, Number *g_l, Number *g_u) | Bonmin::TNLP2FPNLP | [virtual] |
| get_nlp_info(Index &n, Index &m, Index &nnz_jac_g, Index &nnz_h_lag, TNLP::IndexStyleEnum &index_style) | Bonmin::TNLP2FPNLP | [virtual] |
| get_starting_point(Index n, bool init_x, Number *x, bool init_z, Number *z_L, Number *z_U, Index m, bool init_lambda, Number *lambda) | Bonmin::TNLP2FPNLP | [inline, virtual] |
| getObjectiveScaling() const | Bonmin::TNLP2FPNLP | [inline] |
| index_style_ | Bonmin::TNLP2FPNLP | [private] |
| inds_ | Bonmin::TNLP2FPNLP | [private] |
| lambda_ | Bonmin::TNLP2FPNLP | [private] |
| norm_ | Bonmin::TNLP2FPNLP | [private] |
| objectiveScalingFactor_ | Bonmin::TNLP2FPNLP | [private] |
| operator=(const TNLP2FPNLP &) | Bonmin::TNLP2FPNLP | [private] |
| rhs_local_branching_constraint_ | Bonmin::TNLP2FPNLP | [private] |
| set_cutoff(Number cutoff) | Bonmin::TNLP2FPNLP | |
| set_dist2point_obj(int n, const Number *vals, const Index *inds) | Bonmin::TNLP2FPNLP | |
| set_rhs_local_branching_constraint(double rhs_local_branching_constraint) | Bonmin::TNLP2FPNLP | [inline] |
| set_use_cutoff_constraint(bool use_cutoff_constraint) | Bonmin::TNLP2FPNLP | [inline] |
| set_use_feasibility_pump_objective(bool use_feasibility_pump_objective) | Bonmin::TNLP2FPNLP | [inline] |
| set_use_local_branching_constraint(bool use_local_branching_constraint) | Bonmin::TNLP2FPNLP | [inline] |
| setLambda(double lambda) | Bonmin::TNLP2FPNLP | [inline] |
| setNorm(int norm) | Bonmin::TNLP2FPNLP | [inline] |
| setObjectiveScaling(double value) | Bonmin::TNLP2FPNLP | [inline] |
| setSigma(double sigma) | Bonmin::TNLP2FPNLP | [inline] |
| sigma_ | Bonmin::TNLP2FPNLP | [private] |
| TNLP2FPNLP(const SmartPtr< TNLP > tnlp, double objectiveScalingFactor=100) | Bonmin::TNLP2FPNLP | |
| TNLP2FPNLP(const SmartPtr< TNLP > tnlp, const SmartPtr< TNLP2FPNLP > other) | Bonmin::TNLP2FPNLP | |
| TNLP2FPNLP() | Bonmin::TNLP2FPNLP | [private] |
| TNLP2FPNLP(const TNLP2FPNLP &) | Bonmin::TNLP2FPNLP | [private] |
| tnlp_ | Bonmin::TNLP2FPNLP | [private] |
| use(Ipopt::SmartPtr< TNLP > tnlp) | Bonmin::TNLP2FPNLP | [inline] |
| use_cutoff_constraint_ | Bonmin::TNLP2FPNLP | [private] |
| use_feasibility_pump_objective_ | Bonmin::TNLP2FPNLP | [private] |
| use_local_branching_constraint_ | Bonmin::TNLP2FPNLP | [private] |
| vals_ | Bonmin::TNLP2FPNLP | [private] |
| ~TNLP2FPNLP() | Bonmin::TNLP2FPNLP | [virtual] |
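
The sketch below is not taken from the Bonmin documentation; it only illustrates how the constructor and setters listed above fit together when wrapping an existing Ipopt::TNLP for use as a feasibility-pump NLP. The header path `BonTNLP2FPNLP.hpp`, the placeholder `original` TNLP, the particular indices/values passed to `set_dist2point_obj`, and the comments on what `setLambda`/`setSigma` weight are assumptions for illustration, not guaranteed Bonmin behavior.

```cpp
// Minimal sketch: configure a Bonmin::TNLP2FPNLP around an existing TNLP.
// Header path and parameter semantics are assumptions (see lead-in above).
#include "BonTNLP2FPNLP.hpp"

using Ipopt::SmartPtr;
using Ipopt::TNLP;
using Bonmin::TNLP2FPNLP;

void setup_fp_nlp(SmartPtr<TNLP> original /* any user-defined Ipopt::TNLP */)
{
  // Wrap the original NLP; the second argument is the objective scaling
  // factor, which defaults to 100 per the constructor signature above.
  SmartPtr<TNLP2FPNLP> fp_nlp = new TNLP2FPNLP(original, 100.);

  // Distance-to-point term: minimize the distance of (hypothetical) integer
  // variables 0 and 3 to the target values 1.0 and 0.0.
  const Ipopt::Index  inds[] = {0, 3};
  const Ipopt::Number vals[] = {1.0, 0.0};
  fp_nlp->set_dist2point_obj(2, vals, inds);

  // Tuning parameters exposed by the inline setters in the table.
  fp_nlp->setNorm(2);      // assuming the int selects the p-norm used in dist2point
  fp_nlp->setLambda(1.0);  // assumed: weight of the distance term in the combined objective
  fp_nlp->setSigma(0.0);   // assumed: weight of the original objective in the combined objective
  fp_nlp->set_use_feasibility_pump_objective(true);
  fp_nlp->set_use_cutoff_constraint(false);
  fp_nlp->set_use_local_branching_constraint(false);

  // fp_nlp derives from Ipopt::TNLP, so it can be handed to Ipopt like any other NLP.
}
```

A later feasibility-pump iteration can reuse the same wrapper by calling `use()` with a new TNLP and `set_dist2point_obj()` with the freshly rounded point, rather than constructing a new object.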